diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml
index 32b0a12f06a0e..66b989d94455c 100644
--- a/.buildkite/pipelines/intake.template.yml
+++ b/.buildkite/pipelines/intake.template.yml
@@ -32,6 +32,14 @@ steps:
       image: family/elasticsearch-ubuntu-2004
       machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
+  - label: part4
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
   - group: bwc-snapshots
     steps:
       - label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index 5f332b1b5a730..49c2d34df7e31 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -33,6 +33,14 @@ steps:
       image: family/elasticsearch-ubuntu-2004
       machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
+  - label: part4
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
   - group: bwc-snapshots
     steps:
       - label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
@@ -40,7 +48,7 @@ steps:
         timeout_in_minutes: 300
         matrix:
           setup:
-            BWC_VERSION: ["7.17.15", "8.11.2", "8.12.0"]
+            BWC_VERSION: ["7.17.16", "8.11.2", "8.12.0"]
         agents:
           provider: gcp
           image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index 832a4698aaa51..fab90c8ed6d17 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -1073,6 +1073,22 @@ steps:
     env:
       BWC_VERSION: 7.17.15

+  - label: "{{matrix.image}} / 7.17.16 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.16
+    timeout_in_minutes: 300
+    matrix:
+      setup:
+        image:
+          - rocky-8
+          - ubuntu-2004
+    agents:
+      provider: gcp
+      image: family/elasticsearch-{{matrix.image}}
+      machineType: custom-16-32768
+      buildDirectory: /dev/shm/bk
+    env:
+      BWC_VERSION: 7.17.16
+
   - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade"
     command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0
     timeout_in_minutes: 300
diff --git a/.buildkite/pipelines/periodic.bwc.template.yml b/.buildkite/pipelines/periodic.bwc.template.yml
index 8a8c43d75e3ef..34e9aa656e340 100644
--- a/.buildkite/pipelines/periodic.bwc.template.yml
+++ b/.buildkite/pipelines/periodic.bwc.template.yml
@@ -4,7 +4,7 @@
   agents:
     provider: gcp
     image: family/elasticsearch-ubuntu-2004
-    machineType: custom-32-98304
+    machineType: n1-standard-32
     buildDirectory: /dev/shm/bk
   env:
     BWC_VERSION: $BWC_VERSION
\ No newline at end of file
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index ac81188efe946..88738c88ef5a0 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -8,7 +8,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.0.0
@@ -18,7 +18,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.0.1
@@ -28,7 +28,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.1.0
@@ -38,7 +38,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.1.1
@@ -48,7 +48,7 @@ steps:
     agents:
      provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.2.0
@@ -58,7 +58,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.2.1
@@ -68,7 +68,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.3.0
@@ -78,7 +78,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.3.1
@@ -88,7 +88,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.3.2
@@ -98,7 +98,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.4.0
@@ -108,7 +108,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.4.1
@@ -118,7 +118,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.4.2
@@ -128,7 +128,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.5.0
@@ -138,7 +138,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.5.1
@@ -148,7 +148,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.5.2
@@ -158,7 +158,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.6.0
@@ -168,7 +168,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.6.1
@@ -178,7 +178,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.6.2
@@ -188,7 +188,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.7.0
@@ -198,7 +198,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.7.1
@@ -208,7 +208,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.8.0
@@ -218,7 +218,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.8.1
@@ -228,7 +228,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.9.0
@@ -238,7 +238,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.9.1
@@ -248,7 +248,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.9.2
@@ -258,7 +258,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.9.3
@@ -268,7 +268,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.10.0
@@ -278,7 +278,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.10.1
@@ -288,7 +288,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.10.2
@@ -298,7 +298,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.11.0
@@ -308,7 +308,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.11.1
@@ -318,7 +318,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.11.2
@@ -328,7 +328,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.12.0
@@ -338,7 +338,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.12.1
@@ -348,7 +348,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.13.0
@@ -358,7 +358,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.13.1
@@ -368,7 +368,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.13.2
@@ -378,7 +378,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.13.3
@@ -388,7 +388,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.13.4
@@ -398,7 +398,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.14.0
@@ -408,7 +408,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.14.1
@@ -418,7 +418,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.14.2
@@ -428,7 +428,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.15.0
@@ -438,7 +438,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.15.1
@@ -448,7 +448,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.15.2
@@ -458,7 +458,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.16.0
@@ -468,7 +468,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.16.1
@@ -478,7 +478,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.16.2
@@ -488,7 +488,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.16.3
@@ -498,7 +498,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.0
@@ -508,7 +508,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.1
@@ -518,7 +518,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.2
@@ -528,7 +528,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.3
@@ -538,7 +538,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.4
@@ -548,7 +548,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.5
@@ -558,7 +558,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.6
@@ -568,7 +568,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.7
@@ -578,7 +578,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.8
@@ -588,7 +588,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.9
@@ -598,7 +598,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.10
@@ -608,7 +608,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.11
@@ -618,7 +618,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.12
@@ -628,7 +628,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.13
@@ -638,7 +638,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.14
@@ -648,17 +648,27 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 7.17.15
+  - label: 7.17.16 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.16#bwcTest
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: custom-32-98304
+      buildDirectory: /dev/shm/bk
+    env:
+      BWC_VERSION: 7.17.16
   - label: 8.0.0 / bwc
     command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.0.0
@@ -668,7 +678,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.0.1
@@ -678,7 +688,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.1.0
@@ -688,7 +698,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.1.1
@@ -698,7 +708,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.1.2
@@ -708,7 +718,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.1.3
@@ -718,7 +728,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.2.0
@@ -728,7 +738,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.2.1
@@ -738,7 +748,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.2.2
@@ -748,7 +758,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.2.3
@@ -758,7 +768,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.3.0
@@ -768,7 +778,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.3.1
@@ -778,7 +788,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.3.2
@@ -788,7 +798,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.3.3
@@ -798,7 +808,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.4.0
@@ -808,7 +818,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.4.1
@@ -818,7 +828,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.4.2
@@ -828,7 +838,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.4.3
@@ -838,7 +848,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.5.0
@@ -848,7 +858,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.5.1
@@ -858,7 +868,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.5.2
@@ -868,7 +878,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.5.3
@@ -878,7 +888,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.6.0
@@ -888,7 +898,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.6.1
@@ -898,7 +908,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.6.2
@@ -908,7 +918,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.7.0
@@ -918,7 +928,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.7.1
@@ -928,7 +938,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.8.0
@@ -938,7 +948,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.8.1
@@ -948,7 +958,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.8.2
@@ -958,7 +968,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.9.0
@@ -968,7 +978,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.9.1
@@ -978,7 +988,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.9.2
@@ -988,7 +998,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.10.0
@@ -998,7 +1008,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.10.1
@@ -1008,7 +1018,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.10.2
@@ -1018,7 +1028,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.10.3
@@ -1028,7 +1038,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.10.4
@@ -1038,7 +1048,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.11.0
@@ -1048,7 +1058,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.11.1
@@ -1058,7 +1068,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.11.2
@@ -1068,7 +1078,7 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
     env:
       BWC_VERSION: 8.12.0
diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots.yml b/.buildkite/pipelines/pull-request/bwc-snapshots.yml
index 21873475056ea..5a9fc2d938ac0 100644
--- a/.buildkite/pipelines/pull-request/bwc-snapshots.yml
+++ b/.buildkite/pipelines/pull-request/bwc-snapshots.yml
@@ -16,5 +16,5 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-4-fips.yml b/.buildkite/pipelines/pull-request/part-4-fips.yml
new file mode 100644
index 0000000000000..11a50456ca4c0
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-4-fips.yml
@@ -0,0 +1,11 @@
+config:
+  allow-labels: "Team:Security"
+steps:
+  - label: part-4-fips
+    command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart4
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: custom-32-98304
+      buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-4-windows.yml b/.buildkite/pipelines/pull-request/part-4-windows.yml
new file mode 100644
index 0000000000000..0493e8af0cf8f
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-4-windows.yml
@@ -0,0 +1,14 @@
+config:
+  allow-labels: "test-windows"
+steps:
+  - label: part-4-windows
+    command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-windows-2022
+      machineType: custom-32-98304
+      diskType: pd-ssd
+      diskSizeGb: 350
+    env:
+      GRADLE_TASK: checkPart4
diff --git a/.buildkite/pipelines/pull-request/part-4.yml b/.buildkite/pipelines/pull-request/part-4.yml
new file mode 100644
index 0000000000000..af11f08953d07
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-4.yml
@@ -0,0 +1,11 @@
+config:
+  skip-target-branches: "7.17"
+steps:
+  - label: part-4
+    command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart4
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: custom-32-98304
+      buildDirectory: /dev/shm/bk
diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 4687a1b1d24b2..581ec2f1565b6 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -64,6 +64,7 @@ BWC_VERSION:
   - "7.17.13"
   - "7.17.14"
   - "7.17.15"
+  - "7.17.16"
   - "8.0.0"
   - "8.0.1"
   - "8.1.0"
diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions
index eb2482365f683..7970d655f4014 100644
--- a/.ci/snapshotBwcVersions
+++ b/.ci/snapshotBwcVersions
@@ -1,4 +1,4 @@
 BWC_VERSION:
-  - "7.17.15"
+  - "7.17.16"
   - "8.11.2"
   - "8.12.0"
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 4f9f432bca467..cb674221913de 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -675,12 +675,28 @@ number, there are a few rules that need to be followed:
    once merged into `main`.
 2. To create a new component version, add a new constant to the respective
    class with a descriptive name of the change being made. Increment the integer
-   number according to the partciular `*Version` class.
+   number according to the particular `*Version` class.

 If your pull request has a conflict around your new version constant, you need
 to update your PR from `main` and change your PR to use the next available
 version number.

+### Checking for cluster features
+
+As part of developing a new feature or change, you might need to determine
+if all nodes in a cluster have been upgraded to support your new feature.
+This can be done using `FeatureService`. To define and check for a new
+feature in a cluster:
+
+1. Define a new `NodeFeature` constant with a unique id for the feature
+   in a class related to the change you're doing.
+2. Return that constant from an instance of `FeatureSpecification.getFeatures`,
+   either an existing implementation or a new implementation. Make sure
+   the implementation is added as a SPI implementation in `module-info.java`
+   and `META-INF/services`.
+3. To check if all nodes in the cluster support the new feature, call
+`FeatureService.clusterHasFeature(ClusterState, NodeFeature)`
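+
+For illustration, a minimal sketch of the two pieces (the class names and the
+feature id here are hypothetical, not real features in the codebase):
+
+```java
+public class MyPluginFeatures implements FeatureSpecification {
+    // Hypothetical feature id; pick a descriptive, unique id for your change
+    public static final NodeFeature MY_NEW_FEATURE = new NodeFeature("my_plugin.my_new_feature");
+
+    @Override
+    public Set<NodeFeature> getFeatures() {
+        return Set.of(MY_NEW_FEATURE);
+    }
+}
+```
+
+Then, wherever the new behavior is gated, check for cluster-wide support:
+
+```java
+if (featureService.clusterHasFeature(clusterState, MyPluginFeatures.MY_NEW_FEATURE)) {
+    // every node in the cluster has been upgraded to support the feature
+}
+```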
+
 ### Creating A Distribution

 Run all build commands from within the root directory:
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
index e31594ad2e4a6..96e342e995a36 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
@@ -31,7 +31,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
         Redistribution and use in source and binary forms, with or without
         modification, are permitted provided that the following conditions
         are met:
-
+
         1. Redistributions of source code must retain the above copyright
            notice, this list of conditions and the following disclaimer.
         2. Redistributions in binary form must reproduce the above copyright
@@ -39,7 +39,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
            documentation and/or other materials provided with the distribution.
         3. The name of the author may not be used to endorse or promote products
            derived from this software without specific prior written permission.
-
+
         THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
         IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
         OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
@@ -58,11 +58,11 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
                id 'java'
                id 'elasticsearch.global-build-info'
            }
-
+
            apply plugin:'elasticsearch.build'
            group = 'org.acme'
            description = "some example project"
-
+
            repositories {
                maven {
                    name = "local-test"
@@ -73,7 +73,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
                }
                mavenCentral()
            }
-
+
            dependencies {
                jarHell 'org.elasticsearch:elasticsearch-core:current'
            }
@@ -89,7 +89,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
             * Side Public License, v 1.
             */
            package org.elasticsearch;
-
+
            public class SampleClass {
            }
        """.stripIndent()
@@ -117,7 +117,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
            noticeFile.set(file("NOTICE"))
        """
        when:
-        def result = gradleRunner("assemble").build()
+        def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
        then:
        result.task(":assemble").outcome == TaskOutcome.SUCCESS
        file("build/distributions/hello-world.jar").exists()
@@ -146,7 +146,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
            }
            licenseFile.set(file("LICENSE"))
            noticeFile.set(file("NOTICE"))
-
+
            tasks.named("forbiddenApisMain").configure {enabled = false }
            tasks.named('checkstyleMain').configure { enabled = false }
            tasks.named('loggerUsageCheck').configure { enabled = false }
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
index e17f9c7537777..9d32eaadf7aec 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
@@ -29,7 +29,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                id 'elasticsearch.java'
                id 'elasticsearch.publish'
            }
-
+
            version = "1.0"
            group = 'org.acme'
            description = "custom project description"
@@ -92,11 +92,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                id 'elasticsearch.publish'
                id 'com.github.johnrengelman.shadow'
            }
-
+
            repositories {
                mavenCentral()
            }
-
+
            dependencies {
                implementation 'org.slf4j:log4j-over-slf4j:1.7.30'
                shadow 'org.slf4j:slf4j-api:1.7.30'
@@ -110,8 +110,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                }
            }
            version = "1.0"
-            group = 'org.acme'
-            description = 'some description'
+            group = 'org.acme'
+            description = 'some description'
        """

        when:
@@ -179,7 +179,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
            }

            dependencies {
-                shadow project(":someLib")
+                shadow project(":someLib")
            }
            publishing {
                repositories {
@@ -192,10 +192,10 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
        allprojects {
            apply plugin: 'elasticsearch.java'
            version = "1.0"
-            group = 'org.acme'
+            group = 'org.acme'
        }

-        description = 'some description'
+        description = 'some description'
        """

        when:
@@ -263,13 +263,13 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                id 'elasticsearch.publish'
                id 'com.github.johnrengelman.shadow'
            }
-
+
            esplugin {
                name = 'hello-world-plugin'
                classname 'org.acme.HelloWorldPlugin'
                description = "custom project description"
            }
-
+
            publishing {
                repositories {
                    maven {
@@ -277,17 +277,17 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                    }
                }
            }
-
+
            // requires elasticsearch artifact available
            tasks.named('bundlePlugin').configure { enabled = false }
            licenseFile.set(file('license.txt'))
            noticeFile.set(file('notice.txt'))
            version = "1.0"
-            group = 'org.acme'
+            group = 'org.acme'
        """

        when:
-        def result = gradleRunner('assemble', '--stacktrace').build()
+        def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()

        then:
        result.task(":generatePom").outcome == TaskOutcome.SUCCESS
@@ -348,19 +348,19 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
                id 'elasticsearch.internal-es-plugin'
                id 'elasticsearch.publish'
            }
-
+
            esplugin {
                name = 'hello-world-plugin'
                classname 'org.acme.HelloWorldPlugin'
                description = "custom project description"
            }
-
+
            // requires elasticsearch artifact available
            tasks.named('bundlePlugin').configure { enabled = false }
            licenseFile.set(file('license.txt'))
            noticeFile.set(file('notice.txt'))
            version = "2.0"
-            group = 'org.acme'
+            group = 'org.acme'
        """

        when:
@@ -420,9 +420,9 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
            apply plugin:'elasticsearch.publish'

            version = "1.0"
-            group = 'org.acme'
+            group = 'org.acme'
            description = "just a test project"
-
+
            ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
        """

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
index f709600fc7979..70d130605c15e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.gradle.internal.conventions.util.Util;
 import org.elasticsearch.gradle.internal.info.BuildParams;
 import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
+import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
 import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
@@ -36,6 +37,7 @@ public void apply(Project project) {
         project.getPluginManager().apply(PluginBuildPlugin.class);
         project.getPluginManager().apply(JarHellPrecommitPlugin.class);
         project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
+        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
         // Clear default dependencies added by public PluginBuildPlugin as we add our
         // own project dependencies for internal builds
         // TODO remove once we removed default dependencies from PluginBuildPlugin
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
index 6849796579ad9..6c7bc6753531c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
 import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
+import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
 import org.gradle.api.InvalidUserDataException;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
@@ -61,6 +62,7 @@ public void apply(final Project project) {
         project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
         project.getPluginManager().apply(DependenciesInfoPlugin.class);
         project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
+        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
         InternalPrecommitTasks.create(project, true);
         configureLicenseAndNotice(project);
     }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
index b32c566363e88..93753f7c7ac56 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
@@ -143,6 +143,7 @@ private static ListMultimap<Class<?>, String> createLegacyRestTestBasePluginUsage() {
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:security");
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:multi-node");
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:single-node");
+        map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:mixed-cluster");
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:fleet:qa:rest");
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:graph:qa:with-security");
         map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:identity-provider:qa:idp-rest-tests");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
index bcbe1740630ce..42d3a770dbbcc 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
@@ -101,7 +101,7 @@ public void apply(Project project) {
                 addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath);
                 addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
                 t.exclude("**/PackageUpgradeTests.class");
-            }, distribution.getArchiveDependencies(), examplePlugin.getDependencies());
+            }, distribution, examplePlugin.getDependencies());

             if (distribution.getPlatform() == Platform.WINDOWS) {
                 windowsTestTasks.add(destructiveTask);
@@ -235,6 +235,7 @@ private static ElasticsearchDistribution createDistro(
                 d.setBundledJdk(bundledJdk);
             }
             d.setVersion(version);
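+            // Depend on the archive rather than the extracted distribution when wiring task
+            // dependencies (see ElasticsearchDistribution#getBuildDependencies)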
+            d.setPreferArchive(true);
         });

         // Allow us to gracefully omit building Docker distributions if Docker is not available on the system.
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java
new file mode 100644
index 0000000000000..bd9df6d3903ca
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.test;
+
+import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
+import org.gradle.api.tasks.SourceSet;
+import org.gradle.api.tasks.SourceSetContainer;
+import org.gradle.api.tasks.TaskProvider;
+
+import java.util.Map;
+
+/**
+ * Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
+ */
+public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
+    public static final String HISTORICAL_FEATURES_JSON = "historical-features.json";
+    public static final String FEATURES_METADATA_TYPE = "features-metadata-json";
+    public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata";
+
+    @Override
+    public void apply(Project project) {
+        Configuration featureMetadataExtractorConfig = project.getConfigurations().create("featuresMetadataExtractor", c -> {
+            // Don't bother adding this dependency if the project doesn't exist which simplifies testing
+            if (project.findProject(":test:metadata-extractor") != null) {
+                c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":test:metadata-extractor"))));
+            }
+        });
+
+        SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
+        SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
+
+        TaskProvider<HistoricalFeaturesMetadataTask> generateTask = project.getTasks()
+            .register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> {
+                task.setClasspath(
+                    featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath())
+                        .plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
+                );
+                task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON));
+            });
+
+        Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
+            c.setCanBeResolved(false);
+            c.setCanBeConsumed(true);
+            c.attributes(a -> { a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, FEATURES_METADATA_TYPE); });
+        });
+
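+        // Expose the generated metadata file as an outgoing artifact so that consuming
+        // projects can resolve it through the featuresMetadata configuration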
+        project.getArtifacts().add(featuresMetadataArtifactConfig.getName(), generateTask);
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java
new file mode 100644
index 0000000000000..0891225d1e1ef
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.test;
+
+import org.elasticsearch.gradle.LoggedExec;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.file.RegularFileProperty;
+import org.gradle.api.tasks.CacheableTask;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.process.ExecOperations;
+import org.gradle.workers.WorkAction;
+import org.gradle.workers.WorkParameters;
+import org.gradle.workers.WorkerExecutor;
+
+import javax.inject.Inject;
+
+@CacheableTask
+public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
+    private FileCollection classpath;
+
+    @OutputFile
+    public abstract RegularFileProperty getOutputFile();
+
+    @Classpath
+    public FileCollection getClasspath() {
+        return classpath;
+    }
+
+    public void setClasspath(FileCollection classpath) {
+        this.classpath = classpath;
+    }
+
+    @Inject
+    public abstract WorkerExecutor getWorkerExecutor();
+
+    @TaskAction
+    public void execute() {
+        getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> {
+            params.getClasspath().setFrom(getClasspath());
+            params.getOutputFile().set(getOutputFile());
+        });
+    }
+
+    public interface HistoricalFeaturesWorkParameters extends WorkParameters {
+        ConfigurableFileCollection getClasspath();
+
+        RegularFileProperty getOutputFile();
+    }
+
+    public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction<HistoricalFeaturesWorkParameters> {
+        private final ExecOperations execOperations;
+
+        @Inject
+        public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) {
+            this.execOperations = execOperations;
+        }
+
+        @Override
+        public void execute() {
+            LoggedExec.javaexec(execOperations, spec -> {
+                spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor");
+                spec.classpath(getParameters().getClasspath());
+                spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath());
+            });
+        }
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
index 32e7f10d14355..566e93d8a3f53 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin;
 import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin;
 import org.elasticsearch.gradle.internal.info.BuildParams;
+import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
@@ -35,6 +36,7 @@
 import org.gradle.api.Task;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.artifacts.DependencySet;
 import org.gradle.api.artifacts.ProjectDependency;
 import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
 import org.gradle.api.attributes.Attribute;
@@ -74,6 +76,9 @@ public class RestTestBasePlugin implements Plugin<Project> {
     private static final String PLUGINS_CONFIGURATION = "clusterPlugins";
     private static final String EXTRACTED_PLUGINS_CONFIGURATION = "extractedPlugins";
     private static final Attribute<String> CONFIGURATION_ATTRIBUTE = Attribute.of("test-cluster-artifacts", String.class);
+    private static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadataDeps";
+    private static final String DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION = "defaultDistrofeaturesMetadataDeps";
+    private static final String TESTS_FEATURES_METADATA_PATH = "tests.features.metadata.path";

     private final ProviderFactory providerFactory;

@@ -107,6 +112,36 @@ public void apply(Project project) {
         extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
         configureArtifactTransforms(project);

+        // Create configuration for aggregating historical feature metadata
+        Configuration featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
+            c.setCanBeConsumed(false);
+            c.setCanBeResolved(true);
+            c.attributes(
+                a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
+            );
+            c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
+            c.withDependencies(dependencies -> {
+                // We can't just use Configuration#extendsFrom() here as we'd inherit the wrong project configuration
+                copyDependencies(project, dependencies, modulesConfiguration);
+                copyDependencies(project, dependencies, pluginsConfiguration);
+            });
+        });
+
+        Configuration defaultDistroFeatureMetadataConfig = project.getConfigurations()
+            .create(DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION, c -> {
+                c.setCanBeConsumed(false);
+                c.setCanBeResolved(true);
+                c.attributes(
+                    a -> a.attribute(
+                        ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE,
+                        HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE
+                    )
+                );
+                c.defaultDependencies(
+                    d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))
+                );
+            });
+
         // For plugin and module projects, register the current project plugin bundle as a dependency
         project.getPluginManager().withPlugin("elasticsearch.esplugin", plugin -> {
             if (GradleUtils.isModuleProject(project.getPath())) {
@@ -124,6 +159,10 @@ public void apply(Project project) {
             task.dependsOn(integTestDistro, modulesConfiguration);
             registerDistributionInputs(task, integTestDistro);

+            // Pass feature metadata on to tests
+            task.getInputs().files(featureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
+            nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, () -> featureMetadataConfig.getAsPath());
+
             // Enable parallel execution for these tests since each test gets its own cluster
             task.setMaxParallelForks(task.getProject().getGradle().getStartParameter().getMaxWorkerCount() / 2);
             nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks()));
@@ -163,6 +202,11 @@ public Void call(Object... args) {
                     DEFAULT_DISTRIBUTION_SYSPROP,
                     providerFactory.provider(() -> defaultDistro.getExtracted().getSingleFile().getPath())
                 );
+
+                // If we are using the default distribution we need to register all module feature metadata
+                task.getInputs().files(defaultDistroFeatureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
+                nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, defaultDistroFeatureMetadataConfig::getAsPath);
+
                 return null;
             }
         });
@@ -198,6 +242,14 @@ public Void call(Object... args) {
         });
     }

+    private void copyDependencies(Project project, DependencySet dependencies, Configuration configuration) {
+        configuration.getDependencies()
+            .stream()
+            .filter(d -> d instanceof ProjectDependency)
+            .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath())))
+            .forEach(dependencies::add);
+    }
+
     private ElasticsearchDistribution createDistribution(Project project, String name, String version) {
         return createDistribution(project, name, version, null);
     }
diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt
index 34f39bbc4ca54..48c888acd35e2 100644
--- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt
+++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt
@@ -158,6 +158,8 @@ org.elasticsearch.cluster.ClusterState#compatibilityVersions()
 @defaultMessage ClusterFeatures#nodeFeatures is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
 org.elasticsearch.cluster.ClusterFeatures#nodeFeatures()

+@defaultMessage ClusterFeatures#allNodeFeatures is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
+org.elasticsearch.cluster.ClusterFeatures#allNodeFeatures()
+
 @defaultMessage ClusterFeatures#clusterHasFeature is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster.
 org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.features.NodeFeature)
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
index f9805680ce8d4..eca0fb319cea4 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
@@ -53,6 +53,7 @@ public String toString() {
     private final Property<Platform> platform;
     private final Property<Boolean> bundledJdk;
     private final Property<Boolean> failIfUnavailable;
+    private final Property<Boolean> preferArchive;
     private final ConfigurableFileCollection extracted;
     private Action<ElasticsearchDistribution> distributionFinalizer;
     private boolean frozen = false;
@@ -75,6 +76,7 @@ public String toString() {
         this.platform = objectFactory.property(Platform.class);
         this.bundledJdk = objectFactory.property(Boolean.class);
         this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true);
+        this.preferArchive = objectFactory.property(Boolean.class).convention(false);
         this.extracted = extractedConfiguration;
         this.distributionFinalizer = distributionFinalizer;
     }
@@ -141,6 +143,14 @@ public void setFailIfUnavailable(boolean failIfUnavailable) {
         this.failIfUnavailable.set(failIfUnavailable);
     }

+    public boolean getPreferArchive() {
+        return preferArchive.get();
+    }
+
+    public void setPreferArchive(boolean preferArchive) {
+        this.preferArchive.set(preferArchive);
+    }
+
     public void setArchitecture(Architecture architecture) {
         this.architecture.set(architecture);
     }
@@ -188,7 +198,9 @@ public TaskDependency getBuildDependencies() {
             return task -> Collections.emptySet();
         } else {
             maybeFreeze();
-            return getType().shouldExtract() ? extracted.getBuildDependencies() : configuration.getBuildDependencies();
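+            // When preferArchive is set, build dependencies point at the archive configuration even
+            // for distribution types that would otherwise depend on the extracted distribution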
+            return getType().shouldExtract() && (preferArchive.get() == false)
+                ? extracted.getBuildDependencies()
+                : configuration.getBuildDependencies();
         }
     }

@@ -253,13 +265,4 @@ void finalizeValues() {
         type.finalizeValue();
         bundledJdk.finalizeValue();
     }
-
-    public TaskDependency getArchiveDependencies() {
-        if (skippingDockerDistributionBuild()) {
-            return task -> Collections.emptySet();
-        } else {
-            maybeFreeze();
-            return configuration.getBuildDependencies();
-        }
-    }
 }
diff --git a/build.gradle b/build.gradle
index d05c2bf53f660..acd8d6788318f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -161,8 +161,10 @@ tasks.register("verifyVersions") {
       String versionMapping = backportConfig.get("branchLabelMapping").fields().find { it.value.textValue() == 'main' }.key
       String expectedMapping = "^v${versions.elasticsearch.replaceAll('-SNAPSHOT', '')}\$"
       if (versionMapping != expectedMapping) {
-        throw new GradleException("Backport label mapping for branch 'main' is '${versionMapping}' but should be " +
-          "'${expectedMapping}'. Update .backportrc.json.")
+        throw new GradleException(
+          "Backport label mapping for branch 'main' is '${versionMapping}' but should be " +
+            "'${expectedMapping}'. Update .backportrc.json."
+        )
       }
     }
   }
@@ -211,9 +213,9 @@ allprojects {
  project.ext {
    // for ide hacks...
    isEclipse = providers.systemProperty("eclipse.launcher").isPresent() ||   // Detects gradle launched from Eclipse's IDE
-          providers.systemProperty("eclipse.application").isPresent() ||    // Detects gradle launched from the Eclipse compiler server
-          gradle.startParameter.taskNames.contains('eclipse') ||      // Detects gradle launched from the command line to do eclipse stuff
-          gradle.startParameter.taskNames.contains('cleanEclipse')
+      providers.systemProperty("eclipse.application").isPresent() ||   // Detects gradle launched from the Eclipse compiler server
+      gradle.startParameter.taskNames.contains('eclipse') ||   // Detects gradle launched from the command line to do eclipse stuff
+      gradle.startParameter.taskNames.contains('cleanEclipse')
  }

  ext.bwc_tests_enabled = bwc_tests_enabled
@@ -229,10 +231,10 @@ allprojects {
    eclipse.classpath.file.whenMerged { classpath ->
      if (false == forbiddenApisTest.bundledSignatures.contains('jdk-non-portable')) {
        classpath.entries
-                .findAll { it.kind == "con" && it.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER") }
-                .each {
-                  it.accessRules.add(new AccessRule("accessible", "com/sun/net/httpserver/*"))
-                }
+          .findAll { it.kind == "con" && it.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER") }
+          .each {
+            it.accessRules.add(new AccessRule("accessible", "com/sun/net/httpserver/*"))
+          }
      }
    }
  }
@@ -248,6 +250,8 @@ allprojects {
  plugins.withId('lifecycle-base') {
    if (project.path.startsWith(":x-pack:")) {
      if (project.path.contains("security") || project.path.contains(":ml")) {
+        tasks.register('checkPart4') { dependsOn 'check' }
+      } else if (project.path == ":x-pack:plugin" || project.path.contains("ql") || project.path.contains("smoke-test")) {
        tasks.register('checkPart3') { dependsOn 'check' }
      } else {
        tasks.register('checkPart2') { dependsOn 'check' }
@@ -256,7 +260,7 @@ allprojects {
      tasks.register('checkPart1') { dependsOn 'check' }
    }

-    tasks.register('functionalTests') { dependsOn 'check'}
+    tasks.register('functionalTests') { dependsOn 'check' }
  }

  /*
@@ -281,7 +285,7 @@ allprojects {
      // :test:framework:test cannot run before and after :server:test
      return
    }
-    tasks.matching { it.name.equals('integTest')}.configureEach {integTestTask ->
+    tasks.matching { it.name.equals('integTest') }.configureEach { integTestTask ->
integTestTask.mustRunAfter tasks.matching { it.name.equals("test") } } @@ -290,7 +294,7 @@ allprojects { Project upstreamProject = dep.dependencyProject if (project.path != upstreamProject?.path) { for (String taskName : ['test', 'integTest']) { - project.tasks.matching { it.name == taskName }.configureEach {task -> + project.tasks.matching { it.name == taskName }.configureEach { task -> task.shouldRunAfter(upstreamProject.tasks.matching { upStreamTask -> upStreamTask.name == taskName }) } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index fca1e5d29efaf..fdbb5d0c86d6f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -241,7 +240,7 @@ static Request search(SearchRequest searchRequest, String searchEndpoint) throws return request; } - static void addSearchRequestParams(Params params, SearchRequest searchRequest) { + private static void addSearchRequestParams(Params params, SearchRequest searchRequest) { params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); params.withRouting(searchRequest.routing()); params.withPreference(searchRequest.preference()); @@ -268,53 +267,28 @@ static void addSearchRequestParams(Params params, SearchRequest searchRequest) { } } - static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException { - Request request = new Request(HttpPost.METHOD_NAME, "/_search/scroll"); - request.setEntity(createEntity(searchScrollRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { return createEntity(toXContent, xContentType, ToXContent.EMPTY_PARAMS); } - static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams) + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, toXContentParams, false).toBytesRef(); return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } - @Deprecated - static String endpoint(String index, String type, String id) { + private static String endpoint(String index, String type, String id) { return new EndpointBuilder().addPathPart(index, type, id).build(); } - static String endpoint(String index, String id) { + private static String endpoint(String index, String id) { return new EndpointBuilder().addPathPart(index, "_doc", id).build(); } - @Deprecated - static String endpoint(String index, String type, String id, String endpoint) { - return new EndpointBuilder().addPathPart(index, type, id).addPathPartAsIs(endpoint).build(); - } - - static String 
endpoint(String[] indices, String endpoint) { + private static String endpoint(String[] indices, String endpoint) { return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).build(); } - @Deprecated - static String endpoint(String[] indices, String[] types, String endpoint) { - return new EndpointBuilder().addCommaSeparatedPathParts(indices) - .addCommaSeparatedPathParts(types) - .addPathPartAsIs(endpoint) - .build(); - } - - @Deprecated - static String endpoint(String[] indices, String endpoint, String type) { - return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).addPathPart(type).build(); - } - /** * Returns a {@link ContentType} from a given {@link XContentType}. * @@ -322,7 +296,7 @@ static String endpoint(String[] indices, String endpoint, String type) { * @return the {@link ContentType} */ @SuppressForbidden(reason = "Only allowed place to convert a XContentType to a ContentType") - public static ContentType createContentType(final XContentType xContentType) { + private static ContentType createContentType(final XContentType xContentType) { return ContentType.create(xContentType.mediaTypeWithoutParameters(), (Charset) null); } @@ -330,7 +304,7 @@ public static ContentType createContentType(final XContentType xContentType) { * Utility class to help with common parameter names and patterns. Wraps * a {@link Request} and adds the parameters to it directly. */ - static class Params { + private static class Params { private final Map parameters = new HashMap<>(); Params() {} @@ -478,7 +452,7 @@ Params withIgnoreUnavailable(boolean ignoreUnavailable) { * * @return the {@link IndexRequest}'s content type */ - static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) { + private static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) { XContentType requestContentType = indexRequest.getContentType(); if (requestContentType.canonical() != XContentType.JSON && requestContentType.canonical() != XContentType.SMILE) { throw new IllegalArgumentException( @@ -505,7 +479,7 @@ static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable /** * Utility class to build request's endpoint given its parts as strings */ - static class EndpointBuilder { + private static class EndpointBuilder { private final StringJoiner joiner = new StringJoiner("/", "/", ""); @@ -532,7 +506,7 @@ EndpointBuilder addPathPartAsIs(String... 
parts) { return this; } - String build() { + private String build() { return joiner.toString(); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index b0998957910a2..5d779ea17f534 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder; import org.elasticsearch.aggregations.bucket.adjacency.ParsedAdjacencyMatrix; @@ -159,7 +158,6 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -180,29 +178,6 @@ * High level REST client that wraps an instance of the low level {@link RestClient} and allows to build requests and read responses. The * {@link RestClient} instance is internally built based on the provided {@link RestClientBuilder} and it gets closed automatically when * closing the {@link RestHighLevelClient} instance that wraps it. - *

- * In case an already existing instance of a low-level REST client needs to be provided, this class can be subclassed and the
- * {@link #RestHighLevelClient(RestClient, CheckedConsumer, List)} constructor can be used.
- * <p>
- * This class can also be sub-classed to expose additional client methods that make use of endpoints added to Elasticsearch through plugins,
- * or to add support for custom response sections, again added to Elasticsearch through plugins.
- * <p>
- * The majority of the methods in this class come in two flavors, a blocking and an asynchronous version (e.g.
- * {@link #search(SearchRequest, RequestOptions)} and {@link #searchAsync(SearchRequest, RequestOptions, ActionListener)}), where the latter
- * takes an implementation of an {@link ActionListener} as an argument that needs to implement methods that handle successful responses and
- * failure scenarios. Most of the blocking calls can throw an {@link IOException} or an unchecked {@link ElasticsearchException} in the
- * following cases:
- * <ul>
- * <li>an {@link IOException} is usually thrown in case of failing to parse the REST response in the high-level REST client, the request
- * times out or similar cases where there is no response coming back from the Elasticsearch server</li>
- * <li>an {@link ElasticsearchException} is usually thrown in case where the server returns a 4xx or 5xx error code. The high-level client
- * then tries to parse the response body error details into a generic ElasticsearchException and suppresses the original
- * {@link ResponseException}</li>
- * </ul>
* * @deprecated The High Level Rest Client is deprecated in favor of the * @@ -216,7 +191,7 @@ public class RestHighLevelClient implements Closeable { /** * Environment variable determining whether to send the 7.x compatibility header */ - public static final String API_VERSIONING_ENV_VARIABLE = "ELASTIC_CLIENT_APIVERSIONING"; + private static final String API_VERSIONING_ENV_VARIABLE = "ELASTIC_CLIENT_APIVERSIONING"; // To be called using performClientRequest and performClientRequestAsync to ensure version compatibility check private final RestClient client; @@ -227,14 +202,6 @@ public class RestHighLevelClient implements Closeable { /** Do not access directly but through getVersionValidationFuture() */ private volatile ListenableFuture> versionValidationFuture; - /** - * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the - * {@link RestClient} to be used to perform requests. - */ - public RestHighLevelClient(RestClientBuilder restClientBuilder) { - this(restClientBuilder.build(), RestClient::close, Collections.emptyList()); - } - /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClient} that it should use to perform requests and * a list of entries that allow to parse custom response sections added to Elasticsearch through plugins. @@ -331,23 +298,6 @@ public final IndexResponse index(IndexRequest indexRequest, RequestOptions optio return performRequestAndParseEntity(indexRequest, RequestConverters::index, options, IndexResponse::fromXContent, emptySet()); } - /** - * Executes a search request using the Search API. - * See Search API on elastic.co - * @param searchRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - */ - public final SearchResponse search(SearchRequest searchRequest, RequestOptions options) throws IOException { - return performRequestAndParseEntity( - searchRequest, - r -> RequestConverters.search(r, "_search"), - options, - SearchResponse::fromXContent, - emptySet() - ); - } - /** * Asynchronously executes a search using the Search API. * See Search API on elastic.co @@ -368,27 +318,7 @@ public final Cancellable searchAsync(SearchRequest searchRequest, RequestOptions } /** - * Executes a search using the Search Scroll API. - * See Search - * Scroll API on elastic.co - * @param searchScrollRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - */ - public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException { - return performRequestAndParseEntity( - searchScrollRequest, - RequestConverters::searchScroll, - options, - SearchResponse::fromXContent, - emptySet() - ); - } - - /** - * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation - * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. */ @Deprecated private Resp performRequestAndParseEntity( @@ -402,8 +332,7 @@ private Resp performRequestAndParseEntity( } /** - * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. 
The Validation - * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. */ @Deprecated private Resp performRequest( @@ -458,8 +387,7 @@ private Resp internalPerformRequest( } /** - * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation - * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. * @return Cancellable instance that may be used to cancel the request */ @Deprecated @@ -482,8 +410,7 @@ private Cancellable performRequestAsyncAndPars } /** - * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation - * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. * @return Cancellable instance that may be used to cancel the request */ @Deprecated diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java deleted file mode 100644 index b7635f7054299..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client; - -import java.util.Optional; - -/** - * Defines a validation layer for Requests. - */ -public interface Validatable { - - Validatable EMPTY = new Validatable() { - }; - - /** - * Perform validation. This method does not have to be overridden in the event that no validation needs to be done, - * or the validation was done during object construction time. A {@link ValidationException} that is not null is - * assumed to contain validation errors and will be thrown. - * - * @return An {@link Optional} {@link ValidationException} that contains a list of validation errors. - */ - default Optional validate() { - return Optional.empty(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java deleted file mode 100644 index d5701c5723096..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client; - -import org.elasticsearch.core.Nullable; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * Encapsulates an accumulation of validation errors - */ -public class ValidationException extends IllegalArgumentException { - - /** - * Creates {@link ValidationException} instance initialized with given error messages. - * @param error the errors to add - * @return {@link ValidationException} instance - */ - public static ValidationException withError(String... error) { - return withErrors(Arrays.asList(error)); - } - - /** - * Creates {@link ValidationException} instance initialized with given error messages. - * @param errors the list of errors to add - * @return {@link ValidationException} instance - */ - public static ValidationException withErrors(List errors) { - ValidationException e = new ValidationException(); - for (String error : errors) { - e.addValidationError(error); - } - return e; - } - - private final List validationErrors = new ArrayList<>(); - - /** - * Add a new validation error to the accumulating validation errors - * @param error the error to add - */ - public void addValidationError(final String error) { - validationErrors.add(error); - } - - /** - * Adds validation errors from an existing {@link ValidationException} to - * the accumulating validation errors - * @param exception the {@link ValidationException} to add errors from - */ - public final void addValidationErrors(final @Nullable ValidationException exception) { - if (exception != null) { - for (String error : exception.validationErrors()) { - addValidationError(error); - } - } - } - - /** - * Returns the validation errors accumulated - */ - public final List validationErrors() { - return validationErrors; - } - - @Override - public final String getMessage() { - StringBuilder sb = new StringBuilder(); - sb.append("Validation Failed: "); - int index = 0; - for (String error : validationErrors) { - sb.append(++index).append(": ").append(error).append(";"); - } - return sb.toString(); - } -} diff --git a/distribution/build.gradle b/distribution/build.gradle index 90af1472deb2e..e45f1d09625d6 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -14,6 +14,7 @@ import org.elasticsearch.gradle.internal.ConcatFilesTask import org.elasticsearch.gradle.internal.DependenciesInfoPlugin import org.elasticsearch.gradle.internal.NoticeTask import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin import java.nio.file.Files import java.nio.file.Path @@ -30,6 +31,15 @@ configurations { attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, Category.DOCUMENTATION)) } } + featuresMetadata { + attributes { + attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) + } + } +} + +dependencies { + featuresMetadata project(':server') } def thisProj = project @@ -196,6 +206,7 @@ project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { } distro.copyModule(processDefaultOutputsTaskProvider, module) + dependencies.add('featuresMetadata', module) if (module.name.startsWith('transport-') || (BuildParams.snapshotBuild == false && module.name == 'apm')) { distro.copyModule(processIntegTestOutputsTaskProvider, module) } @@ -214,6 +225,7 @@ xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule -> } } 
distro.copyModule(processDefaultOutputsTaskProvider, xpackModule) + dependencies.add('featuresMetadata', xpackModule) if (xpackModule.name.equals('core') || xpackModule.name.equals('security')) { distro.copyModule(processIntegTestOutputsTaskProvider, xpackModule) } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index b6cd680cb5816..9dcd630f52631 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -145,7 +145,7 @@ static List<String> apmJvmOptions(Settings settings, @Nullable SecureSettings se // Configures a log file to write to. Don't disable writing to a log file, // as the agent will then require extra Security Manager permissions when // it tries to do something else, and it's just painful. - propertiesMap.put("log_file", logsDir.resolve("apm-agent.log").toString()); + propertiesMap.put("log_file", logsDir.resolve("apm-agent.json").toString()); // No point doing anything if we don't have a destination for the trace data, and it can't be configured dynamically if (propertiesMap.containsKey("server_url") == false && propertiesMap.containsKey("server_urls") == false) { diff --git a/docs/changelog/101609.yaml b/docs/changelog/101609.yaml new file mode 100644 index 0000000000000..27993574743d2 --- /dev/null +++ b/docs/changelog/101609.yaml @@ -0,0 +1,9 @@ +pr: 101609 +summary: > + Add a node feature join barrier. This prevents nodes from joining clusters that do not have + all the features already present in the cluster. This ensures that once a feature is supported + by all the nodes in a cluster, that feature will never stop being supported in the future.
+ This is the same functionality as the version join barrier, but for features. +area: "Cluster Coordination" +type: feature +issues: [] diff --git a/docs/changelog/101826.yaml b/docs/changelog/101826.yaml new file mode 100644 index 0000000000000..87f3f8df1b0c2 --- /dev/null +++ b/docs/changelog/101826.yaml @@ -0,0 +1,6 @@ +pr: 101826 +summary: Support keyed histograms +area: Aggregations +type: enhancement +issues: + - 100242 diff --git a/docs/changelog/101859.yaml b/docs/changelog/101859.yaml new file mode 100644 index 0000000000000..54f3fb12810ca --- /dev/null +++ b/docs/changelog/101859.yaml @@ -0,0 +1,6 @@ +pr: 101859 +summary: Cover head/tail commands edge cases and data types +area: EQL +type: bug +issues: + - 101724 diff --git a/docs/changelog/102056.yaml b/docs/changelog/102056.yaml new file mode 100644 index 0000000000000..455f66ba90b03 --- /dev/null +++ b/docs/changelog/102056.yaml @@ -0,0 +1,5 @@ +pr: 102056 +summary: Use `BulkRequest` to store Application Privileges +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/102057.yaml b/docs/changelog/102057.yaml new file mode 100644 index 0000000000000..d5b664ba14c29 --- /dev/null +++ b/docs/changelog/102057.yaml @@ -0,0 +1,6 @@ +pr: 102057 +summary: Simplify `BlobStoreRepository` idle check +area: Snapshot/Restore +type: bug +issues: + - 101948 diff --git a/docs/changelog/102065.yaml b/docs/changelog/102065.yaml new file mode 100644 index 0000000000000..1a9a219df4502 --- /dev/null +++ b/docs/changelog/102065.yaml @@ -0,0 +1,5 @@ +pr: 102065 +summary: Add more desired balance stats +area: Allocation +type: enhancement +issues: [] diff --git a/docs/changelog/102075.yaml b/docs/changelog/102075.yaml new file mode 100644 index 0000000000000..54daae04169db --- /dev/null +++ b/docs/changelog/102075.yaml @@ -0,0 +1,5 @@ +pr: 102075 +summary: Accept a single or multiple inputs to `_inference` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/102089.yaml b/docs/changelog/102089.yaml new file mode 100644 index 0000000000000..9f33c0648d09f --- /dev/null +++ b/docs/changelog/102089.yaml @@ -0,0 +1,5 @@ +pr: 102089 +summary: Add prefix strings option to trained models +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/102114.yaml b/docs/changelog/102114.yaml new file mode 100644 index 0000000000000..a08389da0351b --- /dev/null +++ b/docs/changelog/102114.yaml @@ -0,0 +1,6 @@ +pr: 102114 +summary: Fix double-completion in `SecurityUsageTransportAction` +area: Security +type: bug +issues: + - 102111 diff --git a/docs/changelog/102140.yaml b/docs/changelog/102140.yaml new file mode 100644 index 0000000000000..0f086649b9710 --- /dev/null +++ b/docs/changelog/102140.yaml @@ -0,0 +1,6 @@ +pr: 102140 +summary: Collect data tiers usage stats more efficiently +area: ILM+SLM +type: bug +issues: + - 100230 \ No newline at end of file diff --git a/docs/changelog/102151.yaml b/docs/changelog/102151.yaml new file mode 100644 index 0000000000000..652ae555af97d --- /dev/null +++ b/docs/changelog/102151.yaml @@ -0,0 +1,5 @@ +pr: 102151 +summary: Default `run_ml_inference` should be true +area: Application +type: bug +issues: [] diff --git a/docs/changelog/102172.yaml b/docs/changelog/102172.yaml new file mode 100644 index 0000000000000..485c2c4327e11 --- /dev/null +++ b/docs/changelog/102172.yaml @@ -0,0 +1,5 @@ +pr: 102172 +summary: Adjust Histogram's bucket accounting to be iterative +area: Aggregations +type: bug +issues: [] diff --git
a/docs/changelog/102188.yaml b/docs/changelog/102188.yaml new file mode 100644 index 0000000000000..595a8395fab5c --- /dev/null +++ b/docs/changelog/102188.yaml @@ -0,0 +1,5 @@ +pr: 102188 +summary: Track blocks in `AsyncOperator` +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/102190.yaml b/docs/changelog/102190.yaml new file mode 100644 index 0000000000000..cd04e041fca5e --- /dev/null +++ b/docs/changelog/102190.yaml @@ -0,0 +1,5 @@ +pr: 102190 +summary: Track pages in ESQL enrich request/response +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/102208.yaml b/docs/changelog/102208.yaml new file mode 100644 index 0000000000000..b566a85753d82 --- /dev/null +++ b/docs/changelog/102208.yaml @@ -0,0 +1,5 @@ +pr: 102208 +summary: Add static node settings to set default values for max merged segment sizes +area: Engine +type: enhancement +issues: [] diff --git a/docs/changelog/102250.yaml b/docs/changelog/102250.yaml new file mode 100644 index 0000000000000..755341d9a3a64 --- /dev/null +++ b/docs/changelog/102250.yaml @@ -0,0 +1,6 @@ +pr: 102250 +summary: "[ILM] Fix downsample to skip already downsampled indices" +area: ILM+SLM +type: bug +issues: + - 102249 diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 82831ef943398..e54825406257f 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -7,50 +7,14 @@ This guide shows how you can use {esql} to query and aggregate your data. -TIP: To get started with {esql} without setting up your own deployment, visit -the public {esql} demo environment at -https://esql.demo.elastic.co/[esql.demo.elastic.co]. It comes with preloaded -data sets and sample queries. - [discrete] [[esql-getting-started-prerequisites]] === Prerequisites -To follow along with the queries in this getting started guide, first ingest -some sample data using the following requests: - -[source,console] ----- -PUT sample_data -{ - "mappings": { - "properties": { - "client.ip": { - "type": "ip" - }, - "message": { - "type": "keyword" - } - } - } -} - -PUT sample_data/_bulk -{"index": {}} -{"@timestamp": "2023-10-23T12:15:03.360Z", "client.ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event.duration": 3450233} -{"index": {}} -{"@timestamp": "2023-10-23T12:27:28.948Z", "client.ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event.duration": 2764889} -{"index": {}} -{"@timestamp": "2023-10-23T13:33:34.937Z", "client.ip": "172.21.0.5", "message": "Disconnected", "event.duration": 1232382} -{"index": {}} -{"@timestamp": "2023-10-23T13:51:54.732Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 725448} -{"index": {}} -{"@timestamp": "2023-10-23T13:52:55.015Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 8268153} -{"index": {}} -{"@timestamp": "2023-10-23T13:53:55.832Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 5033755} -{"index": {}} -{"@timestamp": "2023-10-23T13:55:01.543Z", "client.ip": "172.21.3.15", "message": "Connected to 10.1.0.1", "event.duration": 1756467} ----- +To follow along with the queries in this guide, you can either set up your own +deployment, or use Elastic's public {esql} demo environment. 
+ +include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc[] [discrete] [[esql-getting-started-running-queries]] @@ -58,7 +22,7 @@ PUT sample_data/_bulk In {kib}, you can use Console or Discover to run {esql} queries: -include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget.asciidoc[] +include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc[] [discrete] [[esql-getting-started-first-query]] @@ -300,57 +264,9 @@ image::images/esql/esql-enrich.png[align="center"] Before you can use `ENRICH`, you first need to <> and <> -an <>. The following requests create and -execute a policy that links an IP address to an environment ("Development", -"QA", or "Production"): - -[source,console] ----- -PUT clientips -{ - "mappings": { - "properties": { - "client.ip": { - "type": "keyword" - }, - "env": { - "type": "keyword" - } - } - } -} - -PUT clientips/_bulk -{ "index" : {}} -{ "client.ip": "172.21.0.5", "env": "Development" } -{ "index" : {}} -{ "client.ip": "172.21.2.113", "env": "QA" } -{ "index" : {}} -{ "client.ip": "172.21.2.162", "env": "QA" } -{ "index" : {}} -{ "client.ip": "172.21.3.15", "env": "Production" } -{ "index" : {}} -{ "client.ip": "172.21.3.16", "env": "Production" } - -PUT /_enrich/policy/clientip_policy -{ - "match": { - "indices": "clientips", - "match_field": "client.ip", - "enrich_fields": ["env"] - } -} - -PUT /_enrich/policy/clientip_policy/_execute ----- - -//// -[source,console] ----- -DELETE /_enrich/policy/clientip_policy ----- -// TEST[continued] -//// +an <>. + +include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc[] After creating and executing a policy, you can use it with the `ENRICH` command: diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index 3abe6a6df7e01..c7829ab9fba81 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -57,6 +57,7 @@ include::processing-commands/limit.asciidoc[tag=limitation] ** `completion` ** `dense_vector` ** `double_range` +** `flattened` ** `float_range` ** `histogram` ** `integer_range` @@ -157,6 +158,12 @@ return `null` when applied to a multivalued field, unless documented otherwise. Work around this limitation by converting the field to single value with one of the <>. +[discrete] +[[esql-limitations-timezone]] +=== Timezone support + +{esql} only supports the UTC timezone. + [discrete] [[esql-limitations-kibana]] === Kibana limitations diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index 437871d31a88f..afa9ab7254cfa 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -68,11 +68,6 @@ responses. See <>. `query`:: (Required, object) {esql} query to run. For syntax, refer to <>. -[[esql-search-api-time-zone]] -`time_zone`:: -(Optional, string) ISO-8601 time zone ID for the search. Several {esql} -date/time functions use this time zone. Defaults to `Z` (UTC). - [discrete] [role="child_attributes"] [[esql-query-api-response-body]] diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index f26a73d093091..f8515a8b33c39 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -25,9 +25,9 @@ Performs an inference task on an input text by using an {infer} model. 
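For illustration, a minimal request for the `sparse_embedding` task might look like the following sketch. The model ID `my-model` is a placeholder, and the input sentence is the one that produces the sample response shown further down:

[source,console]
----
POST _inference/sparse_embedding/my-model
{
  "input": "The sky above the port was the color of television tuned to a dead channel."
}
----
// TEST[skip:uses a placeholder model ID]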
[[post-inference-api-desc]] ==== {api-description-title} -The perform {infer} API enables you to use {infer} models to perform specific -tasks on data that you provide as an input. The API returns a response with the -resutls of the tasks. The {infer} model you use can perform one specific task +The perform {infer} API enables you to use {infer} models to perform specific +tasks on data that you provide as an input. The API returns a response with the +results of the tasks. The {infer} model you use can perform one specific task that has been defined when the model was created with the <>. @@ -50,8 +50,9 @@ The type of {infer} task that the model performs. == {api-request-body-title} `input`:: -(Required, string) +(Required, array of strings) The text on which you want to perform the {infer} task. +`input` can be a single string or an array. [discrete] @@ -77,23 +78,26 @@ The API returns the following response: [source,console-result] ------------------------------------------------------------ { - "sparse_embedding": { - "port": 2.1259406, - "sky": 1.7073475, - "color": 1.6922266, - "dead": 1.6247464, - "television": 1.3525393, - "above": 1.2425821, - "tuned": 1.1440028, - "colors": 1.1218185, - "tv": 1.0111054, - "ports": 1.0067928, - "poem": 1.0042328, - "channel": 0.99471164, - "tune": 0.96235967, - "scene": 0.9020516, + "sparse_embedding": [ + { + "port": 2.1259406, + "sky": 1.7073475, + "color": 1.6922266, + "dead": 1.6247464, + "television": 1.3525393, + "above": 1.2425821, + "tuned": 1.1440028, + "colors": 1.1218185, + "tv": 1.0111054, + "ports": 1.0067928, + "poem": 1.0042328, + "channel": 0.99471164, + "tune": 0.96235967, + "scene": 0.9020516, + (...) + }, (...) - } + ] } ------------------------------------------------------------ -// NOTCONSOLE \ No newline at end of file +// NOTCONSOLE diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 5696a032b165c..45517b99c2177 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -443,7 +443,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -469,7 +469,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -488,7 +488,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -514,7 +514,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati Recommended to set `max_sentence_length` to `386` with `128` of `span` and set `truncate` to `none`. + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object.
===== @@ -546,7 +546,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classific `num_top_classes`:::: (Optional, integer) -Specifies the number of top class predictions to return. Defaults to all classes +Specifies the number of top class predictions to return. Defaults to all classes (-1). `results_field`:::: @@ -557,7 +557,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -580,7 +580,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -599,7 +599,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarit (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== @@ -634,7 +634,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] (Optional, object) include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + -Refer to <> to review the properties of the +Refer to <> to review the properties of the `tokenization` object. ===== ==== @@ -701,6 +701,33 @@ the platform identifiers used by Elasticsearch, so one of, `linux-x86_64`, For portable models (those that work independent of processor architecture or OS features), leave this field unset. +//Begin prefix_strings +`prefix_strings`:: +(Optional, object) +Certain NLP models are trained in such a way that a prefix string should +be applied to the input text before the input is evaluated. The prefix +may be different depending on the intention. For asymmetric tasks such +as information retrieval, the prefix applied to a passage as it is indexed +can be different to the prefix applied when searching those passages. + +`prefix_strings` has two options: a prefix string that is always applied +in the search context, and one that is always applied when ingesting the +docs. Both are optional. ++ +.Properties of `prefix_strings` +[%collapsible%open] +==== +`search`::: +(Optional, string) +The prefix string to prepend to the input text for requests +originating from a search query. + +`ingest`::: +(Optional, string) +The prefix string to prepend to the input text for requests +at ingest where the {infer} ingest processor is used. +==== +//End prefix_strings `tags`:: (Optional, string) diff --git a/docs/reference/release-notes/8.10.0.asciidoc b/docs/reference/release-notes/8.10.0.asciidoc index 9fbe7a2b1d099..34d1d26e5d69a 100644 --- a/docs/reference/release-notes/8.10.0.asciidoc +++ b/docs/reference/release-notes/8.10.0.asciidoc @@ -35,6 +35,8 @@ delete all the snapshots in the repository taken with version 8.10.0 or later using a cluster running version 8.10.4.
// end::repositorydata-format-change[] +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [[breaking-8.10.0]] [float] === Breaking changes diff --git a/docs/reference/release-notes/8.10.1.asciidoc b/docs/reference/release-notes/8.10.1.asciidoc index d049d5b33b1f7..0cb00699eeac7 100644 --- a/docs/reference/release-notes/8.10.1.asciidoc +++ b/docs/reference/release-notes/8.10.1.asciidoc @@ -9,6 +9,8 @@ Also see <>. include::8.10.0.asciidoc[tag=repositorydata-format-change] +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [[bug-8.10.1]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.10.2.asciidoc b/docs/reference/release-notes/8.10.2.asciidoc index c428b4534fe79..911a410104a26 100644 --- a/docs/reference/release-notes/8.10.2.asciidoc +++ b/docs/reference/release-notes/8.10.2.asciidoc @@ -7,4 +7,6 @@ include::8.10.0.asciidoc[tag=repositorydata-format-change] +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + Also see <>. diff --git a/docs/reference/release-notes/8.10.3.asciidoc b/docs/reference/release-notes/8.10.3.asciidoc index b7828f52ad082..119930058a42e 100644 --- a/docs/reference/release-notes/8.10.3.asciidoc +++ b/docs/reference/release-notes/8.10.3.asciidoc @@ -7,6 +7,19 @@ include::8.10.0.asciidoc[tag=repositorydata-format-change] +// tag::no-preventive-gc-issue[] +* High Memory Pressure due to a GC change in JDK 21 ++ +This version of Elasticsearch is bundled with JDK 21. In JDK 21 +https://bugs.openjdk.org/browse/JDK-8297639[Preventive GC has been removed]. +This may lead to increased memory pressure and an increased number of CircuitBreakerExceptions when retrieving large +documents under some load patterns. (issue: {es-issue}99592[#99592]) ++ +If you needed to explicitly <>, we recommend you avoid upgrading to this version, as the settings to enable Preventive GC have been removed +from JDK 21. +// end::no-preventive-gc-issue[] + Also see <>. [[bug-8.10.3]] diff --git a/docs/reference/release-notes/8.10.4.asciidoc b/docs/reference/release-notes/8.10.4.asciidoc index f2e95af71afcb..6c49bae1e2150 100644 --- a/docs/reference/release-notes/8.10.4.asciidoc +++ b/docs/reference/release-notes/8.10.4.asciidoc @@ -25,6 +25,8 @@ first. If you cannot repair the repository in this way, first delete all the snapshots in the repository taken with version 8.10.0 or later using a cluster running version 8.10.4. +include::8.10.3.asciidoc[tag=no-preventive-gc-issue] + Also see <>. [[bug-8.10.4]] diff --git a/docs/reference/release-notes/8.11.0.asciidoc b/docs/reference/release-notes/8.11.0.asciidoc index 16ff5edd6d91a..acb27dc180727 100644 --- a/docs/reference/release-notes/8.11.0.asciidoc +++ b/docs/reference/release-notes/8.11.0.asciidoc @@ -10,6 +10,11 @@ Also see <>. Infra/Core:: * Remove `transport_versions` from cluster state API {es-pull}99223[#99223] +[[known-issues-8.11.0]] +[float] +=== Known issues +include::8.10.3.asciidoc[tag=no-preventive-gc-issue] + [[bug-8.11.0]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.7.1.asciidoc b/docs/reference/release-notes/8.7.1.asciidoc index a0513bc1a8f0e..70f5e4add88ca 100644 --- a/docs/reference/release-notes/8.7.1.asciidoc +++ b/docs/reference/release-notes/8.7.1.asciidoc @@ -18,6 +18,23 @@ This issue is fixed in 8.8.0. include::8.6.0.asciidoc[tag=reconciliation-imbalance-known-issue] +// tag::no-preventive-gc-issue[] +* High Memory Pressure due to a GC JVM setting change ++ +This version of Elasticsearch is bundled with JDK 20.
In JDK 20 +https://bugs.openjdk.org/browse/JDK-8293861[Preventive GC is disabled by default]. +This may lead to increased memory pressure and an increased number of CircuitBreakerExceptions when retrieving large +documents under some load patterns. (issue: {es-issue}99592[#99592]) ++ +If this change affects your use of Elasticsearch, consider re-enabling the previous behaviour +by adding the JVM arguments `-XX:+UnlockDiagnosticVMOptions -XX:+G1UsePreventiveGC` (reference: +https://www.oracle.com/java/technologies/javase/20-relnote-issues.html#JDK-8293861[JDK 20 release notes]). It is +important to note that this workaround is temporary and works only with JDK 20, which is bundled with Elasticsearch up +to version 8.10.2 inclusive. Later versions bundle JDK 21+, where this setting +https://bugs.openjdk.org/browse/JDK-8297639[has been removed]. Specifying those JVM arguments will prevent the +JVM (and therefore Elasticsearch nodes) from starting. +// end::no-preventive-gc-issue[] + [[bug-8.7.1]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.8.2.asciidoc b/docs/reference/release-notes/8.8.2.asciidoc index d7e6b9b1fcc76..8a24ae2e8d4ef 100644 --- a/docs/reference/release-notes/8.8.2.asciidoc +++ b/docs/reference/release-notes/8.8.2.asciidoc @@ -3,6 +3,11 @@ Also see <>. +[[known-issues-8.8.2]] +[float] +=== Known issues +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [[bug-8.8.2]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.9.0.asciidoc b/docs/reference/release-notes/8.9.0.asciidoc index 2b7b143c268dc..c49eac9f0327c 100644 --- a/docs/reference/release-notes/8.9.0.asciidoc +++ b/docs/reference/release-notes/8.9.0.asciidoc @@ -12,6 +12,8 @@ task is longer than the model's max_sequence_length and truncate is set to none then inference fails with the message `question answering result has invalid dimension`. (issue: {es-issue}97917[#97917]) +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [[breaking-8.9.0]] [float] === Breaking changes diff --git a/docs/reference/release-notes/8.9.1.asciidoc b/docs/reference/release-notes/8.9.1.asciidoc index 18c226538c4b9..680860622c1bb 100644 --- a/docs/reference/release-notes/8.9.1.asciidoc +++ b/docs/reference/release-notes/8.9.1.asciidoc @@ -3,6 +3,11 @@ Also see <>. +[[known-issues-8.9.1]] +[float] +=== Known issues +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [[bug-8.9.1]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.9.2.asciidoc b/docs/reference/release-notes/8.9.2.asciidoc index 6b00405261daf..8464d21e1ccc4 100644 --- a/docs/reference/release-notes/8.9.2.asciidoc +++ b/docs/reference/release-notes/8.9.2.asciidoc @@ -3,6 +3,11 @@ Also see <>. +[[known-issues-8.9.2]] +[float] +=== Known issues +include::8.7.1.asciidoc[tag=no-preventive-gc-issue] + [float] [[security-updates-8.9.2]] === Security updates diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 4bf1ceabe08d8..c39719f1a3b61 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -76,12 +76,10 @@ to search one or more `dense_vector` fields with indexing enabled. requires the following mapping options: + -- -* An `index` value of `true`. - * A `similarity` value. This value determines the similarity metric used to score documents based on similarity between the query and document vector.
For a list of available metrics, see the <> -parameter documentation. +parameter documentation. The `similarity` setting defaults to `cosine`. [source,console] ---- PUT image-index { "mappings": { "properties": { "image-vector": { "type": "dense_vector", "dims": 3, - "index": true, "similarity": "l2_norm" }, "title-vector": { "type": "dense_vector", "dims": 5, - "index": true, "similarity": "l2_norm" }, "title": { @@ -158,7 +154,7 @@ NOTE: Support for approximate kNN search was added in version 8.0. Before this, `dense_vector` fields did not support enabling `index` in the mapping. If you created an index prior to 8.0 containing `dense_vector` fields, then to support approximate kNN search the data must be reindexed using a new field -mapping that sets `index: true`. +mapping that sets `index: true`, which is the default option. @@ -199,9 +195,7 @@ PUT byte-image-index "byte-image-vector": { "type": "dense_vector", "element_type": "byte", - "dims": 2, - "index": true, - "similarity": "cosine" + "dims": 2 }, "title": { "type": "text" @@ -516,9 +510,7 @@ PUT passage_vectors "properties": { "vector": { "type": "dense_vector", - "dims": 2, - "index": true, - "similarity": "cosine" + "dims": 2 }, "text": { "type": "text", @@ -877,7 +869,6 @@ PUT image-index "image-vector": { "type": "dense_vector", "dims": 3, - "index": true, "similarity": "l2_norm", "index_options": { "type": "hnsw", @@ -912,8 +903,8 @@ the global top `k` matches across shards. You cannot set the To run an exact kNN search, use a `script_score` query with a vector function. . Explicitly map one or more `dense_vector` fields. If you don't intend to use -the field for approximate kNN, omit the `index` mapping option or set it to -`false`. This can significantly improve indexing speed. +the field for approximate kNN, set the `index` mapping option to `false`. This +can significantly improve indexing speed. + [source,console] ---- diff --git a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc index 252ef827649fa..2b2090405af60 100644 --- a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc @@ -178,6 +178,20 @@ complete. In this case it still instructs some of the other nodes in the cluster to attempt to read the blob, but all of these reads must fail to find the blob. +Linearizable registers are special blobs that {es} manipulates using an atomic +compare-and-exchange operation. This operation ensures correct and +strongly-consistent behavior even when the blob is accessed by multiple nodes +at the same time. The detailed implementation of the compare-and-exchange +operation on linearizable registers varies by repository type. Repository +analysis verifies that uncontended compare-and-exchange operations on a +linearizable register blob always succeed. Repository analysis also verifies +that contended operations either succeed or report the contention but do not +return incorrect results. If an operation fails due to contention, {es} retries +the operation until it succeeds. Most of the compare-and-exchange operations +performed by repository analysis atomically increment a counter which is +represented as an 8-byte blob. Some operations also verify the behavior on +small blobs with sizes other than 8 bytes.
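You can exercise these checks, including the register checks, by invoking the analysis API directly. A minimal invocation might look like the following sketch, where `my_repository` is a placeholder for a registered repository name and the parameters are optional:

[source,console]
----
POST /_snapshot/my_repository/_analyze?blob_count=10&max_blob_size=1mb
----
// TEST[skip:requires a registered repository]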
+ [[repo-analysis-api-path-params]] ==== {api-path-parms-title} diff --git a/docs/reference/snapshot-restore/repository-azure.asciidoc b/docs/reference/snapshot-restore/repository-azure.asciidoc index e848ec9620cb4..35cf454906050 100644 --- a/docs/reference/snapshot-restore/repository-azure.asciidoc +++ b/docs/reference/snapshot-restore/repository-azure.asciidoc @@ -257,3 +257,15 @@ following naming rules: permitted in container names. * All letters in a container name must be lowercase. * Container names must be from 3 through 63 characters long. + +[[repository-azure-linearizable-registers]] +==== Linearizable register implementation + +The linearizable register implementation for Azure repositories is based on +Azure's support for strongly consistent leases. Each lease may only be held by +a single node at any time. The node presents its lease when performing a read +or write operation on a protected blob. Lease-protected operations fail if the +lease is invalid or expired. To perform a compare-and-exchange operation on a +register, {es} first obtains a lease on the blob, then reads the blob contents +under the lease, and finally uploads the updated blob under the same lease. +This process ensures that the read and write operations happen atomically. diff --git a/docs/reference/snapshot-restore/repository-gcs.asciidoc b/docs/reference/snapshot-restore/repository-gcs.asciidoc index d99b9bc81567f..b359952715a73 100644 --- a/docs/reference/snapshot-restore/repository-gcs.asciidoc +++ b/docs/reference/snapshot-restore/repository-gcs.asciidoc @@ -275,3 +275,13 @@ The service account used to access the bucket must have the "Writer" access to t 3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser]. 4. Select the bucket and "Edit bucket permission". 5. The service account must be configured as a "User" with "Writer" access. + +[[repository-gcs-linearizable-registers]] +==== Linearizable register implementation + +The linearizable register implementation for GCS repositories is based on GCS's +support for strongly consistent preconditions on put-blob operations. To +perform a compare-and-exchange operation on a register, {es} retrieves the +register blob and its current generation, and then uploads the updated blob +using the observed generation as its precondition. The precondition ensures +that the generation has not changed in the meantime. diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 70993f5b515b3..3f2210f51cbb5 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -12,7 +12,7 @@ https://www.elastic.co/cloud/.* To register an S3 repository, specify the type as `s3` when creating the repository. The repository defaults to using https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html[ECS -IAM Role] credentials for authentication. You can also use <> Kubernetes service accounts. +IAM Role] credentials for authentication. You can also use <> for authentication. The only mandatory setting is the bucket name: @@ -198,75 +198,6 @@ pattern then you should set this setting to `true` when upgrading. https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/ClientConfiguration.html#setSignerOverride-java.lang.String-[AWS Java SDK documentation] for details. Defaults to empty string which means that no signing algorithm override will be used. 
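As a minimal sketch of these settings in use, the following request registers a repository that needs nothing beyond the mandatory bucket name; the repository and bucket names here are placeholders:

[source,console]
----
PUT _snapshot/my_s3_repository
{
  "type": "s3",
  "settings": {
    "bucket": "my-bucket"
  }
}
----
// TEST[skip:requires S3 credentials]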
-[discrete] -[[repository-s3-compatible-services]] -===== S3-compatible services - -There are a number of storage systems that provide an S3-compatible API, and -the `repository-s3` type allows you to use these systems in place of AWS S3. -To do so, you should set the `s3.client.CLIENT_NAME.endpoint` setting to the -system's endpoint. This setting accepts IP addresses and hostnames and may -include a port. For example, the endpoint may be `172.17.0.2` or -`172.17.0.2:9000`. - -By default {es} communicates with your storage system using HTTPS, and -validates the repository's certificate chain using the JVM-wide truststore. -Ensure that the JVM-wide truststore includes an entry for your repository. If -you wish to use unsecured HTTP communication instead of HTTPS, set -`s3.client.CLIENT_NAME.protocol` to `http`. - -https://minio.io[MinIO] is an example of a storage system that provides an -S3-compatible API. The `repository-s3` type allows {es} to work with -MinIO-backed repositories as well as repositories stored on AWS S3. Other -S3-compatible storage systems may also work with {es}, but these are not -covered by the {es} test suite. - -Note that some storage systems claim to be S3-compatible but do not faithfully -emulate S3's behaviour in full. The `repository-s3` type requires full -compatibility with S3. In particular it must support the same set of API -endpoints, return the same errors in case of failures, and offer consistency and -performance at least as good as S3 even when accessed concurrently by multiple -nodes. You will need to work with the supplier of your storage system to address -any incompatibilities you encounter. Please do not report {es} issues involving -storage systems which claim to be S3-compatible unless you can demonstrate that -the same issue exists when using a genuine AWS S3 repository. - -You can perform some basic checks of the suitability of your storage system -using the {ref}/repo-analysis-api.html[repository analysis API]. If this API -does not complete successfully, or indicates poor performance, then your -storage system is not fully compatible with AWS S3 and therefore unsuitable for -use as a snapshot repository. However, these checks do not guarantee full -compatibility. - -Most storage systems can be configured to log the details of their interaction -with {es}. If you are investigating a suspected incompatibility with AWS S3, it -is usually simplest to collect these logs and provide them to the supplier of -your storage system for further analysis. If the incompatibility is not clear -from the logs emitted by the storage system, configure {es} to log every -request it makes to the S3 API by <> of the `com.amazonaws.request` logger to `DEBUG`: - -[source,console] ----- -PUT /_cluster/settings -{ - "persistent": { - "logger.com.amazonaws.request": "DEBUG" - } -} ----- -// TEST[skip:we don't really want to change this logger] - -Collect the Elasticsearch logs covering the time period of the failed analysis -from all nodes in your cluster and share them with the supplier of your storage -system along with the analysis response so they can use them to determine the -problem. See the -https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-logging.html[AWS Java SDK] -documentation for further information, including details about other loggers -that can be used to obtain even more verbose logs. 
When you have finished -collecting the logs needed by your supplier, set the logger settings back to -`null` to return to the default logging configuration. See <> -and <> for more information. [[repository-s3-repository]] ==== Repository settings @@ -401,7 +332,7 @@ This sets up a repository that uses all client settings from the client `my.s3.endpoint` by the repository settings. [[repository-s3-permissions]] -===== Recommended S3 permissions +==== Recommended S3 permissions In order to restrict the Elasticsearch snapshot process to the minimum required resources, we recommend using Amazon IAM in conjunction with pre-existing S3 @@ -493,7 +424,28 @@ bucket, in this example, named "foo". The bucket needs to exist to register a repository for snapshots. If you did not create the bucket then the repository registration will fail. -===== Cleaning up multi-part uploads +[[iam-kubernetes-service-accounts]] +[discrete] +===== Using IAM roles for Kubernetes service accounts for authentication + +If you want to use https://aws.amazon.com/blogs/opensource/introducing-fine-grained-iam-roles-service-accounts/[Kubernetes service accounts] +for authentication, you need to add a symlink to the `$AWS_WEB_IDENTITY_TOKEN_FILE` environment variable +(which should be automatically set by a Kubernetes pod) in the S3 repository config directory, so the repository +can have the read access for the service account (a repository can't read any files outside its config directory). +For example: + +[source,bash] +---- +mkdir -p "${ES_PATH_CONF}/repository-s3" +ln -s $AWS_WEB_IDENTITY_TOKEN_FILE "${ES_PATH_CONF}/repository-s3/aws-web-identity-token-file" +---- + +IMPORTANT: The symlink must be created on all data and master eligible nodes and be readable +by the `elasticsearch` user. By default, {es} runs as user `elasticsearch` using uid:gid `1000:0`. + +If the symlink exists, it will be used by default by all S3 repositories that don't have explicit `client` credentials. + +==== Cleaning up multi-part uploads {es} uses S3's multi-part upload process to upload larger blobs to the repository. The multi-part upload process works by dividing each blob into @@ -521,7 +473,6 @@ a bucket lifecycle policy] to automatically abort incomplete uploads once they reach a certain age. [[repository-s3-aws-vpc]] -[discrete] ==== AWS VPC bandwidth settings AWS instances resolve S3 endpoints to a public IP. If the Elasticsearch @@ -537,23 +488,81 @@ bandwidth of your VPC's NAT instance. Instances residing in a public subnet in an AWS VPC will connect to S3 via the VPC's internet gateway and not be bandwidth limited by the VPC's NAT instance. +[[repository-s3-compatible-services]] +==== S3-compatible services -[[iam-kubernetes-service-accounts]] -[discrete] -==== Using IAM roles for Kubernetes service accounts for authentication -If you want to use https://aws.amazon.com/blogs/opensource/introducing-fine-grained-iam-roles-service-accounts/[Kubernetes service accounts] -for authentication, you need to add a symlink to the `$AWS_WEB_IDENTITY_TOKEN_FILE` environment variable -(which should be automatically set by a Kubernetes pod) in the S3 repository config directory, so the repository -can have the read access for the service account (a repository can't read any files outside its config directory). -For example: +There are a number of storage systems that provide an S3-compatible API, and +the `repository-s3` type allows you to use these systems in place of AWS S3. 
+To do so, you should set the `s3.client.CLIENT_NAME.endpoint` setting to the
+system's endpoint. This setting accepts IP addresses and hostnames and may
+include a port. For example, the endpoint may be `172.17.0.2` or
+`172.17.0.2:9000`.
 
-[source,bash]
+By default {es} communicates with your storage system using HTTPS, and
+validates the repository's certificate chain using the JVM-wide truststore.
+Ensure that the JVM-wide truststore includes an entry for your repository. If
+you wish to use unsecured HTTP communication instead of HTTPS, set
+`s3.client.CLIENT_NAME.protocol` to `http`.
+
+https://minio.io[MinIO] is an example of a storage system that provides an
+S3-compatible API. The `repository-s3` type allows {es} to work with
+MinIO-backed repositories as well as repositories stored on AWS S3. Other
+S3-compatible storage systems may also work with {es}, but these are not
+covered by the {es} test suite.
+
+Note that some storage systems claim to be S3-compatible but do not faithfully
+emulate S3's behaviour in full. The `repository-s3` type requires full
+compatibility with S3. In particular it must support the same set of API
+endpoints, return the same errors in case of failures, and offer consistency and
+performance at least as good as S3 even when accessed concurrently by multiple
+nodes. You will need to work with the supplier of your storage system to address
+any incompatibilities you encounter. Please do not report {es} issues involving
+storage systems which claim to be S3-compatible unless you can demonstrate that
+the same issue exists when using a genuine AWS S3 repository.
+
+You can perform some basic checks of the suitability of your storage system
+using the {ref}/repo-analysis-api.html[repository analysis API]. If this API
+does not complete successfully, or indicates poor performance, then your
+storage system is not fully compatible with AWS S3 and therefore unsuitable for
+use as a snapshot repository. However, these checks do not guarantee full
+compatibility.
+
+Most storage systems can be configured to log the details of their interaction
+with {es}. If you are investigating a suspected incompatibility with AWS S3, it
+is usually simplest to collect these logs and provide them to the supplier of
+your storage system for further analysis. If the incompatibility is not clear
+from the logs emitted by the storage system, configure {es} to log every
+request it makes to the S3 API by <<cluster-logging,setting the log level>> of
+the `com.amazonaws.request` logger to `DEBUG`:
+
+[source,console]
 ----
-mkdir -p "${ES_PATH_CONF}/repository-s3"
-ln -s $AWS_WEB_IDENTITY_TOKEN_FILE "${ES_PATH_CONF}/repository-s3/aws-web-identity-token-file"
+PUT /_cluster/settings
+{
+  "persistent": {
+    "logger.com.amazonaws.request": "DEBUG"
+  }
+}
 ----
+// TEST[skip:we don't really want to change this logger]
 
-IMPORTANT: The symlink must be created on all data and master eligible nodes and be readable
-by the `elasticsearch` user. By default, {es} runs as user `elasticsearch` using uid:gid `1000:0`.
+Collect the Elasticsearch logs covering the time period of the failed analysis
+from all nodes in your cluster and share them with the supplier of your storage
+system along with the analysis response so they can use them to determine the
+problem. See the
+https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-logging.html[AWS Java SDK]
+documentation for further information, including details about other loggers
+that can be used to obtain even more verbose logs.
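+
+If the incompatibility is still unclear, more verbose output may help. As a
+hedged sketch building on the request above: the AWS SDK logging guide linked
+here documents `org.apache.http.wire` as the wire-level logger (whether it
+applies depends on your SDK logging backend, so treat it as an assumption),
+and both loggers can be raised together:
+
+[source,console]
+----
+PUT /_cluster/settings
+{
+  "persistent": {
+    "logger.com.amazonaws.request": "DEBUG",
+    "logger.org.apache.http.wire": "DEBUG"
+  }
+}
+----
+// TEST[skip:we don't really want to change this logger]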
+
+When you have finished collecting the logs needed by your supplier, set the
+logger settings back to `null` to return to the default logging configuration.
+See <<cluster-logging>> and <<cluster-update-settings>> for more information.
-If the symlink exists, it will be used by default by all S3 repositories that don't have explicit `client` credentials.
+
+[[repository-s3-linearizable-registers]]
+==== Linearizable register implementation
+
+The linearizable register implementation for S3 repositories is based on the
+strongly consistent semantics of the multipart upload API. {es} first creates a
+multipart upload to indicate its intention to perform a linearizable register
+operation. {es} then lists and cancels all other multipart uploads for the same
+register. {es} then attempts to complete the upload. If the upload completes
+successfully then the compare-and-exchange operation was atomic.
diff --git a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc
index 0bd64d43f1381..6be49d9d4422f 100644
--- a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc
+++ b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc
@@ -84,3 +84,12 @@ each node, but for these accounts to have different numeric user or group IDs.
 If your shared file system uses NFS then ensure that every node is running with
 the same numeric UID and GID, or else update your NFS configuration to account
 for the variance in numeric IDs across nodes.
+
+[[repository-fs-linearizable-registers]]
+==== Linearizable register implementation
+
+The linearizable register implementation for shared filesystem repositories is
+based around file locking. To perform a compare-and-exchange operation on a
+register, {es} first locks the underlying file and then writes the updated
+contents under the same lock. This ensures that the file has not changed in the
+meantime.
diff --git a/docs/reference/tab-widgets/esql/esql-getting-started.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-discover-console.asciidoc
similarity index 89%
rename from docs/reference/tab-widgets/esql/esql-getting-started.asciidoc
rename to docs/reference/tab-widgets/esql/esql-getting-started-discover-console.asciidoc
index 0ebcb7c92e59f..b8998ef199c99 100644
--- a/docs/reference/tab-widgets/esql/esql-getting-started.asciidoc
+++ b/docs/reference/tab-widgets/esql/esql-getting-started-discover-console.asciidoc
@@ -34,6 +34,9 @@ FROM sample_data
 
 include::../../esql/esql-kibana.asciidoc[tag=esql-mode]
 
+Adjust the time filter so it includes the timestamps in the sample data (October
+23rd, 2023).
+
 After switching to {esql} mode, the query bar shows a sample query. You can
 replace this query with the queries in this getting started guide.
diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc
new file mode 100644
index 0000000000000..39560c7500b42
--- /dev/null
+++ b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc
@@ -0,0 +1,65 @@
+// tag::own-deployment[]
+
+The following requests create and execute a policy called `clientip_policy`.
+The policy links an IP address to an environment ("Development", "QA", or
+"Production"):
+
+[source,console]
+----
+PUT clientips
+{
+  "mappings": {
+    "properties": {
+      "client.ip": {
+        "type": "keyword"
+      },
+      "env": {
+        "type": "keyword"
+      }
+    }
+  }
+}
+
+PUT clientips/_bulk
+{ "index" : {}}
+{ "client.ip": "172.21.0.5", "env": "Development" }
+{ "index" : {}}
+{ "client.ip": "172.21.2.113", "env": "QA" }
+{ "index" : {}}
+{ "client.ip": "172.21.2.162", "env": "QA" }
+{ "index" : {}}
+{ "client.ip": "172.21.3.15", "env": "Production" }
+{ "index" : {}}
+{ "client.ip": "172.21.3.16", "env": "Production" }
+
+PUT /_enrich/policy/clientip_policy
+{
+  "match": {
+    "indices": "clientips",
+    "match_field": "client.ip",
+    "enrich_fields": ["env"]
+  }
+}
+
+PUT /_enrich/policy/clientip_policy/_execute
+----
+
+////
+[source,console]
+----
+DELETE /_enrich/policy/clientip_policy
+----
+// TEST[continued]
+////
+
+// end::own-deployment[]
+
+
+// tag::demo-env[]
+
+On the demo environment at https://esql.demo.elastic.co/[esql.demo.elastic.co],
+an enrich policy called `clientip_policy` has already been created and executed.
+The policy links an IP address to an environment ("Development", "QA", or
+"Production").
+
+// end::demo-env[]
diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc
new file mode 100644
index 0000000000000..434954d8d400a
--- /dev/null
+++ b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc
@@ -0,0 +1,48 @@
+// tag::own-deployment[]
+
+First ingest some sample data. In {kib}, open the main menu and select *Dev
+Tools*. Run the following two requests:
+
+[source,console]
+----
+PUT sample_data
+{
+  "mappings": {
+    "properties": {
+      "client.ip": {
+        "type": "ip"
+      },
+      "message": {
+        "type": "keyword"
+      }
+    }
+  }
+}
+
+PUT sample_data/_bulk
+{"index": {}}
+{"@timestamp": "2023-10-23T12:15:03.360Z", "client.ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event.duration": 3450233}
+{"index": {}}
+{"@timestamp": "2023-10-23T12:27:28.948Z", "client.ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event.duration": 2764889}
+{"index": {}}
+{"@timestamp": "2023-10-23T13:33:34.937Z", "client.ip": "172.21.0.5", "message": "Disconnected", "event.duration": 1232382}
+{"index": {}}
+{"@timestamp": "2023-10-23T13:51:54.732Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 725448}
+{"index": {}}
+{"@timestamp": "2023-10-23T13:52:55.015Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 8268153}
+{"index": {}}
+{"@timestamp": "2023-10-23T13:53:55.832Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 5033755}
+{"index": {}}
+{"@timestamp": "2023-10-23T13:55:01.543Z", "client.ip": "172.21.3.15", "message": "Connected to 10.1.0.1", "event.duration": 1756467}
+----
+
+// end::own-deployment[]
+
+
+// tag::demo-env[]
+
+The data set used in this guide has been preloaded into the Elastic {esql}
+public demo environment. Visit
+https://esql.demo.elastic.co/[esql.demo.elastic.co] to start using it.
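+
+For instance, a quick check that the data is queryable (a sketch; `sample_data`
+is the index name used throughout this guide) runs an {esql} query through the
+`_query` endpoint:
+
+[source,console]
+----
+POST /_query
+{
+  "query": "FROM sample_data | LIMIT 10"
+}
+----
+// TEST[skip:demo environment only]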
+ +// end::demo-env[] diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-widget.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc similarity index 72% rename from docs/reference/tab-widgets/esql/esql-getting-started-widget.asciidoc rename to docs/reference/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc index 49dc573f3b0bb..dff80e25812c3 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-widget.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc @@ -1,6 +1,6 @@ ++++ -
-
+
+
@@ -31,7 +31,7 @@ include::esql-getting-started.asciidoc[tag=console] hidden=""> ++++ -include::esql-getting-started.asciidoc[tag=discover] +include::esql-getting-started-discover-console.asciidoc[tag=discover] ++++
diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc new file mode 100644 index 0000000000000..cafefeb2652e4 --- /dev/null +++ b/docs/reference/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc @@ -0,0 +1,39 @@ +++++ +
+
+ + +
+
+++++ + +include::esql-getting-started-enrich-policy.asciidoc[tag=own-deployment] + +++++ +
+ +
+++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc new file mode 100644 index 0000000000000..4a33cf3f08866 --- /dev/null +++ b/docs/reference/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc @@ -0,0 +1,39 @@ +++++ +
+
+ + +
+
+++++ + +include::esql-getting-started-sample-data.asciidoc[tag=own-deployment] + +++++ +
+ +
+++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc index 2fe2f9cea83f9..b702a1fc8f426 100644 --- a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc @@ -63,9 +63,7 @@ PUT my-index "properties": { "my_embeddings.predicted_value": { <1> "type": "dense_vector", <2> - "dims": 384,<3> - "index": true, - "similarity": "cosine" + "dims": 384 <3> }, "my_text_field": { <4> "type": "text" <5> diff --git a/libs/core/src/main/java/org/elasticsearch/core/UpdateForV9.java b/libs/core/src/main/java/org/elasticsearch/core/UpdateForV9.java new file mode 100644 index 0000000000000..2a31e2ccde222 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/UpdateForV9.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.core; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to identify a block of code (a whole class, a method, or a field) that needs to be reviewed (for cleanup, remove or change) + * before releasing 9.0 + */ +@Retention(RetentionPolicy.SOURCE) +@Target({ ElementType.LOCAL_VARIABLE, ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE }) +public @interface UpdateForV9 { +} diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 188d0d282ffb4..4f1c33819fee9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -435,16 +435,12 @@ public void testComposableTemplateOnlyMatchingWithDataStreamName() throws Except }"""; PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request("id_1"); request.indexTemplate( - new ComposableIndexTemplate( - List.of(dataStreamName), // use no wildcard, so that backing indices don't match just by name - new Template(null, new CompressedXContent(mapping), null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + // use no wildcard, so that backing indices don't match just by name + .indexPatterns(List.of(dataStreamName)) + .template(new Template(null, new CompressedXContent(mapping), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); @@ -517,16 +513,11 @@ public void testTimeStampValidationInvalidFieldMapping() throws Exception { }"""; PutComposableIndexTemplateAction.Request createTemplateRequest = new PutComposableIndexTemplateAction.Request("logs-foo"); createTemplateRequest.indexTemplate( - new ComposableIndexTemplate( - List.of("logs-*"), - new Template(null, new 
CompressedXContent(mapping), null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-*")) + .template(new Template(null, new CompressedXContent(mapping), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); Exception e = expectThrows( @@ -672,16 +663,14 @@ public void testCannotDeleteComposableTemplateUsedByDataStream() throws Exceptio // Now replace it with a higher-priority template and delete the old one PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request("id2"); request.indexTemplate( - new ComposableIndexTemplate( - Collections.singletonList("metrics-foobar*"), // Match the other data stream with a slightly different pattern - new Template(null, null, null), - null, - 2L, // Higher priority than the other composable template - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + // Match the other data stream with a slightly different pattern + .indexPatterns(Collections.singletonList("metrics-foobar*")) + .template(new Template(null, null, null)) + // Higher priority than the other composable template + .priority(2L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); @@ -1211,15 +1200,11 @@ public void testIndexDocsWithCustomRoutingTargetingDataStreamIsNotAllowed() thro } public void testIndexDocsWithCustomRoutingAllowed() throws Exception { - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of("logs-foobar*"), - new Template(null, null, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, true) - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-foobar*")) + .template(new Template(null, null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, true)) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("id1").indexTemplate(template) @@ -1357,16 +1342,11 @@ public void testMultipleTimestampValuesInDocument() throws Exception { public void testMixedAutoCreate() throws Exception { PutComposableIndexTemplateAction.Request createTemplateRequest = new PutComposableIndexTemplateAction.Request("logs-foo"); createTemplateRequest.indexTemplate( - new ComposableIndexTemplate( - List.of("logs-foo*"), - new Template(null, new CompressedXContent(generateMapping("@timestamp")), null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-foo*")) + .template(new Template(null, new CompressedXContent(generateMapping("@timestamp")), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, createTemplateRequest).actionGet(); @@ -1936,19 +1916,17 @@ public void testPartitionedTemplate() throws IOException { /** * partition size with no routing required */ - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of("logs"), - new Template( - Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), - null, - null - ), - null, - null, - null, - null, - new 
ComposableIndexTemplate.DataStreamTemplate(false, true) - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs")) + .template( + new Template( + Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), + null, + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, true)) + .build(); ComposableIndexTemplate finalTemplate = template; client().execute( PutComposableIndexTemplateAction.INSTANCE, @@ -1957,24 +1935,22 @@ public void testPartitionedTemplate() throws IOException { /** * partition size with routing required */ - template = new ComposableIndexTemplate( - List.of("logs"), - new Template( - Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), - new CompressedXContent(""" - { - "_routing": { - "required": true - } - }"""), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, true) - ); + template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs")) + .template( + new Template( + Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), + new CompressedXContent(""" + { + "_routing": { + "required": true + } + }"""), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, true)) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(template) @@ -1983,19 +1959,17 @@ public void testPartitionedTemplate() throws IOException { /** * routing settings with allow custom routing false */ - template = new ComposableIndexTemplate( - List.of("logs"), - new Template( - Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), - null, - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false) - ); + template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs")) + .template( + new Template( + Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), + null, + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); ComposableIndexTemplate finalTemplate1 = template; Exception e = expectThrows( IllegalArgumentException.class, @@ -2013,24 +1987,22 @@ public void testPartitionedTemplate() throws IOException { } public void testRoutingEnabledInMappingDisabledInDataStreamTemplate() throws IOException { - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of("logs"), - new Template( - Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), - new CompressedXContent(""" - { - "_routing": { - "required": true - } - }"""), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false) - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs")) + .template( + new Template( + Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), + new CompressedXContent(""" + { + "_routing": { + "required": true + } + }"""), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); Exception e = expectThrows( IllegalArgumentException.class, () -> 
client().execute( @@ -2046,28 +2018,26 @@ public void testSearchWithRouting() throws IOException, ExecutionException, Inte /** * partition size with routing required */ - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of("my-logs"), - new Template( - Settings.builder() - .put("index.number_of_shards", "10") - .put("index.number_of_routing_shards", "10") - .put("index.routing_partition_size", "4") - .build(), - new CompressedXContent(""" - { - "_routing": { - "required": true - } - }"""), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, true) - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("my-logs")) + .template( + new Template( + Settings.builder() + .put("index.number_of_shards", "10") + .put("index.number_of_routing_shards", "10") + .put("index.routing_partition_size", "4") + .build(), + new CompressedXContent(""" + { + "_routing": { + "required": true + } + }"""), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, true)) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(template) @@ -2328,16 +2298,12 @@ static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), aliases, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? 
null : CompressedXContent.fromJSON(mappings), aliases, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java index aeb7516c35816..69c28a06bb206 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataTierDataStreamIT.java @@ -33,16 +33,11 @@ public void testDefaultDataStreamAllocateToHot() { startHotOnlyNode(); ensureGreen(); - ComposableIndexTemplate template = new ComposableIndexTemplate( - Collections.singletonList(index), - null, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(index)) + + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("template").indexTemplate(template) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamIT.java index 922b58e3920e1..734e2d7273d19 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamIT.java @@ -317,15 +317,11 @@ public Collection getSystemDataStreamDescriptors() { ".test-data-stream", "system data stream test", Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(".test-data-stream"), - new Template(Settings.EMPTY, mappings, null), - null, - null, - null, - null, - new DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(".test-data-stream")) + .template(new Template(Settings.EMPTY, mappings, null)) + .dataStreamTemplate(new DataStreamTemplate()) + .build(), Map.of(), List.of("product"), ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java index 0f60cbba0a4ff..b0724a9c9c0e3 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java @@ -238,15 +238,10 @@ public Collection getSystemDataStreamDescriptors() { SYSTEM_DATA_STREAM_NAME, "a system data stream for testing", SystemDataStreamDescriptor.Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(".system-data-stream"), - null, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(".system-data-stream")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(), Map.of(), 
Collections.singletonList("test"), new ExecutorNames(ThreadPool.Names.SYSTEM_CRITICAL_READ, ThreadPool.Names.SYSTEM_READ, ThreadPool.Names.SYSTEM_WRITE) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 5dbf52f33d7da..ab42d831c6545 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -115,16 +115,11 @@ public void testTimeRanges() throws Exception { if (randomBoolean()) { var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template(templateSettings.build(), mapping, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template(new Template(templateSettings.build(), mapping, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } else { @@ -134,16 +129,12 @@ public void testTimeRanges() throws Exception { var putTemplateRequest = new PutComposableIndexTemplateAction.Request("id"); putTemplateRequest.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template(templateSettings.build(), null, null), - List.of("1"), - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template(new Template(templateSettings.build(), null, null)) + .componentTemplates(List.of("1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, putTemplateRequest).actionGet(); } @@ -249,20 +240,17 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce { var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template( - Settings.builder().put("index.mode", "time_series").put("index.routing_path", "metricset").build(), - new CompressedXContent(mappingTemplate), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template( + new Template( + Settings.builder().put("index.mode", "time_series").put("index.routing_path", "metricset").build(), + new CompressedXContent(mappingTemplate), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); var e = expectThrows( IllegalArgumentException.class, @@ -280,20 +268,17 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce { var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template( - Settings.builder().put("index.mode", "time_series").build(), - new CompressedXContent(mappingTemplate), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + 
ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template( + new Template( + Settings.builder().put("index.mode", "time_series").build(), + new CompressedXContent(mappingTemplate), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); var e = expectThrows( InvalidIndexTemplateException.class, @@ -317,20 +302,17 @@ public void testInvalidTsdbTemplatesNoKeywordFieldType() throws Exception { }"""; var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template( - Settings.builder().put("index.mode", "time_series").put("index.routing_path", "metricset").build(), - new CompressedXContent(mappingTemplate), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template( + new Template( + Settings.builder().put("index.mode", "time_series").put("index.routing_path", "metricset").build(), + new CompressedXContent(mappingTemplate), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); Exception e = expectThrows( IllegalArgumentException.class, @@ -360,20 +342,17 @@ public void testInvalidTsdbTemplatesMissingSettings() throws Exception { }"""; var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template( - Settings.builder().put("index.routing_path", "metricset").build(), - new CompressedXContent(mappingTemplate), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template( + new Template( + Settings.builder().put("index.routing_path", "metricset").build(), + new CompressedXContent(mappingTemplate), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); var e = expectThrows( IllegalArgumentException.class, @@ -389,16 +368,11 @@ public void testSkippingShards() throws Exception { var templateSettings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "metricset").build(); var request = new PutComposableIndexTemplateAction.Request("id1"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("pattern-1"), - new Template(templateSettings, mapping, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("pattern-1")) + .template(new Template(templateSettings, mapping, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); var indexRequest = new IndexRequest("pattern-1").opType(DocWriteRequest.OpType.CREATE).setRefreshPolicy("true"); @@ -408,16 +382,11 @@ public void testSkippingShards() throws Exception { { var request = new PutComposableIndexTemplateAction.Request("id2"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("pattern-2"), - new Template(null, mapping, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("pattern-2")) + 
.template(new Template(null, mapping, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); var indexRequest = new IndexRequest("pattern-2").opType(DocWriteRequest.OpType.CREATE).setRefreshPolicy("true"); @@ -457,26 +426,23 @@ public void testTrimId() throws Exception { String dataStreamName = "k8s"; var putTemplateRequest = new PutComposableIndexTemplateAction.Request("id"); putTemplateRequest.indexTemplate( - new ComposableIndexTemplate( - List.of(dataStreamName + "*"), - new Template( - Settings.builder() - .put("index.mode", "time_series") - .put("index.number_of_replicas", 0) - // Reduce sync interval to speedup this integraton test, - // otherwise by default it will take 30 seconds before minimum retained seqno is updated: - .put("index.soft_deletes.retention_lease.sync_interval", "100ms") - .build(), - new CompressedXContent(MAPPING_TEMPLATE), - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName + "*")) + .template( + new Template( + Settings.builder() + .put("index.mode", "time_series") + .put("index.number_of_replicas", 0) + // Reduce sync interval to speedup this integraton test, + // otherwise by default it will take 30 seconds before minimum retained seqno is updated: + .put("index.soft_deletes.retention_lease.sync_interval", "100ms") + .build(), + new CompressedXContent(MAPPING_TEMPLATE), + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, putTemplateRequest).actionGet(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java index d2baec3150392..8e590d3f28346 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java @@ -199,20 +199,18 @@ public Collection getSystemDataStreamDescriptors() { ".test-data-stream", "system data stream test", Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(".test-data-stream"), - new Template( - Settings.EMPTY, - mappings, - null, - DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() - ), - null, - null, - null, - null, - new DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(".test-data-stream")) + .template( + new Template( + Settings.EMPTY, + mappings, + null, + DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() + ) + ) + .dataStreamTemplate(new DataStreamTemplate()) + .build(), Map.of(), List.of("product"), ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 0d3588ba20b9a..5bbc007cfb272 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -182,16 +182,10 @@ public void testOriginationDate() throws Exception { }"""; PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request("id2"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("index_*"), - new Template(null, CompressedXContent.fromJSON(mapping), null, null), - null, - null, - null, - null, - null, - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("index_*")) + .template(new Template(null, CompressedXContent.fromJSON(mapping), null, null)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); @@ -716,16 +710,12 @@ static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java index c9968a545cb7d..57febae28bb4d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java @@ -351,16 +351,12 @@ static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? 
null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java index da0caff9e591d..928512f659039 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java @@ -226,16 +226,11 @@ private String createDataStream(boolean hidden) throws Exception { Template idxTemplate = new Template(null, new CompressedXContent(""" {"properties":{"@timestamp":{"type":"date"},"data":{"type":"keyword"}}} """), null); - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of(dataStreamName + "*"), - idxTemplate, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(hidden, false), - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName + "*")) + .template(idxTemplate) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(hidden, false)) + .build(); assertAcked( client().execute( PutComposableIndexTemplateAction.INSTANCE, diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java index 4f36feba17c89..e7339cc3f334a 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataIndexTemplateServiceTests.java @@ -59,16 +59,13 @@ public void testRequireRoutingPath() throws Exception { // Missing routing path should fail validation var componentTemplate = new ComponentTemplate(new Template(null, new CompressedXContent("{}"), null), null, null); var state = service.addComponentTemplate(ClusterState.EMPTY_STATE, true, "1", componentTemplate); - var indexTemplate = new ComposableIndexTemplate( - Collections.singletonList("logs-*-*"), - new Template(builder().put("index.mode", "time_series").build(), null, null), - List.of("1"), - 100L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("logs-*-*")) + .template(new Template(builder().put("index.mode", "time_series").build(), null, null)) + .componentTemplates(List.of("1")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); var e = expectThrows(InvalidIndexTemplateException.class, () -> service.addIndexTemplateV2(state, false, "1", indexTemplate)); assertThat(e.getMessage(), containsString("[index.mode=time_series] requires a non-empty [index.routing_path]")); } @@ -81,16 +78,13 @@ public void testRequireRoutingPath() throws Exception { null ); state = service.addComponentTemplate(state, true, "1", componentTemplate); - var indexTemplate = new ComposableIndexTemplate( - Collections.singletonList("logs-*-*"), - new Template(builder().put("index.mode", "time_series").build(), null, null), - List.of("1"), - 100L, - null, - null, - new 
ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("logs-*-*")) + .template(new Template(builder().put("index.mode", "time_series").build(), null, null)) + .componentTemplates(List.of("1")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); state = service.addIndexTemplateV2(state, false, "1", indexTemplate); assertThat(state.getMetadata().templatesV2().get("1"), equalTo(indexTemplate)); } @@ -103,46 +97,39 @@ public void testRequireRoutingPath() throws Exception { null ); state = service.addComponentTemplate(state, true, "1", componentTemplate); - var indexTemplate = new ComposableIndexTemplate( - Collections.singletonList("logs-*-*"), - new Template(null, null, null), - List.of("1"), - 100L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("logs-*-*")) + .template(new Template(null, null, null)) + .componentTemplates(List.of("1")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); state = service.addIndexTemplateV2(state, false, "1", indexTemplate); assertThat(state.getMetadata().templatesV2().get("1"), equalTo(indexTemplate)); } { // Routing path defined in index template - var indexTemplate = new ComposableIndexTemplate( - Collections.singletonList("logs-*-*"), - new Template(builder().put("index.mode", "time_series").put("index.routing_path", "uid").build(), null, null), - List.of("1"), - 100L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("logs-*-*")) + .template(new Template(builder().put("index.mode", "time_series").put("index.routing_path", "uid").build(), null, null)) + .componentTemplates(List.of("1")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); var state = service.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "1", indexTemplate); assertThat(state.getMetadata().templatesV2().get("1"), equalTo(indexTemplate)); } { // Routing fetched from mapping in index template - var indexTemplate = new ComposableIndexTemplate( - Collections.singletonList("logs-*-*"), - new Template(builder().put("index.mode", "time_series").build(), new CompressedXContent(generateTsdbMapping()), null), - List.of("1"), - 100L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("logs-*-*")) + .template( + new Template(builder().put("index.mode", "time_series").build(), new CompressedXContent(generateTsdbMapping()), null) + ) + .componentTemplates(List.of("1")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); var state = service.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "1", indexTemplate); assertThat(state.getMetadata().templatesV2().get("1"), equalTo(indexTemplate)); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java index 
803f5c8661f17..1a9287c1d5ee8 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java @@ -96,16 +96,11 @@ private void createTemplate(boolean tsdb) throws IOException { var templateSettings = Settings.builder().put("index.mode", tsdb ? "time_series" : "standard"); var request = new PutComposableIndexTemplateAction.Request("id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("k8s*"), - new Template(templateSettings.build(), new CompressedXContent(mappingTemplate), null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("k8s*")) + .template(new Template(templateSettings.build(), new CompressedXContent(mappingTemplate), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java index 5a15e831f5ad6..6833f2222b585 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java @@ -83,16 +83,12 @@ static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? 
null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); assertTrue(client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet().isAcknowledged()); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 26ddbaa7ba854..30ecc96a3171c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -9,7 +9,6 @@ package org.elasticsearch.ingest.geoip; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.internal.Client; @@ -65,6 +64,14 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, SystemIndexPlugin, Closeable, PersistentTaskPlugin, ActionPlugin { public static final Setting CACHE_SIZE = Setting.longSetting("ingest.geoip.cache_size", 1000, 0, Setting.Property.NodeScope); private static final int GEOIP_INDEX_MAPPINGS_VERSION = 1; + /** + * No longer used for determining the age of mappings, but system index descriptor + * code requires something be set. We use a value that can be parsed by + * old nodes in mixed-version clusters, just in case any old code exists that + * tries to parse version from index metadata, and that will indicate + * to these old nodes that the mappings are newer than they are. + */ + private static final String LEGACY_VERSION_FIELD_VALUE = "8.12.0"; private final SetOnce ingestService = new SetOnce<>(); private final SetOnce databaseRegistry = new SetOnce<>(); @@ -204,7 +211,7 @@ private static XContentBuilder mappings() { return jsonBuilder().startObject() .startObject(SINGLE_MAPPING_NAME) .startObject("_meta") - .field("version", Version.CURRENT) + .field("version", LEGACY_VERSION_FIELD_VALUE) .field(SystemIndexDescriptor.VERSION_META_KEY, GEOIP_INDEX_MAPPINGS_VERSION) .endObject() .field("dynamic", "strict") diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index 30937ebcbd773..1fcf776ac8428 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -8,7 +8,6 @@ package org.elasticsearch.painless.spi; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -47,11 +46,10 @@ public Whitelist( List whitelistClassBindings, List whitelistInstanceBindings ) { - this.classLoader = Objects.requireNonNull(classLoader); - this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); - this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods)); - this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistClassBindings)); - this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistInstanceBindings)); + this.whitelistClasses = List.copyOf(whitelistClasses); + this.whitelistImportedMethods = List.copyOf(whitelistImportedMethods); + 
this.whitelistClassBindings = List.copyOf(whitelistClassBindings); + this.whitelistInstanceBindings = List.copyOf(whitelistInstanceBindings); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java index 2130f9343dfa3..1daad59768a15 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java @@ -8,11 +8,10 @@ package org.elasticsearch.painless.spi; -import java.util.AbstractMap; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -59,23 +58,12 @@ public WhitelistClass( List whitelistFields, List painlessAnnotations ) { - this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); - - this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); - this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); - this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(whitelistFields)); - - if (painlessAnnotations.isEmpty()) { - this.painlessAnnotations = Collections.emptyMap(); - } else { - this.painlessAnnotations = Collections.unmodifiableMap( - Objects.requireNonNull(painlessAnnotations) - .stream() - .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ); - } + this.whitelistConstructors = List.copyOf(whitelistConstructors); + this.whitelistMethods = List.copyOf(whitelistMethods); + this.whitelistFields = List.copyOf(whitelistFields); + this.painlessAnnotations = painlessAnnotations.stream() + .collect(Collectors.toUnmodifiableMap(Object::getClass, Function.identity())); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java index c1a3c43196647..872482bcf6281 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java @@ -8,11 +8,10 @@ package org.elasticsearch.painless.spi; -import java.util.AbstractMap; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -39,16 +38,7 @@ public WhitelistField(String origin, String fieldName, String canonicalTypeNameP this.origin = Objects.requireNonNull(origin); this.fieldName = Objects.requireNonNull(fieldName); this.canonicalTypeNameParameter = Objects.requireNonNull(canonicalTypeNameParameter); - - if (painlessAnnotations.isEmpty()) { - this.painlessAnnotations = Collections.emptyMap(); - } else { - this.painlessAnnotations = Collections.unmodifiableMap( - Objects.requireNonNull(painlessAnnotations) - .stream() - .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ); - } + this.painlessAnnotations = painlessAnnotations.stream() + 
.collect(Collectors.toUnmodifiableMap(Object::getClass, Function.identity())); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java index 8451d1c9f3ef4..8927d290ecc77 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java @@ -8,11 +8,10 @@ package org.elasticsearch.painless.spi; -import java.util.AbstractMap; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -69,22 +68,12 @@ public WhitelistMethod( List<String> canonicalTypeNameParameters, List<Object> painlessAnnotations ) { - this.origin = Objects.requireNonNull(origin); this.augmentedCanonicalClassName = augmentedCanonicalClassName; this.methodName = methodName; this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName); - this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters)); - - if (painlessAnnotations.isEmpty()) { - this.painlessAnnotations = Collections.emptyMap(); - } else { - this.painlessAnnotations = Collections.unmodifiableMap( - Objects.requireNonNull(painlessAnnotations) - .stream() - .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ); - } + this.canonicalTypeNameParameters = List.copyOf(canonicalTypeNameParameters); + this.painlessAnnotations = painlessAnnotations.stream() + .collect(Collectors.toUnmodifiableMap(Object::getClass, Function.identity())); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 3fc572d8446bc..d32639bf3968f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -45,7 +45,7 @@ public final class PainlessClass { this.staticFields = Map.copyOf(staticFields); this.fields = Map.copyOf(fields); this.functionalInterfaceMethod = functionalInterfaceMethod; - this.annotations = annotations; + this.annotations = Map.copyOf(annotations); this.getterMethodHandles = Map.copyOf(getterMethodHandles); this.setterMethodHandles = Map.copyOf(setterMethodHandles); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index bf001c5e49db9..0c1497b541954 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -1680,6 +1680,7 @@ public PainlessLookup build() { ); } + classesToDirectSubClasses.replaceAll((key, set) -> Set.copyOf(set)); // save some memory, especially when set is empty return new PainlessLookup( javaClassNamesToClasses, canonicalClassNamesToClasses, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 14647820e71f6..0c1a0e41206c7 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -396,27 +396,29 @@ private void migrateWithTemplatesV2(String prefix, SystemIndexDescriptor... desc ); client().execute(PutComponentTemplateAction.INSTANCE, new PutComponentTemplateAction.Request("a-ct").componentTemplate(ct)).get(); - ComposableIndexTemplate cit = new ComposableIndexTemplate( - Collections.singletonList(prefix + "*"), - new Template( - null, - new CompressedXContent( - "{\n" - + " \"dynamic\": false,\n" - + " \"properties\": {\n" - + " \"field2\": {\n" - + " \"type\": \"keyword\"\n" - + " }\n" - + " }\n" - + " }" - ), - null - ), - Collections.singletonList("a-ct"), - 4L, - 5L, - Collections.singletonMap("baz", "thud") - ); + ComposableIndexTemplate cit = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(prefix + "*")) + .template( + new Template( + null, + new CompressedXContent( + "{\n" + + " \"dynamic\": false,\n" + + " \"properties\": {\n" + + " \"field2\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + ), + null + ) + ) + .componentTemplates(Collections.singletonList("a-ct")) + .priority(4L) + .version(5L) + .metadata(Collections.singletonMap("baz", "thud")) + .build(); client().execute(PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("a-it").indexTemplate(cit)) .get(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexIdTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexIdTests.java index 34db459539323..644787446547e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexIdTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexIdTests.java @@ -59,9 +59,12 @@ private ClusterState stateWithTemplate(Settings.Builder settings) { Template template = new Template(settings.build(), null, null); if (randomBoolean()) { metadata.put("c", new ComponentTemplate(template, null, null)); - metadata.put("c", new ComposableIndexTemplate(List.of("dest_index"), null, List.of("c"), null, null, null)); + metadata.put( + "c", + ComposableIndexTemplate.builder().indexPatterns(List.of("dest_index")).componentTemplates(List.of("c")).build() + ); } else { - metadata.put("c", new ComposableIndexTemplate(List.of("dest_index"), template, null, null, null, null)); + metadata.put("c", ComposableIndexTemplate.builder().indexPatterns(List.of("dest_index")).template(template).build()); } return ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); } diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 6af9bc9b11723..06b92b8138cf7 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import 
org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -889,7 +890,7 @@ public void testRecovery() throws Exception { if (isRunningAgainstOldCluster()) { count = between(200, 300); Settings.Builder settings = Settings.builder(); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null; @@ -941,7 +942,7 @@ public void testSnapshotRestore() throws IOException { // Create the index count = between(200, 300); Settings.Builder settings = Settings.builder(); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } createIndex(index, settings.build()); @@ -1435,7 +1436,7 @@ public void testPeerRecoveryRetentionLeases() throws Exception { public void testOperationBasedRecovery() throws Exception { if (isRunningAgainstOldCluster()) { Settings.Builder settings = indexSettings(1, 1); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null; @@ -1498,7 +1499,7 @@ public void testResize() throws Exception { final Settings.Builder settings = Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 3) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false); } final String mappings = randomBoolean() ? 
"\"_source\": { \"enabled\": false}" : null; diff --git a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 7af6ad49fb001..c3ee7307bf821 100644 --- a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -305,7 +305,7 @@ public void testRecovery() throws Exception { // before timing out .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } createIndex(index, settings.build()); @@ -340,7 +340,7 @@ public void testRetentionLeasesEstablishedWhenPromotingPrimary() throws Exceptio .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), between(1, 2)) // triggers nontrivial promotion .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } createIndex(index, settings.build()); @@ -363,7 +363,7 @@ public void testRetentionLeasesEstablishedWhenRelocatingPrimary() throws Excepti .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), between(0, 1)) .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } createIndex(index, settings.build()); @@ -461,7 +461,7 @@ public void testCloseIndexDuringRollingUpgrade() throws Exception { closeIndex(indexName); } - if (minimumNodeVersion.onOrAfter(Version.V_7_2_0)) { + if (minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)) { // index is created on a version that supports the replication of closed indices, // so we expect the index to be closed and replicated ensureGreen(indexName); @@ -501,7 +501,7 @@ public void testClosedIndexNoopRecovery() throws Exception { if (indexVersionCreated(indexName).onOrAfter(IndexVersions.V_7_2_0)) { // index was created on a version that supports the replication of closed indices, so we expect it to be closed and replicated - assertTrue(minimumNodeVersion().onOrAfter(Version.V_7_2_0)); + assertTrue(minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)); ensureGreen(indexName); assertClosedIndex(indexName, true); if (CLUSTER_TYPE != ClusterType.OLD) { @@ -648,7 +648,7 @@ public void testOperationBasedRecovery() throws Exception { final Settings.Builder settings = Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } final String 
mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null; @@ -733,7 +733,7 @@ public void testSoftDeletesDisabledWarning() throws Exception { if (CLUSTER_TYPE == ClusterType.OLD) { boolean softDeletesEnabled = true; Settings.Builder settings = Settings.builder(); - if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { + if (minimumIndexVersion().before(IndexVersions.V_8_0_0) && randomBoolean()) { softDeletesEnabled = randomBoolean(); settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), softDeletesEnabled); } diff --git a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml index f8b1de5155527..afe66594a490b 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml +++ b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml @@ -186,3 +186,18 @@ setup: - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' + +--- +"Test unassigned_shards, total_allocations, undesired_allocations and undesired_allocations_fraction": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in 8.12.0" + + - do: + _internal.get_desired_balance: { } + + - gte: { 'stats.unassigned_shards' : 0 } + - gte: { 'stats.total_allocations' : 0 } + - gte: { 'stats.undesired_allocations' : 0 } + - gte: { 'stats.undesired_allocations_fraction' : 0.0 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index 8e1d3431069cf..4647c85ba9caf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -221,3 +221,18 @@ setup: - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' + +--- +"Test unassigned_shards, total_allocations, undesired_allocations and undesired_allocations_fraction": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in 8.12.0" + + - do: + _internal.get_desired_balance: { } + + - gte: { 'stats.unassigned_shards' : 0 } + - gte: { 'stats.total_allocations' : 0 } + - gte: { 'stats.undesired_allocations' : 0 } + - gte: { 'stats.undesired_allocations_fraction' : 0.0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java index e5edeccbad55d..2c5c7a8c103b9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java @@ -205,18 +205,20 @@ public void testAutoCreateSystemAliasViaV1TemplateAllowsTemplates() throws Excep } private String autoCreateSystemAliasViaComposableTemplate(String indexName) throws Exception { - ComposableIndexTemplate cit = new ComposableIndexTemplate( - Collections.singletonList(indexName + "*"), - new Template( - null, - null, - Map.of(indexName + "-composable-alias", AliasMetadata.builder(indexName + "-composable-alias").build()) - ), - Collections.emptyList(), - 4L, - 5L, - Collections.emptyMap() - ); + ComposableIndexTemplate cit = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(indexName + "*")) + .template( + new Template( + null, + null, + Map.of(indexName + "-composable-alias", AliasMetadata.builder(indexName + "-composable-alias").build()) + ) + ) + .componentTemplates(Collections.emptyList()) + .priority(4L) + .version(5L) + .metadata(Collections.emptyMap()) + .build(); assertAcked( client().execute( PutComposableIndexTemplateAction.INSTANCE, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java index a0dffa8b7caa8..1c075442d99e6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java @@ -194,18 +194,20 @@ public void testCreateSystemAliasViaV1TemplateAllowsTemplates() throws Exception } private void createIndexWithComposableTemplates(String indexName, String primaryIndexName) throws Exception { - ComposableIndexTemplate cit = new ComposableIndexTemplate( - Collections.singletonList(indexName + "*"), - new Template( - null, - null, - Map.of(indexName + "-composable-alias", AliasMetadata.builder(indexName + "-composable-alias").build()) - ), - Collections.emptyList(), - 4L, - 5L, - Collections.emptyMap() - ); + ComposableIndexTemplate cit = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(indexName + "*")) + .template( + new Template( + null, + null, + Map.of(indexName + "-composable-alias", AliasMetadata.builder(indexName + "-composable-alias").build()) + ) + ) + .componentTemplates(Collections.emptyList()) + .priority(4L) + .version(5L) + .metadata(Collections.emptyMap()) + .build(); assertAcked( client().execute( PutComposableIndexTemplateAction.INSTANCE, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index d7e4e42b73554..7ae7fc5c4a180 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -794,7 +794,9 @@ public void testRolloverConcurrently() throws Exception { null, null ); - putTemplateRequest.indexTemplate(new ComposableIndexTemplate(List.of("test-*"), template, null, 100L, null, null)); + putTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder().indexPatterns(List.of("test-*")).template(template).priority(100L).build() + ); assertAcked(client().execute(PutComposableIndexTemplateAction.INSTANCE, 
putTemplateRequest).actionGet()); final CyclicBarrier barrier = new CyclicBarrier(numOfThreads); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java index fe447eca6e8fd..24bf198b7b42f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java @@ -18,7 +18,7 @@ import java.util.stream.Collectors; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasKey; public class ClusterFeaturesIT extends ESIntegTestCase { @@ -29,7 +29,7 @@ public void testClusterHasFeatures() { FeatureService service = internalCluster().getCurrentMasterNodeInstance(FeatureService.class); - assertThat(service.getNodeFeatures(), hasItem(FeatureService.FEATURES_SUPPORTED.id())); + assertThat(service.getNodeFeatures(), hasKey(FeatureService.FEATURES_SUPPORTED.id())); // check the nodes all have a feature in their cluster state (there should always be features_supported) var response = clusterAdmin().state(new ClusterStateRequest().clear().nodes(true)).actionGet(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/ComposableTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/ComposableTemplateIT.java index 29c38c07fcbd7..0e385768fc256 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/ComposableTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/ComposableTemplateIT.java @@ -33,9 +33,9 @@ public void testComponentTemplatesCanBeUpdatedAfterRestart() throws Exception { }"""), null), 3L, Collections.singletonMap("eggplant", "potato")); client().execute(PutComponentTemplateAction.INSTANCE, new PutComponentTemplateAction.Request("my-ct").componentTemplate(ct)).get(); - ComposableIndexTemplate cit = new ComposableIndexTemplate( - Collections.singletonList("coleslaw"), - new Template(null, new CompressedXContent(""" + ComposableIndexTemplate cit = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("coleslaw")) + .template(new Template(null, new CompressedXContent(""" { "dynamic": false, "properties": { @@ -43,12 +43,12 @@ public void testComponentTemplatesCanBeUpdatedAfterRestart() throws Exception { "type": "keyword" } } - }"""), null), - Collections.singletonList("my-ct"), - 4L, - 5L, - Collections.singletonMap("egg", "bread") - ); + }"""), null)) + .componentTemplates(Collections.singletonList("my-ct")) + .priority(4L) + .version(5L) + .metadata(Collections.singletonMap("egg", "bread")) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(cit) @@ -68,9 +68,9 @@ public void testComponentTemplatesCanBeUpdatedAfterRestart() throws Exception { }"""), null), 3L, Collections.singletonMap("eggplant", "potato")); client().execute(PutComponentTemplateAction.INSTANCE, new PutComponentTemplateAction.Request("my-ct").componentTemplate(ct2)).get(); - ComposableIndexTemplate cit2 = new ComposableIndexTemplate( - Collections.singletonList("coleslaw"), - new Template(null, new CompressedXContent(""" + ComposableIndexTemplate cit2 = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("coleslaw")) + .template(new Template(null, new 
CompressedXContent(""" { "dynamic": true, "properties": { @@ -78,12 +78,12 @@ public void testComponentTemplatesCanBeUpdatedAfterRestart() throws Exception { "type": "integer" } } - }"""), null), - Collections.singletonList("my-ct"), - 4L, - 5L, - Collections.singletonMap("egg", "bread") - ); + }"""), null)) + .componentTemplates(Collections.singletonList("my-ct")) + .priority(4L) + .version(5L) + .metadata(Collections.singletonMap("egg", "bread")) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(cit2) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index cc74dcc3d0d28..ac18177187372 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; @@ -19,7 +18,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; @ESIntegTestCase.SuiteScopeTestCase public class AggregationsIntegrationIT extends ESIntegTestCase { @@ -39,25 +38,32 @@ public void setupSuiteScopeCluster() throws Exception { public void testScroll() { final int size = randomIntBetween(1, 4); - SearchResponse response = prepareSearch("index").setSize(size) - .setScroll(TimeValue.timeValueMinutes(1)) - .addAggregation(terms("f").field("f")) - .get(); - assertNoFailures(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - Terms terms = aggregations.get("f"); - assertEquals(Math.min(numDocs, 3L), terms.getBucketByKey("0").getDocCount()); - - int total = response.getHits().getHits().length; - while (response.getHits().getHits().length > 0) { - response = client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertNoFailures(response); - assertNull(response.getAggregations()); - total += response.getHits().getHits().length; + final String[] scroll = new String[1]; + final int[] total = new int[1]; + assertNoFailuresAndResponse( + prepareSearch("index").setSize(size).setScroll(TimeValue.timeValueMinutes(1)).addAggregation(terms("f").field("f")), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + Terms terms = aggregations.get("f"); + assertEquals(Math.min(numDocs, 3L), terms.getBucketByKey("0").getDocCount()); + scroll[0] = response.getScrollId(); + total[0] = response.getHits().getHits().length; + } + ); + int currentTotal = 0; + while (total[0] - currentTotal > 0) { + currentTotal = total[0]; + assertNoFailuresAndResponse( + client().prepareSearchScroll(scroll[0]).setScroll(TimeValue.timeValueMinutes(1)), + scrollResponse -> { + 
assertNull(scrollResponse.getAggregations()); + total[0] += scrollResponse.getHits().getHits().length; + scroll[0] = scrollResponse.getScrollId(); + } + ); } - clearScroll(response.getScrollId()); - assertEquals(numDocs, total); + clearScroll(scroll[0]); + assertEquals(numDocs, total[0]); } - } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index b600098d82b33..15afd6897a40e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -14,7 +14,6 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.StreamInput; @@ -39,7 +38,7 @@ import java.util.Objects; import static java.util.Collections.singletonList; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.CoreMatchers.equalTo; @@ -72,21 +71,22 @@ public void testPlugin() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse response = prepareSearch().setSource( - new SearchSourceBuilder().ext(Collections.singletonList(new TermVectorsFetchBuilder("test"))) - ).get(); - assertNoFailures(response); - assertThat( - ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"), - equalTo(2) - ); - assertThat( - ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("am"), - equalTo(2) - ); - assertThat( - ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("sam"), - equalTo(1) + assertNoFailuresAndResponse( + prepareSearch().setSource(new SearchSourceBuilder().ext(Collections.singletonList(new TermVectorsFetchBuilder("test")))), + response -> { + assertThat( + ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"), + equalTo(2) + ); + assertThat( + ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("am"), + equalTo(2) + ); + assertThat( + ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("sam"), + equalTo(1) + ); + } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index dcfee8994b56b..ef3b382da7089 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.IndexSettings; @@ -53,6 +52,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -143,75 +143,84 @@ public void testSimpleNested() throws Exception { ); indexRandom(true, requests); - SearchResponse response = prepareSearch("articles").setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("1")); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); - assertThat(innerHits.getHits().length, equalTo(2)); - assertThat(innerHits.getAt(0).getId(), equalTo("1")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(innerHits.getAt(1).getId(), equalTo("1")); - assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1)); - - response = prepareSearch("articles").setQuery( - nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("2")); - assertThat(response.getHits().getAt(0).getShard(), notNullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); - assertThat(innerHits.getHits().length, equalTo(3)); - assertThat(innerHits.getAt(0).getId(), equalTo("2")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(innerHits.getAt(1).getId(), equalTo("2")); - assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1)); - assertThat(innerHits.getAt(2).getId(), equalTo("2")); - assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2)); - - response = prepareSearch("articles").setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) - .setExplain(true) - .addFetchField("comments.mes*") - .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) - 
.setSize(1) - ) - ).get(); - assertNoFailures(response); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); - assertThat(innerHits.getHits().length, equalTo(1)); - HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); - assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); - assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in")); - assertThat( - innerHits.getAt(0).getFields().get("comments").getValue(), - equalTo(Collections.singletonMap("message", Collections.singletonList("fox eat quick"))) + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("1")); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); + assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getHits().length, equalTo(2)); + assertThat(innerHits.getAt(0).getId(), equalTo("1")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(innerHits.getAt(1).getId(), equalTo("1")); + assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1)); + } + ); + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("2")); + assertThat(response.getHits().getAt(0).getShard(), notNullValue()); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); + assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getHits().length, equalTo(3)); + assertThat(innerHits.getAt(0).getId(), equalTo("2")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(innerHits.getAt(1).getId(), equalTo("2")); + assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1)); + assertThat(innerHits.getAt(2).getId(), equalTo("2")); + assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2)); + } + ); + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) + .setExplain(true) + .addFetchField("comments.mes*") + .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + .setSize(1) + ) + ), + response -> { + SearchHits innerHits = 
response.getHits().getAt(0).getInnerHits().get("comments"); + assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getHits().length, equalTo(1)); + HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); + assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); + assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in")); + assertThat( + innerHits.getAt(0).getFields().get("comments").getValue(), + equalTo(Collections.singletonMap("message", Collections.singletonList("fox eat quick"))) + ); + assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5")); + } + ); + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().addDocValueField("comments.mes*").setSize(1) + ) + ), + response -> { + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat")); + } ); - assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5")); - - response = prepareSearch("articles").setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().addDocValueField("comments.mes*").setSize(1) - ) - ).get(); - assertNoFailures(response); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat")); } public void testRandomNested() throws Exception { @@ -249,32 +258,31 @@ public void testRandomNested() throws Exception { new InnerHitBuilder("b").addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size) ) ); - SearchResponse searchResponse = prepareSearch("idx").setQuery(boolQuery).setSize(numDocs).addSort("foo", SortOrder.ASC).get(); - - assertNoFailures(searchResponse); - assertHitCount(searchResponse, numDocs); - assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs)); - for (int i = 0; i < numDocs; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat(searchHit.getShard(), notNullValue()); - SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); - for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { - SearchHit innerHit = inner.getAt(j); - assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); - assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j)); - assertThat(innerHit.getNestedIdentity().getChild(), nullValue()); - } - - inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); - for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { - SearchHit innerHit = inner.getAt(j); - assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); - assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j)); - assertThat(innerHit.getNestedIdentity().getChild(), nullValue()); + assertNoFailuresAndResponse(prepareSearch("idx").setQuery(boolQuery).setSize(numDocs).addSort("foo", SortOrder.ASC), response -> { + 
assertHitCount(response, numDocs); + assertThat(response.getHits().getHits().length, equalTo(numDocs)); + for (int i = 0; i < numDocs; i++) { + SearchHit searchHit = response.getHits().getAt(i); + assertThat(searchHit.getShard(), notNullValue()); + SearchHits inner = searchHit.getInnerHits().get("a"); + assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { + SearchHit innerHit = inner.getAt(j); + assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); + assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j)); + assertThat(innerHit.getNestedIdentity().getChild(), nullValue()); + } + + inner = searchHit.getInnerHits().get("b"); + assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { + SearchHit innerHit = inner.getAt(j); + assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); + assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j)); + assertThat(innerHit.getNestedIdentity().getChild(), nullValue()); + } } - } + }); } public void testNestedMultipleLayers() throws Exception { @@ -358,140 +366,154 @@ public void testNestedMultipleLayers() throws Exception { indexRandom(true, requests); // Check we can load the first doubly-nested document. - SearchResponse response = prepareSearch("articles").setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("1")); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("1")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("1")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); - + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("1")); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + 
assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("1")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("1")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); + } + ); // Check we can load the second doubly-nested document. - response = prepareSearch("articles").setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("1")); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("1")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("1")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); - + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("1")); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("1")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), 
equalTo("1")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); + } + ); // Directly refer to the second level: - response = prepareSearch("articles").setQuery( - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit(new InnerHitBuilder()) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("2")); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("2")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); - - response = prepareSearch("articles").setQuery( - nestedQuery( - "comments", + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("2")); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("2")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getId(), equalTo("2")); - assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); - assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); - - // Check that inner hits contain _source even when it's disabled on the parent request. 
- response = prepareSearch("articles").setFetchSource(false) - .setQuery( + new InnerHitBuilder() + ) + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("2")); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("2")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); + } + ); + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( nestedQuery( "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit( new InnerHitBuilder("remark") ), ScoreMode.Avg ).innerHit(new InnerHitBuilder()) - ) - .get(); - assertNoFailures(response); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); - - response = prepareSearch("articles").setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("2")); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("2")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getHits().length, equalTo(1)); + assertThat(innerHits.getAt(0).getId(), equalTo("2")); + assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); + assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); + } + ); + // Check that inner hits contain _source even when it's disabled on the parent request. 
+ assertNoFailuresAndResponse( + prepareSearch("articles").setFetchSource(false) + .setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)) - ).get(); - assertNoFailures(response); - innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + response -> { + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertNotNull(innerHits.getAt(0).getSourceAsMap()); + assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + } + ); + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)) + ), + response -> { + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertNotNull(innerHits.getAt(0).getSourceAsMap()); + assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + } + ); } // Issue #9723 @@ -513,20 +535,23 @@ public void testNestedDefinedAsObject() throws Exception { ); indexRandom(true, requests); - SearchResponse response = prepareSearch("articles").setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()) - ).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), - equalTo("comments") + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()) + ), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); + assertThat( + response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), + equalTo("comments") + ); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); + } ); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - 
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
     }
 
     public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
@@ -582,56 +607,62 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
         );
         indexRandom(true, requests);
 
-        SearchResponse resp1 = prepareSearch("articles").setQuery(
-            nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.FETCH_SOURCE)
-            )
-        ).get();
-        assertNoFailures(resp1);
-        assertHitCount(resp1, 1);
-        SearchHit parent = resp1.getHits().getAt(0);
-        assertThat(parent.getId(), equalTo("1"));
-        SearchHits inner = parent.getInnerHits().get("comments.messages");
-        assertThat(inner.getTotalHits().value, equalTo(2L));
-        assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}"));
-        assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}"));
-
-        SearchResponse response = prepareSearch("articles").setQuery(
-            nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
-            )
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        SearchHit hit = response.getHits().getAt(0);
-        assertThat(hit.getId(), equalTo("1"));
-        SearchHits messages = hit.getInnerHits().get("comments.messages");
-        assertThat(messages.getTotalHits().value, equalTo(2L));
-        assertThat(messages.getAt(0).getId(), equalTo("1"));
-        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2));
-        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
-        assertThat(messages.getAt(1).getId(), equalTo("1"));
-        assertThat(messages.getAt(1).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(messages.getAt(1).getNestedIdentity().getOffset(), equalTo(0));
-        assertThat(messages.getAt(1).getNestedIdentity().getChild(), nullValue());
-
-        response = prepareSearch("articles").setQuery(
-            nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
-            )
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        hit = response.getHits().getAt(0);
-        assertThat(hit.getId(), equalTo("1"));
-        messages = hit.getInnerHits().get("comments.messages");
-        assertThat(messages.getTotalHits().value, equalTo(1L));
-        assertThat(messages.getAt(0).getId(), equalTo("1"));
-        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
-        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
-
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
+                nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.FETCH_SOURCE)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                SearchHit parent = response.getHits().getAt(0);
+                assertThat(parent.getId(), equalTo("1"));
+                SearchHits inner = parent.getInnerHits().get("comments.messages");
+                assertThat(inner.getTotalHits().value, equalTo(2L));
+                assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}"));
+                assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}"));
+            }
+        );
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
+                nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                SearchHit hit = response.getHits().getAt(0);
+                assertThat(hit.getId(), equalTo("1"));
+                SearchHits messages = hit.getInnerHits().get("comments.messages");
+                assertThat(messages.getTotalHits().value, equalTo(2L));
+                assertThat(messages.getAt(0).getId(), equalTo("1"));
+                assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+                assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2));
+                assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
+                assertThat(messages.getAt(1).getId(), equalTo("1"));
+                assertThat(messages.getAt(1).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+                assertThat(messages.getAt(1).getNestedIdentity().getOffset(), equalTo(0));
+                assertThat(messages.getAt(1).getNestedIdentity().getChild(), nullValue());
+            }
+        );
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
+                nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                SearchHit hit = response.getHits().getAt(0);
+                assertThat(hit.getId(), equalTo("1"));
+                SearchHits messages = hit.getInnerHits().get("comments.messages");
+                assertThat(messages.getTotalHits().value, equalTo(1L));
+                assertThat(messages.getAt(0).getId(), equalTo("1"));
+                assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+                assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
+                assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
+            }
+        );
         // index the message in an object form instead of an array
         requests = new ArrayList<>();
         requests.add(
@@ -649,21 +680,24 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
             )
         );
         indexRandom(true, requests);
-        response = prepareSearch("articles").setQuery(
-            nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
-            )
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        hit = response.getHits().getAt(0);
-        assertThat(hit.getId(), equalTo("1"));
-        messages = hit.getInnerHits().get("comments.messages");
-        assertThat(messages.getTotalHits().value, equalTo(1L));
-        assertThat(messages.getAt(0).getId(), equalTo("1"));
-        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
-        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
+                nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                SearchHit hit = response.getHits().getAt(0);
+                assertThat(hit.getId(), equalTo("1"));
+                SearchHits messages = hit.getInnerHits().get("comments.messages");
+                assertThat(messages.getTotalHits().value, equalTo(1L));
+                assertThat(messages.getAt(0).getId(), equalTo("1"));
+                assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+                assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
+                assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
+            }
+        );
     }
 
     public void testMatchesQueriesNestedInnerHits() throws Exception {
@@ -759,28 +793,28 @@ public void testMatchesQueriesNestedInnerHits() throws Exception {
         query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit(
             new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC))
         );
-        SearchResponse searchResponse = prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC).get();
-        assertNoFailures(searchResponse);
-        assertAllSuccessful(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("0"));
-        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L));
-        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1"));
-        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3"));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1"));
-        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2"));
-
-        for (int i = 2; i < numDocs; i++) {
-            assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(String.valueOf(i)));
-            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
-            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
-            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3"));
-        }
+        assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> {
+            assertAllSuccessful(response);
+            assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs));
+            assertThat(response.getHits().getAt(0).getId(), equalTo("0"));
+            assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
+            assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1"));
+            assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1));
+            assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3"));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo("1"));
+            assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
+            assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2"));
+
+            for (int i = 2; i < numDocs; i++) {
+                assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i)));
+                assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
+                assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3"));
+            }
+        });
     }
 
     public void testNestedSource() throws Exception {
@@ -811,64 +845,76 @@ public void testNestedSource() throws Exception {
 
         // the field name (comments.message) used for source filtering should be the same as when using that field for
         // other features (like in the query dsl or aggs) in order for consistency:
-        SearchResponse response = prepareSearch().setQuery(
-            nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.message" }, null))
-            )
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1));
-        assertThat(
-            response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
-            equalTo("fox eat quick")
-        );
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1));
-        assertThat(
-            response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
-            equalTo("fox ate rabbit x y z")
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(
+                nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.message" }, null))
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1));
+                assertThat(
+                    response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
+                    equalTo("fox eat quick")
+                );
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1));
+                assertThat(
+                    response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
+                    equalTo("fox ate rabbit x y z")
+                );
+            }
         );
-        response = prepareSearch().setQuery(
-            nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
-        assertThat(
-            response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
-            equalTo("fox eat quick")
-        );
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
-        assertThat(
-            response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
-            equalTo("fox ate rabbit x y z")
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(
+                nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())
+            ),
+            response -> {
+                assertHitCount(response, 1);
+
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
+                assertThat(
+                    response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
+                    equalTo("fox eat quick")
+                );
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(2));
+                assertThat(
+                    response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
+                    equalTo("fox ate rabbit x y z")
+                );
+            }
         );
         // Source filter on a field that does not exist inside the nested document and just check that we do not fail and
         // return an empty _source:
-        response = prepareSearch().setQuery(
-            nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None).innerHit(
-                new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.missing_field" }, null))
-            )
-        ).get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0));
-
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(
+                nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None).innerHit(
+                    new InnerHitBuilder().setFetchSourceContext(
+                        FetchSourceContext.of(true, new String[] { "comments.missing_field" }, null)
+                    )
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0));
+            }
+        );
         // Check that inner hits contain _source even when it's disabled on the root request.
-        response = prepareSearch().setFetchSource(false)
-            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder()))
-            .get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
-        assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty());
+        assertNoFailuresAndResponse(
+            prepareSearch().setFetchSource(false)
+                .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())),
+            response -> {
+                assertHitCount(response, 1);
+                assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
+                assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty());
+            }
+        );
     }
 
     public void testInnerHitsWithIgnoreUnmapped() throws Exception {
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
index d7347ef21328f..b5243ed5a52ab 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.search.fetch.subphase;
 
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -32,6 +31,7 @@ import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
 import static org.elasticsearch.index.query.QueryBuilders.wrapperQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasKey;
@@ -46,44 +46,53 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception {
         client().prepareIndex("test").setId("3").setSource("name", "test3", "number", 3).get();
         refresh();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(
-            boolQuery().must(matchAllQuery())
-                .filter(
-                    boolQuery().should(rangeQuery("number").lt(2).queryName("test1")).should(rangeQuery("number").gte(2).queryName("test2"))
-                )
-        ).get();
-        assertHitCount(searchResponse, 3L);
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("3") || hit.getId().equals("2")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
-                assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
-            } else if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
-                assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
-            } else {
-                fail("Unexpected document returned with id " + hit.getId());
+        assertResponse(
+            prepareSearch().setQuery(
+                boolQuery().must(matchAllQuery())
+                    .filter(
+                        boolQuery().should(rangeQuery("number").lt(2).queryName("test1"))
+                            .should(rangeQuery("number").gte(2).queryName("test2"))
+                    )
+            ),
+            response -> {
+                assertHitCount(response, 3L);
+                for (SearchHit hit : response.getHits()) {
+                    if (hit.getId().equals("3") || hit.getId().equals("2")) {
+                        assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                        assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
+                        assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
+                    } else if (hit.getId().equals("1")) {
+                        assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                        assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
+                        assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
+                    } else {
+                        fail("Unexpected document returned with id " + hit.getId());
+                    }
+                }
             }
-        }
-
-        searchResponse = prepareSearch().setQuery(
-            boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))
-        ).get();
-        assertHitCount(searchResponse, 3L);
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("1") || hit.getId().equals("2")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
-                assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
-            } else if (hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
-                assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
-            } else {
-                fail("Unexpected document returned with id " + hit.getId());
+        );
+
+        assertResponse(
+            prepareSearch().setQuery(
+                boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))
+            ),
+            response -> {
+                assertHitCount(response, 3L);
+                for (SearchHit hit : response.getHits()) {
+                    if (hit.getId().equals("1") || hit.getId().equals("2")) {
+                        assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                        assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
+                        assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
+                    } else if (hit.getId().equals("3")) {
+                        assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                        assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
+                        assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
+                    } else {
+                        fail("Unexpected document returned with id " + hit.getId());
+                    }
+                }
            }
-        }
+        );
     }
 
     public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception {
@@ -95,50 +104,55 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception {
         client().prepareIndex("test").setId("3").setSource("name", "test").get();
         refresh();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .setPostFilter(
-                boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title"))
-            )
-            .get();
-        assertHitCount(searchResponse, 3L);
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
-                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("title"));
-                assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f));
-            } else if (hit.getId().equals("2") || hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
-                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
-            } else {
- fail("Unexpected document returned with id " + hit.getId()); + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .setPostFilter( + boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) + ), + response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else if (hit.getId().equals("2") || hit.getId().equals("3")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setPostFilter( - boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) - ) - .get(); - - assertHitCount(searchResponse, 3L); - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else if (hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + ); + + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .setPostFilter( + boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) + ), + response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else if (hit.getId().equals("2") || hit.getId().equals("3")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } + ); } public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Exception { @@ -150,37 +164,44 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex client().prepareIndex("test").setId("3").setSource("name", "test", "title", "title3").get(); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery( - boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", 
"title3").queryName("title")) - ).setPostFilter(termQuery("name", "test").queryName("name")).get(); - assertHitCount(searchResponse, 3L); - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + assertResponse( + prepareSearch().setQuery( + boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", "title3").queryName("title")) + ).setPostFilter(termQuery("name", "test").queryName("name")), + response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } - - searchResponse = prepareSearch().setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title")) - .setPostFilter(matchQuery("name", "test").queryName("name")) - .get(); - assertHitCount(searchResponse, 3L); - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + ); + + assertResponse( + prepareSearch().setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title")) + .setPostFilter(matchQuery("name", "test").queryName("name")), + response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } + ); } public void testRegExpQuerySupportsName() { @@ -190,18 +211,19 @@ public void testRegExpQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")).get(); - assertHitCount(searchResponse, 1L); + assertResponse(prepareSearch().setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")), response -> { + 
+            assertHitCount(response, 1L);
 
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("regex"));
-                assertThat(hit.getMatchedQueryScore("regex"), equalTo(1f));
-            } else {
-                fail("Unexpected document returned with id " + hit.getId());
+            for (SearchHit hit : response.getHits()) {
+                if (hit.getId().equals("1")) {
+                    assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                    assertThat(hit.getMatchedQueriesAndScores(), hasKey("regex"));
+                    assertThat(hit.getMatchedQueryScore("regex"), equalTo(1f));
+                } else {
+                    fail("Unexpected document returned with id " + hit.getId());
+                }
             }
-        }
+        });
     }
 
     public void testPrefixQuerySupportsName() {
@@ -211,18 +233,19 @@ public void testPrefixQuerySupportsName() {
         client().prepareIndex("test1").setId("1").setSource("title", "title1").get();
         refresh();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")).get();
-        assertHitCount(searchResponse, 1L);
+        assertResponse(prepareSearch().setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")), response -> {
+            assertHitCount(response, 1L);
 
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("prefix"));
-                assertThat(hit.getMatchedQueryScore("prefix"), equalTo(1f));
-            } else {
-                fail("Unexpected document returned with id " + hit.getId());
+            for (SearchHit hit : response.getHits()) {
+                if (hit.getId().equals("1")) {
+                    assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                    assertThat(hit.getMatchedQueriesAndScores(), hasKey("prefix"));
+                    assertThat(hit.getMatchedQueryScore("prefix"), equalTo(1f));
+                } else {
+                    fail("Unexpected document returned with id " + hit.getId());
+                }
             }
-        }
+        });
     }
 
     public void testFuzzyQuerySupportsName() {
@@ -232,18 +255,19 @@ public void testFuzzyQuerySupportsName() {
         client().prepareIndex("test1").setId("1").setSource("title", "title1").get();
         refresh();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.fuzzyQuery("title", "titel1").queryName("fuzzy")).get();
-        assertHitCount(searchResponse, 1L);
+        assertResponse(prepareSearch().setQuery(QueryBuilders.fuzzyQuery("title", "titel1").queryName("fuzzy")), response -> {
+            assertHitCount(response, 1L);
 
-        for (SearchHit hit : searchResponse.getHits()) {
-            if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
-                assertThat(hit.getMatchedQueriesAndScores(), hasKey("fuzzy"));
-                assertThat(hit.getMatchedQueryScore("fuzzy"), greaterThan(0f));
-            } else {
-                fail("Unexpected document returned with id " + hit.getId());
+            for (SearchHit hit : response.getHits()) {
+                if (hit.getId().equals("1")) {
+                    assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                    assertThat(hit.getMatchedQueriesAndScores(), hasKey("fuzzy"));
+                    assertThat(hit.getMatchedQueryScore("fuzzy"), greaterThan(0f));
+                } else {
+                    fail("Unexpected document returned with id " + hit.getId());
+                }
             }
-        }
+        });
     }
 
     public void testWildcardQuerySupportsName() {
@@ -253,18 +277,19 @@ public void testWildcardQuerySupportsName() {
         client().prepareIndex("test1").setId("1").setSource("title", "title1").get();
         refresh();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.wildcardQuery("title",
"titl*").queryName("wildcard")).get(); - assertHitCount(searchResponse, 1L); + assertResponse(prepareSearch().setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard")), response -> { + assertHitCount(response, 1L); - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("wildcard")); - assertThat(hit.getMatchedQueryScore("wildcard"), equalTo(1f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("wildcard")); + assertThat(hit.getMatchedQueryScore("wildcard"), equalTo(1f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } } - } + }); } public void testSpanFirstQuerySupportsName() { @@ -274,20 +299,22 @@ public void testSpanFirstQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1 title2").get(); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery( - QueryBuilders.spanFirstQuery(QueryBuilders.spanTermQuery("title", "title1"), 10).queryName("span") - ).get(); - assertHitCount(searchResponse, 1L); - - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("span")); - assertThat(hit.getMatchedQueryScore("span"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + assertResponse( + prepareSearch().setQuery(QueryBuilders.spanFirstQuery(QueryBuilders.spanTermQuery("title", "title1"), 10).queryName("span")), + response -> { + assertHitCount(response, 1L); + + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("span")); + assertThat(hit.getMatchedQueryScore("span"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } + ); } /** @@ -304,26 +331,29 @@ public void testMatchedWithShould() throws Exception { // Execute search at least two times to load it in cache int iter = scaledRandomIntBetween(2, 10); for (int i = 0; i < iter; i++) { - SearchResponse searchResponse = prepareSearch().setQuery( - boolQuery().minimumShouldMatch(1) - .should(queryStringQuery("dolor").queryName("dolor")) - .should(queryStringQuery("elit").queryName("elit")) - ).get(); - - assertHitCount(searchResponse, 2L); - for (SearchHit hit : searchResponse.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("dolor")); - assertThat(hit.getMatchedQueryScore("dolor"), greaterThan(0f)); - } else if (hit.getId().equals("2")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("elit")); - assertThat(hit.getMatchedQueryScore("elit"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); + assertResponse( + prepareSearch().setQuery( + boolQuery().minimumShouldMatch(1) + .should(queryStringQuery("dolor").queryName("dolor")) + 
.should(queryStringQuery("elit").queryName("elit")) + ), + response -> { + assertHitCount(response, 2L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("dolor")); + assertThat(hit.getMatchedQueryScore("dolor"), greaterThan(0f)); + } else if (hit.getId().equals("2")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("elit")); + assertThat(hit.getMatchedQueryScore("elit"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } } - } + ); } } @@ -340,12 +370,13 @@ public void testMatchedWithWrapperQuery() throws Exception { BytesReference termBytes = XContentHelper.toXContent(termQueryBuilder, XContentType.JSON, false); QueryBuilder[] queries = new QueryBuilder[] { wrapperQuery(matchBytes), constantScoreQuery(wrapperQuery(termBytes)) }; for (QueryBuilder query : queries) { - SearchResponse searchResponse = prepareSearch().setQuery(query).get(); - assertHitCount(searchResponse, 1L); - SearchHit hit = searchResponse.getHits().getAt(0); - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("abc")); - assertThat(hit.getMatchedQueryScore("abc"), greaterThan(0f)); + assertResponse(prepareSearch().setQuery(query), response -> { + assertHitCount(response, 1L); + SearchHit hit = response.getHits().getAt(0); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("abc")); + assertThat(hit.getMatchedQueryScore("abc"), greaterThan(0f)); + }); } } @@ -357,16 +388,19 @@ public void testMatchedWithRescoreQuery() throws Exception { client().prepareIndex("test").setId("2").setSource("content", "hello you").get(); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(new MatchAllQueryBuilder().queryName("all")) - .setRescorer( - new QueryRescorerBuilder(new MatchPhraseQueryBuilder("content", "hello you").boost(10).queryName("rescore_phrase")) - ) - .get(); - assertHitCount(searchResponse, 2L); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries(), equalTo(new String[] { "all", "rescore_phrase" })); - - assertThat(searchResponse.getHits().getAt(1).getMatchedQueries().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(1).getMatchedQueries(), equalTo(new String[] { "all" })); + assertResponse( + prepareSearch().setQuery(new MatchAllQueryBuilder().queryName("all")) + .setRescorer( + new QueryRescorerBuilder(new MatchPhraseQueryBuilder("content", "hello you").boost(10).queryName("rescore_phrase")) + ), + response -> { + assertHitCount(response, 2L); + assertThat(response.getHits().getAt(0).getMatchedQueries().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getMatchedQueries(), equalTo(new String[] { "all", "rescore_phrase" })); + + assertThat(response.getHits().getAt(1).getMatchedQueries().length, equalTo(1)); + assertThat(response.getHits().getAt(1).getMatchedQueries(), equalTo(new String[] { "all" })); + } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java index 
index 6b790f9e6f090..0dbf3af735b44 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
@@ -7,7 +7,6 @@
  */
 package org.elasticsearch.search.fetch.subphase.highlight;
 
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -22,6 +21,7 @@ import java.util.Map;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.hamcrest.Matchers.equalTo;
 
 /**
@@ -45,10 +45,11 @@ protected void setup() throws Exception {
     }
 
     public void testThatCustomHighlightersAreSupported() throws IOException {
-        SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
-            .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom"))
-            .get();
-        assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1"));
+        assertResponse(
+            prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
+                .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom")),
+            response -> assertHighlight(response, 0, "name", 0, equalTo("standard response for name at position 1"))
+        );
     }
 
     public void testThatCustomHighlighterCanBeConfiguredPerField() throws Exception {
@@ -58,44 +59,49 @@ public void testThatCustomHighlighterCanBeConfiguredPerField() throws Exception
        options.put("myFieldOption", "someValue");
        highlightConfig.options(options);
 
-        SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
-            .highlighter(new HighlightBuilder().field(highlightConfig))
-            .get();
-
-        assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1"));
-        assertHighlight(searchResponse, 0, "name", 1, equalTo("field:myFieldOption:someValue"));
+        assertResponse(
+            prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).highlighter(new HighlightBuilder().field(highlightConfig)),
+            response -> {
+                assertHighlight(response, 0, "name", 0, equalTo("standard response for name at position 1"));
+                assertHighlight(response, 0, "name", 1, equalTo("field:myFieldOption:someValue"));
+            }
+        );
     }
 
     public void testThatCustomHighlighterCanBeConfiguredGlobally() throws Exception {
         Map options = new HashMap<>();
         options.put("myGlobalOption", "someValue");
 
-        SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
-            .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options))
-            .get();
-
-        assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1"));
-        assertHighlight(searchResponse, 0, "name", 1, equalTo("field:myGlobalOption:someValue"));
+        assertResponse(
+            prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
+                .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options)),
+            response -> {
+                assertHighlight(response, 0, "name", 0, equalTo("standard response for name at position 1"));
+                assertHighlight(response, 0, "name", 1, equalTo("field:myGlobalOption:someValue"));
+            }
+        );
     }
 
     public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception {
-        SearchResponse searchResponse = prepareSearch("test").setQuery(
-            QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary"))
-        )
-            .highlighter(
-                new HighlightBuilder().highlighterType("test-custom")
-                    .field("name")
-                    .field("other_name")
-                    .field("other_other_name")
-                    .useExplicitFieldOrder(true)
             )
-            .get();
-
-        assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1"));
-        assertHighlight(searchResponse, 0, "other_name", 0, equalTo("standard response for other_name at position 2"));
-        assertHighlight(searchResponse, 0, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
-        assertHighlight(searchResponse, 1, "name", 0, equalTo("standard response for name at position 1"));
-        assertHighlight(searchResponse, 1, "other_name", 0, equalTo("standard response for other_name at position 2"));
-        assertHighlight(searchResponse, 1, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary"))
+            )
+                .highlighter(
+                    new HighlightBuilder().highlighterType("test-custom")
+                        .field("name")
+                        .field("other_name")
+                        .field("other_other_name")
+                        .useExplicitFieldOrder(true)
+                ),
+            response -> {
+                assertHighlight(response, 0, "name", 0, equalTo("standard response for name at position 1"));
+                assertHighlight(response, 0, "other_name", 0, equalTo("standard response for other_name at position 2"));
+                assertHighlight(response, 0, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
+                assertHighlight(response, 1, "name", 0, equalTo("standard response for name at position 1"));
+                assertHighlight(response, 1, "other_name", 0, equalTo("standard response for other_name at position 2"));
+                assertHighlight(response, 1, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
+            }
+        );
     }
 }
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 6500b969ee273..5dcfd861c91a0 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -94,7 +93,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNotHighlighted;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsString;
@@ -139,13 +140,16 @@ public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOExceptio
         refresh();
 
         for (BoundaryScannerType scanner : BoundaryScannerType.values()) {
-            SearchResponse search = prepareSearch().addSort(SortBuilders.fieldSort("sort"))
-                .setQuery(matchQuery("tags", "foo bar"))
-                .highlighter(new HighlightBuilder().field(new Field("tags")).numOfFragments(2).boundaryScannerType(scanner))
-                .get();
-            assertHighlight(search, 0, "tags", 0, 2, equalTo("foo bar"));
-            assertHighlight(search, 0, "tags", 1, 2, equalTo("foo bar"));
-            assertHighlight(search, 1, "tags", 0, 1, equalTo("foo bar"));
+            assertResponse(
+                prepareSearch().addSort(SortBuilders.fieldSort("sort"))
+                    .setQuery(matchQuery("tags", "foo bar"))
+                    .highlighter(new HighlightBuilder().field(new Field("tags")).numOfFragments(2).boundaryScannerType(scanner)),
+                response -> {
+                    assertHighlight(response, 0, "tags", 0, 2, equalTo("foo bar"));
+                    assertHighlight(response, 0, "tags", 1, 2, equalTo("foo bar"));
+                    assertHighlight(response, 1, "tags", 0, 1, equalTo("foo bar"));
+                }
+            );
         }
     }
 
@@ -164,10 +168,10 @@ public void testHighlightingWithStoredKeyword() throws IOException {
         assertAcked(prepareCreate("test").setMapping(mappings));
         client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get();
         refresh();
-        SearchResponse search = prepareSearch().setQuery(matchQuery("text", "foo"))
-            .highlighter(new HighlightBuilder().field(new Field("text")))
-            .get();
-        assertHighlight(search, 0, "text", 0, equalTo("foo"));
+        assertResponse(
+            prepareSearch().setQuery(matchQuery("text", "foo")).highlighter(new HighlightBuilder().field(new Field("text"))),
+            response -> assertHighlight(response, 0, "text", 0, equalTo("foo"))
+        );
     }
 
     public void testHighlightingWithWildcardName() throws IOException {
@@ -189,10 +193,11 @@ public void testHighlightingWithWildcardName() throws IOException {
         client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get();
         refresh();
         for (String type : ALL_TYPES) {
-            SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
-                .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type)))
-                .get();
-            assertHighlight(search, 0, "text", 0, equalTo("text"));
+            assertResponse(
+                prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
+                    .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type))),
+                response -> assertHighlight(response, 0, "text", 0, equalTo("text"))
+            );
         }
     }
 
@@ -220,8 +225,10 @@ public void testFieldAlias() throws IOException {
         for (String type : ALL_TYPES) {
             HighlightBuilder builder = new HighlightBuilder().field(new Field("alias").highlighterType(type))
                 .requireFieldMatch(randomBoolean());
-            SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get();
-            assertHighlight(search, 0, "alias", 0, equalTo("foo"));
+            assertResponse(
+                prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder),
+                response -> assertHighlight(response, 0, "alias", 0, equalTo("foo"))
+            );
         }
     }
 
@@ -250,8 +257,10 @@ public void testFieldAliasWithSourceLookup() throws IOException {
         for (String type : ALL_TYPES) {
             HighlightBuilder builder = new HighlightBuilder().field(new Field("alias").highlighterType(type))
                 .requireFieldMatch(randomBoolean());
-            SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "bar")).highlighter(builder).get();
-            assertHighlight(search, 0, "alias", 0, equalTo("foo bar"));
+            assertResponse(
+                prepareSearch().setQuery(matchQuery("alias", "bar")).highlighter(builder),
+                response -> assertHighlight(response, 0, "alias", 0, equalTo("foo bar"))
+            );
         }
     }
 
@@ -275,8 +284,10 @@ public void testFieldAliasWithWildcardField() throws IOException {
         refresh();
 
         HighlightBuilder builder = new HighlightBuilder().field(new Field("al*")).requireFieldMatch(false);
-        SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get();
-        assertHighlight(search, 0, "alias", 0, equalTo("foo"));
+        assertResponse(
+            prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder),
+            response -> assertHighlight(response, 0, "alias", 0, equalTo("foo"))
+        );
     }
 
     public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException {
@@ -309,15 +320,16 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOExc
             .get();
         refresh();
         for (String type : ALL_TYPES) {
-            SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
-                .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type)))
-                .get();
-            assertHighlight(search, 0, "text", 0, equalTo("text"));
-            search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
-                .highlighter(new HighlightBuilder().field(new Field("unstored_text")))
-                .get();
-            assertNoFailures(search);
-            assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0));
+            assertResponse(
+                prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
+                    .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type))),
+                response -> assertHighlight(response, 0, "text", 0, equalTo("text"))
+            );
+            assertNoFailuresAndResponse(
+                prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
+                    .highlighter(new HighlightBuilder().field(new Field("unstored_text"))),
+                response -> assertThat(response.getHits().getAt(0).getHighlightFields().size(), equalTo(0))
+            );
         }
     }
 
@@ -330,10 +342,12 @@ public void testHighTermFrequencyDoc() throws IOException {
         }
         client().prepareIndex("test").setId("1").setSource("name", builder.toString()).get();
         refresh();
-        SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "abc")))
-            .highlighter(new HighlightBuilder().field("name"))
-            .get();
-        assertHighlight(search, 0, "name", 0, startsWith("abc abc abc abc"));
+        assertResponse(
+            prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "abc"))).highlighter(new HighlightBuilder().field("name")),
+            response -> {
+                assertHighlight(response, 0, "name", 0, startsWith("abc abc abc abc"));
+            }
+        );
     }
 
     public void testEnsureNoNegativeOffsets() throws Exception {
@@ -433,22 +447,31 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception
         }
         indexRandom(true, indexRequestBuilders);
 
-        SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug"))
-            .highlighter(new HighlightBuilder().field("title", -1, 0))
-            .get();
-
-        for (int i = 0; i < indexRequestBuilders.length; i++) {
-            assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch"));
-        }
-
-        search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment"))
-            .highlighter(new HighlightBuilder().field("attachments.body", -1, 0))
-            .get();
+        assertResponse(
+            prepareSearch().setQuery(matchQuery("title", "bug")).highlighter(new HighlightBuilder().field("title", -1, 0)),
+            response -> {
+                for (int i = 0; i < indexRequestBuilders.length; i++) {
+                    assertHighlight(
+                        response,
+                        i,
+                        "title",
+                        0,
+                        equalTo("This is a test on the highlighting bug present in elasticsearch")
+                    );
+                }
+            }
+        );
 
-        for (int i = 0; i < indexRequestBuilders.length; i++) {
-            assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1"));
-            assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2"));
-        }
+        assertResponse(
+            prepareSearch().setQuery(matchQuery("attachments.body", "attachment"))
HighlightBuilder().field("attachments.body", -1, 0)) - .get(); + assertResponse( + prepareSearch().setQuery(matchQuery("title", "bug")).highlighter(new HighlightBuilder().field("title", -1, 0)), + response -> { + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight( + response, + i, + "title", + 0, + equalTo("This is a test on the highlighting bug present in elasticsearch") + ); + } + } + ); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); - assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); - } + assertResponse( + prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) + .highlighter(new HighlightBuilder().field("attachments.body", -1, 0)), + response -> { + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(response, i, "attachments.body", 0, equalTo("attachment 1")); + assertHighlight(response, i, "attachments.body", 1, equalTo("attachment 2")); + } + } + ); } @@ -500,23 +523,32 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce } indexRandom(true, indexRequestBuilders); - SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) - .highlighter(new HighlightBuilder().field("title", -1, 0)) - .get(); - - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); - } - - search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) - .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) - .execute() - .get(); + assertResponse( + prepareSearch().setQuery(matchQuery("title", "bug")).highlighter(new HighlightBuilder().field("title", -1, 0)), + response -> { + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight( + response, + i, + "title", + 0, + equalTo("This is a test on the highlighting bug present in elasticsearch") + ); + } + } + ); - for (int i = 0; i < 5; i++) { - assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); - assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); - } + assertResponse( + prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) + .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) + .execute(), + response -> { + for (int i = 0; i < 5; i++) { + assertHighlight(response, i, "attachments.body", 0, equalTo("attachment 1")); + assertHighlight(response, i, "attachments.body", 1, equalTo("attachment 2")); + } + } + ); } public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Exception { @@ -571,46 +603,52 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except } indexRandom(true, indexRequestBuilders); - SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) - // asking for the whole field to be highlighted - .highlighter(new HighlightBuilder().field("title", -1, 0)) - .get(); - - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight( - search, - i, - "title", - 0, - equalTo("This is a test on the highlighting bug present in elasticsearch. 
- );
- assertHighlight(search, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on."));
- }
-
- search = prepareSearch().setQuery(matchQuery("title", "bug"))
- // sentences will be generated out of each value
- .highlighter(new HighlightBuilder().field("title"))
- .get();
-
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(
- search,
- i,
- "title",
- 0,
- equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch. Hopefully it works.")
- );
- assertHighlight(search, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on."));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "bug"))
+ // asking for the whole field to be highlighted
+ .highlighter(new HighlightBuilder().field("title", -1, 0)),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch. Hopefully it works.")
+ );
+ assertHighlight(response, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on."));
+ }
+ }
+ );
- search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment"))
- .highlighter(new HighlightBuilder().field("attachments.body", -1, 2))
- .get();
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "bug"))
+ // sentences will be generated out of each value
+ .highlighter(new HighlightBuilder().field("title")),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch. Hopefully it works.")
+ );
+ assertHighlight(response, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on."));
+ }
+ }
+ );
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(search, i, "attachments.body", 0, equalTo("<em>attachment</em> for this test"));
- assertHighlight(search, i, "attachments.body", 1, 2, equalTo("<em>attachment</em> 2"));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("attachments.body", "attachment"))
+ .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(response, i, "attachments.body", 0, equalTo("<em>attachment</em> for this test"));
+ assertHighlight(response, i, "attachments.body", 1, 2, equalTo("<em>attachment</em> 2"));
+ }
+ }
+ );
}
public void testHighlightIssue1994() throws Exception {
@@ -631,21 +669,35 @@ public void testHighlightIssue1994() throws Exception {
client().prepareIndex("test").setId("2").setSource("titleTV", new String[] { "some text to highlight", "highlight other text" })
);
- SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug"))
- .highlighter(new HighlightBuilder().field("title", -1, 2).field("titleTV", -1, 2).requireFieldMatch(false))
- .get();
-
- assertHighlight(search, 0, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch"));
- assertHighlight(search, 0, "title", 1, 2, equalTo("The <em>bug</em> is bugging us"));
- assertHighlight(search, 0, "titleTV", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch"));
- assertHighlight(search, 0, "titleTV", 1, 2, equalTo("The <em>bug</em> is bugging us"));
-
- search = prepareSearch().setQuery(matchQuery("titleTV", "highlight"))
- .highlighter(new HighlightBuilder().field("titleTV", -1, 2))
- .get();
- assertHighlight(search, 0, "titleTV", 0, equalTo("some text to <em>highlight</em>"));
- assertHighlight(search, 0, "titleTV", 1, 2, equalTo("<em>highlight</em> other text"));
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "bug"))
+ .highlighter(new HighlightBuilder().field("title", -1, 2).field("titleTV", -1, 2).requireFieldMatch(false)),
+ response -> {
+ assertHighlight(
+ response,
+ 0,
+ "title",
+ 0,
+ equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")
+ );
+ assertHighlight(response, 0, "title", 1, 2, equalTo("The <em>bug</em> is bugging us"));
+ assertHighlight(
+ response,
+ 0,
+ "titleTV",
+ 0,
+ equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")
+ );
+ assertHighlight(response, 0, "titleTV", 1, 2, equalTo("The <em>bug</em> is bugging us"));
+ }
+ );
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("titleTV", "highlight")).highlighter(new HighlightBuilder().field("titleTV", -1, 2)),
+ response -> {
+ assertHighlight(response, 0, "titleTV", 0, equalTo("some text to <em>highlight</em>"));
+ assertHighlight(response, 0, "titleTV", 1, 2, equalTo("<em>highlight</em> other text"));
+ }
+ );
}
public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() {
@@ -679,11 +731,11 @@ public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() {
)
);
- SearchResponse searchResponse = prepareSearch("test").setSource(source).get();
-
- assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo("test"));
- assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("test"));
- assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("yet another test"));
+ assertResponse(prepareSearch("test").setSource(source), response -> {
+ assertHighlight(response, 0, "field1", 0, 2, equalTo("test"));
+ assertHighlight(response, 0, "field1", 1, 2, equalTo("test"));
+ assertHighlight(response, 0, "field2", 0, 1, equalTo("yet another test"));
+ });
}
// Issue #5175
@@ -718,18 +770,18 @@ public void testHighlightingOnWildcardFields() throws Exception {
.query(termQuery("field-postings", "test"))
.highlighter(highlight().field("field*").preTags("").postTags("").requireFieldMatch(false));
- SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
-
- assertHighlight(
- searchResponse,
- 0,
- "field-postings",
- 0,
- 1,
- equalTo("This is the first test sentence. Here is the second one.")
- );
- assertHighlight(searchResponse, 0, "field-fvh", 0, 1, equalTo("This is the test with term_vectors"));
- assertHighlight(searchResponse, 0, "field-plain", 0, 1, equalTo("This is the test for the plain highlighter"));
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ assertHighlight(
+ response,
+ 0,
+ "field-postings",
+ 0,
+ 1,
+ equalTo("This is the first test sentence. 
Here is the second one.") + ); + assertHighlight(response, 0, "field-fvh", 0, 1, equalTo("This is the test with term_vectors")); + assertHighlight(response, 0, "field-plain", 0, 1, equalTo("This is the test for the plain highlighter")); + }); } public void testPlainHighlighter() throws Exception { @@ -756,23 +808,23 @@ public void testPlainHighlighterOrder() throws Exception { SearchSourceBuilder source = searchSource().query(matchQuery("field1", "brown dog")) .highlighter(highlight().highlighterType("plain").field("field1").preTags("").postTags("").fragmentSize(25)); - SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo("The quick brown fox")); - assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); - assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + assertResponse(prepareSearch("test").setSource(source), response -> { + assertHighlight(response, 0, "field1", 0, 3, equalTo("The quick brown fox")); + assertHighlight(response, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); + assertHighlight(response, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + }); // lets be explicit about the order source = searchSource().query(matchQuery("field1", "brown dog")) .highlighter( highlight().highlighterType("plain").field("field1").order("none").preTags("").postTags("").fragmentSize(25) ); - searchResponse = prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo("The quick brown fox")); - assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); - assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + assertResponse(prepareSearch("test").setSource(source), response -> { + assertHighlight(response, 0, "field1", 0, 3, equalTo("The quick brown fox")); + assertHighlight(response, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); + assertHighlight(response, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + }); } { // order by score @@ -781,11 +833,11 @@ public void testPlainHighlighterOrder() throws Exception { highlight().highlighterType("plain").order("score").field("field1").preTags("").postTags("").fragmentSize(25) ); - SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo(" jumps over the lazy brown dog")); - assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo("The quick brown fox")); - assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + assertResponse(prepareSearch("test").setSource(source), response -> { + assertHighlight(response, 0, "field1", 0, 3, equalTo(" jumps over the lazy brown dog")); + assertHighlight(response, 0, "field1", 1, 3, equalTo("The quick brown fox")); + assertHighlight(response, 0, "field1", 2, 3, equalTo(" dog doesn't care")); + }); } } @@ -836,25 +888,25 @@ public void testHighlighterWithSentenceBoundaryScanner() throws Exception { .postTags("") .boundaryScannerType(BoundaryScannerType.SENTENCE) ); - SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); - - assertHighlight( - searchResponse, - 0, - "field1", - 0, - 2, - anyOf(equalTo("A sentence with few words"), equalTo("A sentence with few words. 
")) - ); + assertResponse(prepareSearch("test").setSource(source), response -> { + assertHighlight( + response, + 0, + "field1", + 0, + 2, + anyOf(equalTo("A sentence with few words"), equalTo("A sentence with few words. ")) + ); - assertHighlight( - searchResponse, - 0, - "field1", - 1, - 2, - anyOf(equalTo("Another sentence with"), equalTo("Another sentence with even more words. ")) - ); + assertHighlight( + response, + 0, + "field1", + 1, + 2, + anyOf(equalTo("Another sentence with"), equalTo("Another sentence with even more words. ")) + ); + }); } } @@ -879,25 +931,25 @@ public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Excepti .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag()) ); - SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); - - assertHighlight( - searchResponse, - 0, - "field1", - 0, - 2, - anyOf(equalTo("A sentence with few words"), equalTo("A sentence with few words. ")) - ); + assertResponse(prepareSearch("test").setSource(source), response -> { + assertHighlight( + response, + 0, + "field1", + 0, + 2, + anyOf(equalTo("A sentence with few words"), equalTo("A sentence with few words. ")) + ); - assertHighlight( - searchResponse, - 0, - "field1", - 1, - 2, - anyOf(equalTo("Another sentence with"), equalTo("Another sentence with even more words. ")) - ); + assertHighlight( + response, + 0, + "field1", + 1, + 2, + anyOf(equalTo("Another sentence with"), equalTo("Another sentence with even more words. ")) + ); + }); } } @@ -971,12 +1023,16 @@ public void testFVHManyMatches() throws Exception { client().prepareIndex("test").setSource("field1", value).get(); refresh(); + final long[] tookDefaultPhrase = new long[1]; + final long[] tookLargePhrase = new long[1]; + logger.info("--> highlighting and searching on field1 with default phrase limit"); SearchSourceBuilder source = searchSource().query(termQuery("field1", "t")) .highlighter(highlight().highlighterType("fvh").field("field1", 20, 1).order("score").preTags("").postTags("")); - SearchResponse defaultPhraseLimit = client().search(new SearchRequest("test").source(source)).actionGet(); - assertHighlight(defaultPhraseLimit, 0, "field1", 0, 1, containsString("t")); - + assertResponse(client().search(new SearchRequest("test").source(source)), defaultPhraseLimit -> { + assertHighlight(defaultPhraseLimit, 0, "field1", 0, 1, containsString("t")); + tookDefaultPhrase[0] = defaultPhraseLimit.getTook().getMillis(); + }); logger.info("--> highlighting and searching on field1 with large phrase limit"); source = searchSource().query(termQuery("field1", "t")) .highlighter( @@ -987,15 +1043,16 @@ public void testFVHManyMatches() throws Exception { .postTags("") .phraseLimit(30000) ); - SearchResponse largePhraseLimit = client().search(new SearchRequest("test").source(source)).actionGet(); - assertHighlight(largePhraseLimit, 0, "field1", 0, 1, containsString("t")); - + assertResponse(client().search(new SearchRequest("test").source(source)), largePhraseLimit -> { + assertHighlight(largePhraseLimit, 0, "field1", 0, 1, containsString("t")); + tookLargePhrase[0] = largePhraseLimit.getTook().getMillis(); + }); /* * I hate comparing times because it can be inconsistent but default is * in the neighborhood of 300ms and the large phrase limit is in the * neighborhood of 8 seconds. 
*/
- assertThat(defaultPhraseLimit.getTook().getMillis(), lessThan(largePhraseLimit.getTook().getMillis()));
+ assertThat(tookDefaultPhrase[0], lessThan(tookLargePhrase[0]));
}
public void testMatchedFieldsFvhRequireFieldMatch() throws Exception {
@@ -1071,12 +1128,16 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception
SearchRequestBuilder req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField));
// First check highlighting without any matched fields set
- SearchResponse resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
+ assertResponse(
+ req.setQuery(queryStringQuery("running scissors").field("foo")),
+ response -> assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"))
+ );
// And that matching a subfield doesn't automatically highlight it
- resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("running with <em>scissors</em>"));
+ assertResponse(
+ req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")),
+ response -> assertHighlight(response, 0, "foo", 0, equalTo("running with <em>scissors</em>"))
+ );
// Add the subfield to the list of matched fields but don't match it. Everything should still work
// like before we added it.
@@ -1087,12 +1148,16 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception
.requireFieldMatch(requireFieldMatch);
fooField.matchedFields("foo", "foo.plain");
req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField));
- resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
+ assertResponse(
+ req.setQuery(queryStringQuery("running scissors").field("foo")),
+ response -> assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"))
+ );
// Now make half the matches come from the stored field and half from just a matched field.
- resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
+ assertResponse(
+ req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")),
+ response -> assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"))
+ );
// Now remove the stored field from the matched field list. That should work too.
fooField = new Field("foo").numOfFragments(1)
@@ -1102,8 +1167,10 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception
.requireFieldMatch(requireFieldMatch);
fooField.matchedFields("foo.plain");
req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField));
- resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with scissors"));
+ assertResponse(
+ req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")),
+ response -> assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with scissors"))
+ );
// Now make sure boosted fields don't blow up when matched fields is both the subfield and stored field.
fooField = new Field("foo").numOfFragments(1) @@ -1113,28 +1180,40 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .requireFieldMatch(requireFieldMatch); fooField.matchedFields("foo", "foo.plain"); req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); - resp = req.setQuery(queryStringQuery("foo.plain:running^5 scissors").field("foo")).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); + assertResponse( + req.setQuery(queryStringQuery("foo.plain:running^5 scissors").field("foo")), + response -> assertHighlight(response, 0, "foo", 0, equalTo("running with scissors")) + ); // Now just all matches are against the matched field. This still returns highlighting. - resp = req.setQuery(queryStringQuery("foo.plain:running foo.plain:scissors").field("foo")).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); + assertResponse( + req.setQuery(queryStringQuery("foo.plain:running foo.plain:scissors").field("foo")), + response -> assertHighlight(response, 0, "foo", 0, equalTo("running with scissors")) + ); // And all matched field via the queryString's field parameter, just in case - resp = req.setQuery(queryStringQuery("running scissors").field("foo.plain")).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); + assertResponse( + req.setQuery(queryStringQuery("running scissors").field("foo.plain")), + response -> assertHighlight(response, 0, "foo", 0, equalTo("running with scissors")) + ); // Finding the same string two ways is ok too - resp = req.setQuery(queryStringQuery("run foo.plain:running^5 scissors").field("foo")).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); + assertResponse( + req.setQuery(queryStringQuery("run foo.plain:running^5 scissors").field("foo")), + response -> assertHighlight(response, 0, "foo", 0, equalTo("running with scissors")) + ); // But we use the best found score when sorting fragments - resp = req.setQuery(queryStringQuery("cats foo.plain:cats^5").field("foo")).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); + assertResponse( + req.setQuery(queryStringQuery("cats foo.plain:cats^5").field("foo")), + response -> assertHighlight(response, 0, "foo", 0, equalTo("junk junk cats junk junk")) + ); // which can also be written by searching on the subfield - resp = req.setQuery(queryStringQuery("cats").field("foo").field("foo.plain", 5)).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); + assertResponse( + req.setQuery(queryStringQuery("cats").field("foo").field("foo.plain", 5)), + response -> assertHighlight(response, 0, "foo", 0, equalTo("junk junk cats junk junk")) + ); // Speaking of two fields, you can have two fields, only one of which has matchedFields enabled QueryBuilder twoFieldsQuery = queryStringQuery("cats").field("foo").field("foo.plain", 5).field("bar").field("bar.plain", 5); @@ -1143,50 +1222,63 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .fragmentSize(25) .highlighterType("fvh") .requireFieldMatch(requireFieldMatch); - resp = req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)).get(); - assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); - assertHighlight(resp, 0, "bar", 0, equalTo("cat cat junk junk junk junk")); + assertResponse(req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)), 
+ assertHighlight(response, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk"));
+ assertHighlight(response, 0, "bar", 0, equalTo("<em>cat</em> <em>cat</em> junk junk junk junk"));
+ });
// And you can enable matchedField highlighting on both
barField.matchedFields("bar", "bar.plain");
- resp = req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk"));
- assertHighlight(resp, 0, "bar", 0, equalTo("junk junk <em>cats</em> junk junk"));
+ assertResponse(req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)), response -> {
+ assertHighlight(response, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk"));
+ assertHighlight(response, 0, "bar", 0, equalTo("junk junk <em>cats</em> junk junk"));
+ });
// Setting a matchedField that isn't searched/doesn't exist is simply ignored.
barField.matchedFields("bar", "candy");
- resp = req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)).get();
- assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk"));
- assertHighlight(resp, 0, "bar", 0, equalTo("<em>cat</em> <em>cat</em> junk junk junk junk"));
+ assertResponse(req.setQuery(twoFieldsQuery).highlighter(new HighlightBuilder().field(fooField).field(barField)), response -> {
+ assertHighlight(response, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk"));
+ assertHighlight(response, 0, "bar", 0, equalTo("<em>cat</em> <em>cat</em> junk junk junk junk"));
+ });
// If the stored field doesn't have a value it doesn't matter what you match, you get nothing.
barField.matchedFields("bar", "foo.plain");
- resp = req.setQuery(queryStringQuery("running scissors").field("foo.plain").field("bar"))
- .highlighter(new HighlightBuilder().field(fooField).field(barField))
- .get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
- assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("bar")));
+ assertResponse(
+ req.setQuery(queryStringQuery("running scissors").field("foo.plain").field("bar"))
+ .highlighter(new HighlightBuilder().field(fooField).field(barField)),
+ response -> {
+ assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
+ assertThat(response.getHits().getAt(0).getHighlightFields(), not(hasKey("bar")));
+ }
+ );
// If the stored field is found but the matched field isn't then you don't get a result either.
fooField.matchedFields("bar.plain");
- resp = req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain"))
- .highlighter(new HighlightBuilder().field(fooField).field(barField))
- .get();
- assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("foo")));
+ assertResponse(
+ req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain"))
+ .highlighter(new HighlightBuilder().field(fooField).field(barField)),
+ response -> assertThat(response.getHits().getAt(0).getHighlightFields(), not(hasKey("foo")))
+ );
// But if you add the stored field to the list of matched fields then you'll get a result again
fooField.matchedFields("foo", "bar.plain");
- resp = req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain"))
- .highlighter(new HighlightBuilder().field(fooField).field(barField))
- .get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
- assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("bar")));
+ assertResponse(
+ req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain"))
+ .highlighter(new HighlightBuilder().field(fooField).field(barField)),
+ response -> {
+ assertHighlight(response, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>"));
+ assertThat(response.getHits().getAt(0).getHighlightFields(), not(hasKey("bar")));
+ }
+ );
// You _can_ highlight fields that aren't subfields of one another.
- resp = req.setQuery(queryStringQuery("weird").field("foo").field("foo.plain").field("bar").field("bar.plain"))
- .highlighter(new HighlightBuilder().field(fooField).field(barField))
- .get();
- assertHighlight(resp, 0, "foo", 0, equalTo("<em>weird</em>"));
- assertHighlight(resp, 0, "bar", 0, equalTo("<em>result</em>"));
+ assertResponse(
+ req.setQuery(queryStringQuery("weird").field("foo").field("foo.plain").field("bar").field("bar.plain"))
+ .highlighter(new HighlightBuilder().field(fooField).field(barField)),
+ response -> {
+ assertHighlight(response, 0, "foo", 0, equalTo("<em>weird</em>"));
+ assertHighlight(response, 0, "bar", 0, equalTo("<em>result</em>"));
+ }
+ );
assertFailures(
req.setQuery(queryStringQuery("result").field("foo").field("foo.plain").field("bar").field("bar.plain")),
@@ -1208,15 +1300,18 @@ public void testFastVectorHighlighterManyDocs() throws Exception {
indexRandom(true, indexRequestBuilders);
logger.info("--> searching explicitly on field1 and highlighting on it");
- SearchResponse searchResponse = prepareSearch().setSize(COUNT)
- .setQuery(termQuery("field1", "test"))
- .highlighter(new HighlightBuilder().field("field1", 100, 0))
- .get();
- for (int i = 0; i < COUNT; i++) {
- SearchHit hit = searchResponse.getHits().getHits()[i];
- // LUCENE 3.1 UPGRADE: Caused adding the space at the end...
- assertHighlight(searchResponse, i, "field1", 0, 1, equalTo("<em>test</em> " + hit.getId()));
- }
+ assertResponse(
+ prepareSearch().setSize(COUNT)
+ .setQuery(termQuery("field1", "test"))
+ .highlighter(new HighlightBuilder().field("field1", 100, 0)),
+ response -> {
+ for (int i = 0; i < COUNT; i++) {
+ SearchHit hit = response.getHits().getHits()[i];
+ // LUCENE 3.1 UPGRADE: Caused adding the space at the end...
+ assertHighlight(response, i, "field1", 0, 1, equalTo("<em>test</em> " + hit.getId()));
+ }
+ }
+ );
}
public XContentBuilder type1TermVectorMapping() throws IOException {
@@ -1248,13 +1343,21 @@ public void testSameContent() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug"))
- .highlighter(new HighlightBuilder().field("title", -1, 0))
- .get();
-
- for (int i = 0; i < 5; i++) {
- assertHighlight(search, i, "title", 0, 1, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch"));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "bug")).highlighter(new HighlightBuilder().field("title", -1, 0)),
+ response -> {
+ for (int i = 0; i < 5; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ 1,
+ equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")
+ );
+ }
+ }
+ );
}
public void testFastVectorHighlighterOffsetParameter() throws Exception {
@@ -1268,14 +1371,16 @@ public void testFastVectorHighlighterOffsetParameter() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug"))
- .highlighter(new HighlightBuilder().field("title", 30, 1, 10).highlighterType("fvh"))
- .get();
-
- for (int i = 0; i < 5; i++) {
- // LUCENE 3.1 UPGRADE: Caused adding the space at the end...
- assertHighlight(search, i, "title", 0, 1, equalTo("highlighting <em>bug</em> present in elasticsearch"));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "bug"))
+ .highlighter(new HighlightBuilder().field("title", 30, 1, 10).highlighterType("fvh")),
+ response -> {
+ for (int i = 0; i < 5; i++) {
+ // LUCENE 3.1 UPGRADE: Caused adding the space at the end...
+ assertHighlight(response, i, "title", 0, 1, equalTo("highlighting <em>bug</em> present in elasticsearch"));
+ }
+ }
+ );
}
public void testEscapeHtml() throws Exception {
@@ -1289,13 +1394,22 @@ public void testEscapeHtml() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse search = prepareSearch().setQuery(matchQuery("title", "test"))
- .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1, 10))
- .get();
-
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(search, i, "title", 0, 1, startsWith("This is a html escaping highlighting <em>test</em> for *&amp;?"));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "test"))
+ .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1, 10)),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ 1,
+ startsWith("This is a html escaping highlighting <em>test</em> for *&amp;?")
+ );
+ }
+ }
+ );
}
public void testEscapeHtmlVector() throws Exception {
@@ -1309,13 +1423,15 @@ public void testEscapeHtmlVector() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse search = prepareSearch().setQuery(matchQuery("title", "test"))
- .highlighter(new HighlightBuilder().encoder("html").field("title", 30, 1, 10).highlighterType("plain"))
- .get();
-
- for (int i = 0; i < 5; i++) {
- assertHighlight(search, i, "title", 0, 1, equalTo(" highlighting <em>test</em> for *&amp;? elasticsearch"));
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "test"))
+ .highlighter(new HighlightBuilder().encoder("html").field("title", 30, 1, 10).highlighterType("plain")),
+ response -> {
+ for (int i = 0; i < 5; i++) {
+ assertHighlight(response, i, "title", 0, 1, equalTo(" highlighting <em>test</em> for *&amp;? elasticsearch"));
+ }
+ }
+ );
}
public void testMultiMapperVectorWithStore() throws Exception {
@@ -1573,48 +1689,55 @@ public void testDisableFastVectorHighlighter() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
- .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh"))
- .get();
+ assertResponse(
+ prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
+ .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")),
+ response -> {
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- // Because of SOLR-3724 nothing is highlighted when FVH is used
- assertNotHighlighted(search, i, "title");
- }
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ // Because of SOLR-3724 nothing is highlighted when FVH is used
+ assertNotHighlighted(response, i, "title");
+ }
+ }
+ );
// Using plain highlighter instead of FVH
- search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
- .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("plain"))
- .get();
-
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(
- search,
- i,
- "title",
- 0,
- 1,
- equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")
- );
- }
+ assertResponse(
+ prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
+ .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("plain")),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ 1,
+ equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")
+ );
+ }
+ }
+ );
// Using plain highlighter instead of FVH on the field level
- search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
- .highlighter(
- new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("plain")).highlighterType("plain")
- )
- .get();
-
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(
- search,
- i,
- "title",
- 0,
- 1,
- equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")
- );
- }
+ assertResponse(
+ prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround"))
+ .highlighter(
+ new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("plain")).highlighterType("plain")
+ ),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ 1,
+ equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")
+ );
+ }
+ }
+ );
}
public void testFSHHighlightAllMvFragments() throws Exception {
@@ -1631,18 +1754,20 @@ public void testFSHHighlightAllMvFragments() throws Exception {
.get();
refresh();
- SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("tags", "tag"))
- .highlighter(new HighlightBuilder().field("tags", -1, 0).highlighterType("fvh"))
- .get();
-
- assertHighlight(response, 0, "tags", 0, equalTo("this is a really long <em>tag</em> i would like to highlight"));
- assertHighlight(
- response,
- 0,
- "tags",
- 1,
- 2,
- equalTo("here is another one that is very long and has the <em>tag</em> token near the end")
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchQuery("tags", "tag"))
+ .highlighter(new HighlightBuilder().field("tags", -1, 0).highlighterType("fvh")),
+ response -> {
+ assertHighlight(response, 0, "tags", 0, equalTo("this is a really long <em>tag</em> i would like to highlight"));
+ assertHighlight(
+ response,
+ 0,
+ "tags",
+ 1,
+ 2,
+ equalTo("here is another one that is very long and has the <em>tag</em> token near the end")
+ );
+ }
);
}
@@ -1703,40 +1828,44 @@ public void testPlainHighlightDifferentFragmenter() throws Exception {
.get();
refresh();
- SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag"))
- .highlighter(
- new HighlightBuilder().field(
- new HighlightBuilder.Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple")
- )
- )
- .get();
-
- assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight"));
- assertHighlight(
- response,
- 0,
- "tags",
- 1,
- 2,
- equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag"))
+ .highlighter(
+ new HighlightBuilder().field(
+ new HighlightBuilder.Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple")
+ )
+ ),
+ response -> {
+ assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight"));
+ assertHighlight(
+ response,
+ 0,
+ "tags",
+ 1,
+ 2,
+ equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")
+ );
+ }
);
- response = prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag"))
- .highlighter(
- new HighlightBuilder().field(
- new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("span")
- )
- )
- .get();
-
- assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight"));
- assertHighlight(
- response,
- 0,
- "tags",
- 1,
- 2,
- equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag"))
+ .highlighter(
+ new HighlightBuilder().field(
+ new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("span")
+ )
+ ),
+ response -> {
+ assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight"));
+ assertHighlight(
+ response,
+ 0,
+ "tags",
+ 1,
+ 2,
+ equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")
+ );
+ }
);
assertFailures(
@@ -1758,14 +1887,18 @@ public void testPlainHighlighterMultipleFields() {
indexDoc("test", "1", "field1", "The quick brown fox", "field2", "The slow brown fox");
refresh();
- SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox"))
- .highlighter(
- new HighlightBuilder().field(new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true))
- .field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false))
- )
- .get();
- assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox</1>"));
- assertHighlight(response, 0, "field2", 0, 1, equalTo("The slow brown <2>fox</2>"));
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox"))
+ .highlighter(
+ new HighlightBuilder().field(
+ new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true)
+ ).field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false))
+ ),
+ response -> {
+ assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox</1>"));
+ assertHighlight(response, 0, "field2", 0, 1, equalTo("The slow brown <2>fox</2>"));
+ }
+ );
}
public void testFastVectorHighlighterMultipleFields() {
@@ -1782,14 +1915,18 @@ public void testFastVectorHighlighterMultipleFields() {
indexDoc("test", "1", "field1", "The quick brown fox", "field2", "The slow brown fox");
refresh();
- SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox"))
- .highlighter(
- new HighlightBuilder().field(new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true))
- .field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false))
- )
- .get();
- assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox</1>"));
- assertHighlight(response, 0, "field2", 0, 1, equalTo("The slow brown <2>fox</2>"));
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox"))
+ .highlighter(
+ new HighlightBuilder().field(
+ new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true)
+ ).field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false))
+ ),
+ response -> {
+ assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox</1>"));
+ assertHighlight(response, 0, "field2", 0, 1, equalTo("The slow brown <2>fox</2>"));
+ }
+ );
}
public void testMissingStoredField() throws Exception {
@@ -1799,14 +1936,15 @@ public void testMissingStoredField() throws Exception {
refresh();
// This query used to fail when the field to highlight was absent
- SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "highlight"))
- .highlighter(
- new HighlightBuilder().field(
- new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1).fragmenter("simple")
- )
- )
- .get();
- assertThat(response.getHits().getHits()[0].getHighlightFields().isEmpty(), equalTo(true));
+ assertResponse(
+ prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "highlight"))
+ .highlighter(
+ new HighlightBuilder().field(
+ new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1).fragmenter("simple")
+ )
+ ),
+ response -> assertThat(response.getHits().getHits()[0].getHighlightFields().isEmpty(), equalTo(true))
+ );
}
// Issue #3211
@@ -1891,22 +2029,19 @@ public void testHighlightUsesHighlightQuery() throws IOException {
.highlighter(highlightBuilder);
Matcher<String> searchQueryMatcher = equalTo("<em>Testing</em> the highlight query feature");
- SearchResponse response = search.get();
- assertHighlight(response, 0, "text", 0, searchQueryMatcher);
+ assertResponse(search, response -> assertHighlight(response, 0, "text", 0, searchQueryMatcher));
field = new HighlightBuilder.Field("text");
Matcher<String> hlQueryMatcher = equalTo("Testing the highlight <em>query</em> feature");
field.highlightQuery(matchQuery("text", "query"));
highlightBuilder = new HighlightBuilder().field(field);
search = prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")).highlighter(highlightBuilder);
- response = search.get();
- assertHighlight(response, 0, "text", 0, hlQueryMatcher);
+ assertResponse(search, response -> assertHighlight(response, 0, "text", 0, hlQueryMatcher));
// Make sure the highlightQuery is taken into account when it is set on the highlight context instead of the field
highlightBuilder.highlightQuery(matchQuery("text", "query"));
field.highlighterType(type).highlightQuery(null);
- response = search.get();
- assertHighlight(response, 0, "text", 0, hlQueryMatcher);
+ assertResponse(search, response -> assertHighlight(response, 0, "text", 0, hlQueryMatcher));
}
}
@@ -2212,19 +2347,21 @@ public void testHighlightNoMatchSizeNumberOfFragments() {
// if there's a match we only return the values with matches (whole value as number_of_fragments == 0)
MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery("text", "third fifth");
field.highlighterType("plain");
- SearchResponse response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get();
- assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
- assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
+ assertResponse(prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)), response -> {
+ assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
+ assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
+ });
field.highlighterType("fvh");
- response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get();
- assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
- assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
-
+ assertResponse(prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)), response -> {
+ assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
+ assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
+ });
field.highlighterType("unified");
- response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get();
- assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
- assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
+ assertResponse(prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)), response -> {
+ assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence."));
+ assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence"));
+ });
}
public void testPostingsHighlighter() throws Exception {
@@ -2329,22 +2466,21 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception {
SearchSourceBuilder source = searchSource().query(termQuery("field1", "fox"))
.highlighter(highlight().field(new Field("field1").numOfFragments(5).preTags("").postTags("")));
- SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
-
- assertThat(searchResponse.getHits().getHits().length, equalTo(1));
- assertHighlight(
- searchResponse,
- 0,
- "field1",
- 0,
- 2,
- equalTo(
- "The quick brown fox jumps over the lazy dog."
- + " The lazy red fox jumps over the quick dog."
- )
- );
- assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("The quick brown dog jumps over the lazy fox."));
-
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ assertThat(response.getHits().getHits().length, equalTo(1));
+ assertHighlight(
+ response,
+ 0,
+ "field1",
+ 0,
+ 2,
+ equalTo(
+ "The quick brown fox jumps over the lazy dog."
+ + " The lazy red fox jumps over the quick dog."
+ )
+ );
+ assertHighlight(response, 0, "field1", 1, 2, equalTo("The quick brown dog jumps over the lazy fox."));
+ });
client().prepareIndex("test")
.setId("2")
.setSource(
@@ -2360,39 +2496,40 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception {
source = searchSource().query(termQuery("field1", "fox"))
.highlighter(highlight().field(new Field("field1").numOfFragments(0).preTags("").postTags("")));
- searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 2L);
-
- for (SearchHit searchHit : searchResponse.getHits()) {
- if ("1".equals(searchHit.getId())) {
- assertHighlight(
- searchHit,
- "field1",
- 0,
- 1,
- equalTo(
- "The quick brown fox jumps over the lazy dog. "
- + "The lazy red fox jumps over the quick dog. "
- + "The quick brown dog jumps over the lazy fox."
- )
- );
- } else if ("2".equals(searchHit.getId())) {
- assertHighlight(
- searchHit,
- "field1",
- 0,
- 3,
- equalTo("The quick brown fox jumps over the lazy dog. Second sentence not finished")
- );
- assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red fox jumps over the quick dog."));
- assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox."));
- } else {
- fail("Only hits with id 1 and 2 are returned");
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ assertHitCount(response, 2L);
+
+ for (SearchHit searchHit : response.getHits()) {
+ if ("1".equals(searchHit.getId())) {
+ assertHighlight(
+ searchHit,
+ "field1",
+ 0,
+ 1,
+ equalTo(
+ "The quick brown fox jumps over the lazy dog. "
+ + "The lazy red fox jumps over the quick dog. "
+ + "The quick brown dog jumps over the lazy fox."
+ )
+ );
+ } else if ("2".equals(searchHit.getId())) {
+ assertHighlight(
+ searchHit,
+ "field1",
+ 0,
+ 3,
+ equalTo("The quick brown fox jumps over the lazy dog. Second sentence not finished")
+ );
+ assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red fox jumps over the quick dog."));
+ assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox."));
+ } else {
+ fail("Only hits with id 1 and 2 are returned");
+ }
}
- }
+ });
}
- public void testMultiMatchQueryHighlight() throws IOException {
+ public void testMultiMatchQueryHighlight() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
@@ -2429,22 +2566,23 @@ public void testMultiMatchQueryHighlight() throws IOException {
.field(new Field("field1").requireFieldMatch(true).preTags("<field1>").postTags("</field1>"))
);
logger.info("Running multi-match type: [{}] highlight with type: [{}]", matchQueryType, highlighterType);
- SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 1L);
- assertHighlight(
- searchResponse,
- 0,
- "field1",
- 0,
- anyOf(
- equalTo("The <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over"),
- equalTo("The <field1>quick brown fox</field1> jumps over")
- )
- );
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ assertHitCount(response, 1L);
+ assertHighlight(
+ response,
+ 0,
+ "field1",
+ 0,
+ anyOf(
+ equalTo("The <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over"),
+ equalTo("The <field1>quick brown fox</field1> jumps over")
+ )
+ );
+ });
}
}
- public void testCombinedFieldsQueryHighlight() throws IOException {
+ public void testCombinedFieldsQueryHighlight() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
@@ -2478,15 +2616,16 @@ public void testCombinedFieldsQueryHighlight() throws IOException {
.field(new Field("field1").requireFieldMatch(true).preTags("<field1>").postTags("</field1>"))
);
- SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 1L);
- assertHighlight(
- searchResponse,
- 0,
- "field1",
- 0,
- equalTo("The <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over")
- );
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ assertHitCount(response, 1L);
+ assertHighlight(
+ response,
+ 0,
+ "field1",
+ 0,
+ equalTo("The <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over")
+ );
+ });
}
}
@@ -2512,31 +2651,31 @@ public void testPostingsHighlighterOrderByScore() throws Exception {
SearchSourceBuilder source = searchSource().query(termQuery("field1", "sentence"))
.highlighter(highlight().field("field1").order("score"));
- SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet();
-
- Map<String, HighlightField> highlightFieldMap = searchResponse.getHits().getAt(0).getHighlightFields();
- assertThat(highlightFieldMap.size(), equalTo(1));
- HighlightField field1 = highlightFieldMap.get("field1");
- assertThat(field1.fragments().length, equalTo(4));
- assertThat(
- field1.fragments()[0].string(),
- equalTo("This <em>sentence</em> contains three <em>sentence</em> occurrences (<em>sentence</em>).")
- );
- assertThat(
- field1.fragments()[1].string(),
- equalTo(
- "This <em>sentence</em> contains one match, not that short. "
- + "This <em>sentence</em> contains two <em>sentence</em> matches."
- )
- );
- assertThat(
- field1.fragments()[2].string(),
- equalTo("This is the second value's first <em>sentence</em>. This one contains no matches.")
- );
- assertThat(
- field1.fragments()[3].string(),
- equalTo("One <em>sentence</em> match here and scored lower since the text is quite long, not that appealing.")
- );
+ assertResponse(client().search(new SearchRequest("test").source(source)), response -> {
+ Map<String, HighlightField> highlightFieldMap = response.getHits().getAt(0).getHighlightFields();
+ assertThat(highlightFieldMap.size(), equalTo(1));
+ HighlightField field1 = highlightFieldMap.get("field1");
+ assertThat(field1.fragments().length, equalTo(4));
+ assertThat(
+ field1.fragments()[0].string(),
+ equalTo("This <em>sentence</em> contains three <em>sentence</em> occurrences (<em>sentence</em>).")
+ );
+ assertThat(
+ field1.fragments()[1].string(),
+ equalTo(
+ "This <em>sentence</em> contains one match, not that short. "
+ + "This <em>sentence</em> contains two <em>sentence</em> matches."
+ )
+ );
+ assertThat(
+ field1.fragments()[2].string(),
+ equalTo("This is the second value's first <em>sentence</em>. This one contains no matches.")
+ );
+ assertThat(
+ field1.fragments()[3].string(),
+ equalTo("One <em>sentence</em> match here and scored lower since the text is quite long, not that appealing.")
+ );
+ });
}
public void testPostingsHighlighterEscapeHtml() throws Exception {
@@ -2550,20 +2689,21 @@ public void testPostingsHighlighterEscapeHtml() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("title", "test"))
- .highlighter(new HighlightBuilder().field("title").encoder("html"))
- .get();
-
- for (int i = 0; i < indexRequestBuilders.length; i++) {
- assertHighlight(
- searchResponse,
- i,
- "title",
- 0,
- 1,
- equalTo("This is a html escaping highlighting <em>test</em> for *&amp;? elasticsearch")
- );
- }
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title", "test")).highlighter(new HighlightBuilder().field("title").encoder("html")),
+ response -> {
+ for (int i = 0; i < indexRequestBuilders.length; i++) {
+ assertHighlight(
+ response,
+ i,
+ "title",
+ 0,
+ 1,
+ equalTo("This is a html escaping highlighting <em>test</em> for *&amp;? elasticsearch")
+ );
+ }
+ }
+ );
}
public void testPostingsHighlighterMultiMapperWithStore() throws Exception {
@@ -2596,31 +2736,35 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception {
refresh();
// simple search on body with standard analyzer with a simple field query
- SearchResponse searchResponse = prepareSearch()
- // lets make sure we analyze the query and we highlight the resulting terms
- .setQuery(matchQuery("title", "This is a Test"))
- .highlighter(new HighlightBuilder().field("title"))
- .get();
-
- assertHitCount(searchResponse, 1L);
- SearchHit hit = searchResponse.getHits().getAt(0);
- // stopwords are not highlighted since not indexed
- assertHighlight(hit, "title", 0, 1, equalTo("this is a <em>test</em> . Second sentence."));
-
+ assertResponse(
+ prepareSearch()
+ // lets make sure we analyze the query and we highlight the resulting terms
+ .setQuery(matchQuery("title", "This is a Test"))
+ .highlighter(new HighlightBuilder().field("title")),
+ response -> {
+
+ assertHitCount(response, 1L);
+ SearchHit hit = response.getHits().getAt(0);
+ // stopwords are not highlighted since not indexed
+ assertHighlight(hit, "title", 0, 1, equalTo("this is a <em>test</em> . Second sentence."));
+ }
+ );
// search on title.key and highlight on title
- searchResponse = prepareSearch().setQuery(matchQuery("title.key", "this is a test"))
- .highlighter(new HighlightBuilder().field("title.key"))
- .get();
- assertHitCount(searchResponse, 1L);
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("title.key", "this is a test")).highlighter(new HighlightBuilder().field("title.key")),
+ response -> {
+ assertHitCount(response, 1L);
- // stopwords are now highlighted since we used only whitespace analyzer here
- assertHighlight(
- searchResponse,
- 0,
- "title.key",
- 0,
- 1,
- equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em> . Second sentence.")
+ // stopwords are now highlighted since we used only whitespace analyzer here
+ assertHighlight(
+ response,
+ 0,
+ "title.key",
+ 0,
+ 1,
+ equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em> . Second sentence.")
+ );
+ }
);
}
@@ -2830,17 +2974,11 @@ public void testPostingsHighlighterWildcardQuery() throws Exception {
);
source = searchSource().query(wildcardQuery("field2", "qu*k")).highlighter(highlight().field("field2"));
- SearchResponse searchResponse = prepareSearch("test").setSource(source).get();
- assertHitCount(searchResponse, 1L);
+ assertResponse(prepareSearch("test").setSource(source), response -> {
+ assertHitCount(response, 1L);
- assertHighlight(
- searchResponse,
- 0,
- "field2",
- 0,
- 1,
- equalTo("The <em>quick</em> brown fox jumps over the lazy dog! Second sentence.")
- );
+ assertHighlight(response, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog! Second sentence."));
+ });
}
public void testPostingsHighlighterTermRangeQuery() throws Exception {
@@ -2987,13 +3125,14 @@ public void testPostingsHighlighterManyDocs() throws Exception {
SearchRequestBuilder searchRequestBuilder = prepareSearch().setSize(COUNT)
.setQuery(termQuery("field1", "test"))
.highlighter(new HighlightBuilder().field("field1"));
- SearchResponse searchResponse = searchRequestBuilder.get();
- assertHitCount(searchResponse, COUNT);
- assertThat(searchResponse.getHits().getHits().length, equalTo(COUNT));
- for (SearchHit hit : searchResponse.getHits()) {
- String prefix = prefixes.get(hit.getId());
- assertHighlight(hit, "field1", 0, 1, equalTo("Sentence " + prefix + " <em>test</em>. Sentence two."));
- }
+ assertResponse(searchRequestBuilder, response -> {
+ assertHitCount(response, COUNT);
+ assertThat(response.getHits().getHits().length, equalTo(COUNT));
+ for (SearchHit hit : response.getHits()) {
+ String prefix = prefixes.get(hit.getId());
+ assertHighlight(hit, "field1", 0, 1, equalTo("Sentence " + prefix + " <em>test</em>. Sentence two."));
+ }
+ });
}
public void testDoesNotHighlightTypeName() throws Exception {
@@ -3186,12 +3325,15 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti
.setCorners(61.10078883158897, -170.15625, -64.92354174306496, 118.47656249999999)
)
.should(QueryBuilders.termQuery("text", "failure"));
- SearchResponse search = prepareSearch().setSource(
- new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType))
- ).get();
- assertNoFailures(search);
- assertThat(search.getHits().getTotalHits().value, equalTo(1L));
- assertThat(search.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1));
+ assertNoFailuresAndResponse(
+ prepareSearch().setSource(
+ new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType))
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+ assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1));
+ }
+ );
}
public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException {
@@ -3258,14 +3400,17 @@ public void testKeywordFieldHighlighting() throws IOException {
.setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject())
.get();
refresh();
- SearchResponse search = prepareSearch().setSource(
- new SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text"))
- .highlighter(new HighlightBuilder().field("*"))
- ).get();
- assertNoFailures(search);
- assertThat(search.getHits().getTotalHits().value, equalTo(1L));
- HighlightField highlightField = search.getHits().getAt(0).getHighlightFields().get("keyword_field");
- assertThat(highlightField.fragments()[0].string(), equalTo("<em>some text</em>"));
+ assertNoFailuresAndResponse(
+ prepareSearch().setSource(
+ new SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text"))
+ .highlighter(new HighlightBuilder().field("*"))
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+ HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field");
+ assertThat(highlightField.fragments()[0].string(), equalTo("<em>some text</em>"));
+ }
+ );
}
public void testCopyToFields() throws Exception {
@@ -3286,14 +3431,15 @@ public void testCopyToFields() throws Exception {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
- SearchResponse response = prepareSearch().setQuery(matchQuery("foo_copy", "brown"))
- .highlighter(new HighlightBuilder().field(new Field("foo_copy")))
- .get();
-
- assertHitCount(response, 1);
- HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy");
- assertThat(field.fragments().length, equalTo(1));
- assertThat(field.fragments()[0].string(), equalTo("how now <em>brown</em> cow"));
+ assertResponse(
+ prepareSearch().setQuery(matchQuery("foo_copy", "brown")).highlighter(new HighlightBuilder().field(new Field("foo_copy"))),
+ response -> {
+ assertHitCount(response, 1);
+ HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy");
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("how now <em>brown</em> cow"));
+ }
+ );
}
public void testACopyFieldWithNestedQuery() throws Exception {
@@ -3336,14 +3482,17 @@
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - SearchResponse searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo_text").highlighterType("fvh")).requireFieldMatch(false)) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo_text"); - assertThat(field.fragments().length, equalTo(2)); - assertThat(field.fragments()[0].string(), equalTo("brown")); - assertThat(field.fragments()[1].string(), equalTo("cow")); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("foo_text").highlighterType("fvh")).requireFieldMatch(false)), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_text"); + assertThat(field.fragments().length, equalTo(2)); + assertThat(field.fragments()[0].string(), equalTo("brown")); + assertThat(field.fragments()[1].string(), equalTo("cow")); + } + ); } public void testFunctionScoreQueryHighlight() throws Exception { @@ -3353,13 +3502,16 @@ public void testFunctionScoreQueryHighlight() throws Exception { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - SearchResponse searchResponse = prepareSearch().setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) - .highlighter(new HighlightBuilder().field(new Field("text"))) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown")); + assertResponse( + prepareSearch().setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) + .highlighter(new HighlightBuilder().field(new Field("text"))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown")); + } + ); } public void testFiltersFunctionScoreQueryHighlight() throws Exception { @@ -3373,16 +3525,20 @@ public void testFiltersFunctionScoreQueryHighlight() throws Exception { new RandomScoreFunctionBuilder() ); - SearchResponse searchResponse = prepareSearch().setQuery( - new FunctionScoreQueryBuilder( - QueryBuilders.prefixQuery("text", "bro"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { filterBuilder } - ) - ).highlighter(new HighlightBuilder().field(new Field("text"))).get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown")); + assertResponse( + prepareSearch().setQuery( + new FunctionScoreQueryBuilder( + QueryBuilders.prefixQuery("text", "bro"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { filterBuilder } + ) + ).highlighter(new HighlightBuilder().field(new Field("text"))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), 
equalTo("brown")); + } + ); } public void testHighlightQueryRewriteDatesWithNow() throws Exception { @@ -3401,19 +3557,20 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { ); ensureSearchable("index-1"); for (int i = 0; i < 5; i++) { - final SearchResponse r1 = prepareSearch("index-1").addSort("d", SortOrder.DESC) - .setTrackScores(true) - .highlighter(highlight().field("field").preTags("").postTags("")) - .setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("d").gte("now-12h").lte("now").includeLower(true).includeUpper(true).boost(1.0f)) - .should(QueryBuilders.termQuery("field", "hello")) - ) - .get(); - - assertNoFailures(r1); - assertThat(r1.getHits().getTotalHits().value, equalTo(1L)); - assertHighlight(r1, 0, "field", 0, 1, equalTo("hello world")); + assertNoFailuresAndResponse( + prepareSearch("index-1").addSort("d", SortOrder.DESC) + .setTrackScores(true) + .highlighter(highlight().field("field").preTags("").postTags("")) + .setQuery( + QueryBuilders.boolQuery() + .must(QueryBuilders.rangeQuery("d").gte("now-12h").lte("now").includeLower(true).includeUpper(true).boost(1.0f)) + .should(QueryBuilders.termQuery("field", "hello")) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); + } + ); } } @@ -3458,51 +3615,63 @@ public void testWithNestedQuery() throws Exception { .get(); for (String type : new String[] { "unified", "plain" }) { - SearchResponse searchResponse = prepareSearch().setQuery( - nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None) - ).highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))).get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(2)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); - assertThat(field.fragments()[1].string(), equalTo("cow")); - - searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) - .get(); - assertHitCount(searchResponse, 1); - field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); - - searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) - .get(); - assertHitCount(searchResponse, 1); - field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); - - searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) - .get(); - assertHitCount(searchResponse, 1); - field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), 
ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); + assertThat(field.fragments().length, equalTo(2)); + assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + assertThat(field.fragments()[1].string(), equalTo("cow")); + } + ); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + } + ); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + } + ); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + } + ); } // For unified and fvh highlighters we just check that the nested query is correctly extracted // but we highlight the root text field since nested documents cannot be highlighted with postings nor term vectors // directly. 
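Where the old code also called a standalone `assertNoFailures(searchResponse)`, the diff reaches for `assertNoFailuresAndResponse`, which folds that check into the same managed callback. A plausible composition, building on the `assertResponse` sketch above and the existing `assertNoFailures(SearchResponse)` assertion (the body is illustrative, not the library's implementation):

```java
// Illustrative composition: verify no shard failed, then run the caller's
// assertions; the response release is inherited from assertResponse.
public static void assertNoFailuresAndResponse(SearchRequestBuilder request, Consumer<SearchResponse> consumer) {
    assertResponse(request, response -> {
        assertNoFailures(response); // replaces the old standalone assertNoFailures(sr)
        consumer.accept(response);  // the test's own assertions
    });
}
```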
for (String type : ALL_TYPES) { - SearchResponse searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("text").highlighterType(type).requireFieldMatch(false))) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown")); + assertResponse( + prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) + .highlighter(new HighlightBuilder().field(new Field("text").highlighterType(type).requireFieldMatch(false))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown")); + } + ); } } @@ -3519,13 +3688,16 @@ public void testWithNormalizer() throws Exception { .get(); for (String highlighterType : new String[] { "unified", "plain" }) { - SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("keyword", "hello world")) - .highlighter(new HighlightBuilder().field(new Field("keyword").highlighterType(highlighterType))) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("keyword"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("hello world")); + assertResponse( + prepareSearch().setQuery(matchQuery("keyword", "hello world")) + .highlighter(new HighlightBuilder().field(new Field("keyword").highlighterType(highlighterType))), + response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("keyword"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("hello world")); + } + ); } } @@ -3540,11 +3712,14 @@ public void testDisableHighlightIdField() throws Exception { .get(); for (String highlighterType : new String[] { "plain", "unified" }) { - SearchResponse searchResponse = prepareSearch().setQuery( - matchQuery("_id", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") - ).highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighterType).requireFieldMatch(false))).get(); - assertHitCount(searchResponse, 1); - assertNull(searchResponse.getHits().getAt(0).getHighlightFields().get("_id")); + assertResponse( + prepareSearch().setQuery(matchQuery("_id", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1")) + .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighterType).requireFieldMatch(false))), + response -> { + assertHitCount(response, 1); + assertNull(response.getHits().getAt(0).getHighlightFields().get("_id")); + } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index faefeea0cb04e..ef5eafa5153ce 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -8,12 +8,10 @@ package 
org.elasticsearch.search.functionscore; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.geo.GeoPoint; @@ -48,8 +46,10 @@ import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.linearDecayFunction; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; @@ -140,61 +140,65 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { lonlat.add(20f); lonlat.add(11f); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + assertHitCount( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + ), + (numDummyDocs + 2) ); - SearchResponse sr = response.actionGet(); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km")))) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source(searchSource().query(functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km")))) + ), + response -> { + assertHitCount(response, (numDummyDocs + 2)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); // Test Exp - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + assertHitCount( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + ), + (numDummyDocs + 2) ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - 
.source(searchSource().query(functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km")))) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source(searchSource().query(functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km")))) + ), + response -> { + assertHitCount(response, (numDummyDocs + 2)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); // Test Lin - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + assertHitCount( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) + ), + (numDummyDocs + 2) ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km")))) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source(searchSource().query(functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km")))) + ), + response -> { + assertHitCount(response, (numDummyDocs + 2)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); } public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { @@ -245,67 +249,76 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { // Test Gauss - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( - CombineFunction.REPLACE + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( + CombineFunction.REPLACE + ) ) - ) - ) + ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); + for (int i = 0; i < numDummyDocs; i++) { + assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); + } + } ); - SearchResponse sr = response.actionGet(); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - 
assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); - for (int i = 0; i < numDummyDocs; i++) { - assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); - } // Test Exp - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), exponentialDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( - CombineFunction.REPLACE + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), exponentialDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( + CombineFunction.REPLACE + ) ) - ) - ) + ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); + for (int i = 0; i < numDummyDocs; i++) { + assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); + } + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); - for (int i = 0; i < numDummyDocs; i++) { - assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); - } // Test Lin - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0, 1.0)).boostMode( - CombineFunction.REPLACE + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0, 1.0)).boostMode( + CombineFunction.REPLACE + ) ) - ) - ) + ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); } public void testBoostModeSettingWorks() throws Exception { @@ -364,48 +377,56 @@ public void testBoostModeSettingWorks() throws Exception { lonlat.add(20f); lonlat.add(11f); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - 
functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.MULTIPLY + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( + CombineFunction.MULTIPLY + ) ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + } ); - SearchResponse sr = response.actionGet(); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); - // Test Exp - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(termQuery("test", "value"))) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source(searchSource().query(termQuery("test", "value"))) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.REPLACE + + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( + CombineFunction.REPLACE + ) ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getAt(0).getId(), equalTo("2")); + assertThat(sh.getAt(1).getId(), equalTo("1")); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); - assertThat(sh.getAt(0).getId(), equalTo("2")); - assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -446,34 +467,44 @@ public void testParseGeoPoint() throws Exception { ScoreFunctionBuilders.weightFactorFunction(randomIntBetween(1, 10)) ); GeoPoint point = new GeoPoint(20, 11); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", point, "1000km")).boostMode(CombineFunction.REPLACE) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", point, "1000km")).boostMode( + CombineFunction.REPLACE + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + 
assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); + } ); - SearchResponse sr = response.actionGet(); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); // this is equivalent to new GeoPoint(20, 11); just flipped so scores must be same float[] coords = { 11, 20 }; - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", coords, "1000km")).boostMode(CombineFunction.REPLACE) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", coords, "1000km")).boostMode( + CombineFunction.REPLACE + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } public void testCombineModes() throws Exception { @@ -505,95 +536,120 @@ public void testCombineModes() throws Exception { ScoreFunctionBuilders.weightFactorFunction(2) ); // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.MULTIPLY + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.MULTIPLY + ) ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); + } ); - SearchResponse sr = response.actionGet(); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.REPLACE + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.REPLACE + ) ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), 
equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.SUM) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.SUM + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); + logger.info( + "--> Hit[0] {} Explanation:\n {}", + response.getHits().getAt(0).getId(), + response.getHits().getAt(0).getExplanation() + ); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); - logger.info("--> Hit[0] {} Explanation:\n {}", sr.getHits().getAt(0).getId(), sr.getHits().getAt(0).getExplanation()); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.AVG) + + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.AVG + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.MIN) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.MIN + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - 
assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); - - response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.MAX) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.MAX + ) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); + } ); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); - } public void testExceptionThrownIfScaleLE0() throws Exception { @@ -623,18 +679,18 @@ public void testExceptionThrownIfScaleLE0() throws Exception { ).actionGet(); refresh(); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "2013-05-28", "-1d"))) - ) + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "2013-05-28", "-1d")) + ) + ) + ).actionGet() ); - try { - response.actionGet(); - fail("Expected SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException e) { - assertThat(e.getMessage(), is("all shards failed")); - } + assertThat(e.getMessage(), is("all shards failed")); } public void testParseDateMath() throws Exception { @@ -670,24 +726,23 @@ public void testParseDateMath() throws Exception { ).actionGet(); refresh(); - SearchResponse sr = client().search( - new SearchRequest(new String[] {}).source( - searchSource().query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "now", "2d"))) - ) - ).get(); - - assertNoFailures(sr); - assertOrderedSearchHits(sr, "1", "2"); - - sr = client().search( - new SearchRequest(new String[] {}).source( - searchSource().query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "now-1d", "2d"))) - ) - ).get(); - - assertNoFailures(sr); - assertOrderedSearchHits(sr, "2", "1"); + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "now", "2d"))) + ) + ), + response -> assertOrderedSearchHits(response, "1", "2") + ); + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "now-1d", "2d"))) + ) + ), + response -> assertOrderedSearchHits(response, "2", "1") + ); } public void testValueMissingLin() throws Exception { @@ -729,32 +784,31 @@ public void testValueMissingLin() throws Exception { 
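Several DecayFunctionScoreIT call sites above shrink even further, to `assertHitCount(client().search(...), expectedCount)`, passing the `ActionFuture` straight in when the total hit count is the only thing being checked. A hypothetical sketch of what such an overload has to do under the same ref-counting rules — only the call shape is taken from the diff:

```java
import static org.junit.Assert.assertEquals;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchResponse;

// Hypothetical body: resolve the future, check the hit count, and release the
// response, mirroring the assertResponse contract sketched earlier.
public static void assertHitCount(ActionFuture<SearchResponse> future, long expectedCount) {
    SearchResponse response = future.actionGet();
    try {
        assertEquals(expectedCount, response.getHits().getTotalHits().value);
    } finally {
        response.decRef();
    }
}
```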
refresh(); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery( - baseQuery, - new FilterFunctionBuilder[] { - new FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")), - new FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1")) } - ).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery( + baseQuery, + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")), + new FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1")) } + ).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getHits().length, equalTo(4)); + double[] scores = new double[4]; + for (int i = 0; i < sh.getHits().length; i++) { + scores[Integer.parseInt(sh.getAt(i).getId()) - 1] = sh.getAt(i).getScore(); + } + assertThat(scores[0], lessThan(scores[1])); + assertThat(scores[2], lessThan(scores[3])); + } ); - - SearchResponse sr = response.actionGet(); - - assertNoFailures(sr); - SearchHits sh = sr.getHits(); - assertThat(sh.getHits().length, equalTo(4)); - double[] scores = new double[4]; - for (int i = 0; i < sh.getHits().length; i++) { - scores[Integer.parseInt(sh.getAt(i).getId()) - 1] = sh.getAt(i).getScore(); - } - assertThat(scores[0], lessThan(scores[1])); - assertThat(scores[2], lessThan(scores[3])); - } public void testDateWithoutOrigin() throws Exception { @@ -810,32 +864,32 @@ public void testDateWithoutOrigin() throws Exception { refresh(); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery( - QueryBuilders.matchAllQuery(), - new FilterFunctionBuilder[] { - new FilterFunctionBuilder(linearDecayFunction("num1", null, "7000d")), - new FilterFunctionBuilder(gaussDecayFunction("num1", null, "1d")), - new FilterFunctionBuilder(exponentialDecayFunction("num1", null, "7000d")) } - ).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery( + QueryBuilders.matchAllQuery(), + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(linearDecayFunction("num1", null, "7000d")), + new FilterFunctionBuilder(gaussDecayFunction("num1", null, "1d")), + new FilterFunctionBuilder(exponentialDecayFunction("num1", null, "7000d")) } + ).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + ) ) - ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getHits().length, equalTo(3)); + double[] scores = new double[4]; + for (int i = 0; i < sh.getHits().length; i++) { + scores[Integer.parseInt(sh.getAt(i).getId()) - 1] = sh.getAt(i).getScore(); + } + assertThat(scores[1], lessThan(scores[0])); + assertThat(scores[2], lessThan(scores[1])); + } ); - - SearchResponse sr = response.actionGet(); - assertNoFailures(sr); - SearchHits sh = sr.getHits(); - assertThat(sh.getHits().length, equalTo(3)); - double[] scores = new double[4]; - for (int i = 0; i < sh.getHits().length; i++) { - scores[Integer.parseInt(sh.getAt(i).getId()) - 1] = sh.getAt(i).getScore(); - } - 
assertThat(scores[1], lessThan(scores[0])); - assertThat(scores[2], lessThan(scores[1])); - } public void testManyDocsLin() throws Exception { @@ -891,33 +945,34 @@ public void testManyDocsLin() throws Exception { List lonlat = new ArrayList<>(); lonlat.add(100f); lonlat.add(110f); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDocs) - .query( - functionScoreQuery( - termQuery("test", "value"), - new FilterFunctionBuilder[] { - new FilterFunctionBuilder(linearDecayFunction("date", "2013-05-30", "+15d")), - new FilterFunctionBuilder(linearDecayFunction("geo", lonlat, "1000km")), - new FilterFunctionBuilder(linearDecayFunction("num", numDocs, numDocs / 2.0)) } - ).scoreMode(ScoreMode.MULTIPLY).boostMode(CombineFunction.REPLACE) - ) - ) + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().size(numDocs) + .query( + functionScoreQuery( + termQuery("test", "value"), + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(linearDecayFunction("date", "2013-05-30", "+15d")), + new FilterFunctionBuilder(linearDecayFunction("geo", lonlat, "1000km")), + new FilterFunctionBuilder(linearDecayFunction("num", numDocs, numDocs / 2.0)) } + ).scoreMode(ScoreMode.MULTIPLY).boostMode(CombineFunction.REPLACE) + ) + ) + ), + response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getHits().length, equalTo(numDocs)); + double[] scores = new double[numDocs]; + for (int i = 0; i < numDocs; i++) { + scores[Integer.parseInt(sh.getAt(i).getId())] = sh.getAt(i).getScore(); + } + for (int i = 0; i < numDocs - 1; i++) { + assertThat(scores[i], lessThan(scores[i + 1])); + } + } ); - - SearchResponse sr = response.actionGet(); - assertNoFailures(sr); - SearchHits sh = sr.getHits(); - assertThat(sh.getHits().length, equalTo(numDocs)); - double[] scores = new double[numDocs]; - for (int i = 0; i < numDocs; i++) { - scores[Integer.parseInt(sh.getAt(i).getId())] = sh.getAt(i).getScore(); - } - for (int i = 0; i < numDocs - 1; i++) { - assertThat(scores[i], lessThan(scores[i + 1])); - } } public void testParsingExceptionIfFieldDoesNotExist() throws Exception { @@ -953,23 +1008,22 @@ public void testParsingExceptionIfFieldDoesNotExist() throws Exception { List lonlat = new ArrayList<>(); lonlat.add(100f); lonlat.add(110f); - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDocs) - .query( - functionScoreQuery(termQuery("test", "value"), linearDecayFunction("type.geo", lonlat, "1000km")).scoreMode( - FunctionScoreQuery.ScoreMode.MULTIPLY + + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().size(numDocs) + .query( + functionScoreQuery(termQuery("test", "value"), linearDecayFunction("type.geo", lonlat, "1000km")).scoreMode( + FunctionScoreQuery.ScoreMode.MULTIPLY + ) ) - ) - ) + ) + ).actionGet() ); - try { - response.actionGet(); - fail("Expected SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException e) { - assertThat(e.getMessage(), is("all shards failed")); - } + assertThat(e.getMessage(), is("all shards failed")); } public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { @@ -996,20 +1050,20 @@ 
public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { ).actionGet(); refresh(); // so, we indexed a string field, but now we try to score a num field - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 0.5)).scoreMode(ScoreMode.MULTIPLY) + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 0.5)).scoreMode( + ScoreMode.MULTIPLY + ) + ) ) - ) + ).actionGet() ); - try { - response.actionGet(); - fail("Expected SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException e) { - assertThat(e.getMessage(), is("all shards failed")); - } + assertThat(e.getMessage(), is("all shards failed")); } public void testNoQueryGiven() throws Exception { @@ -1033,15 +1087,17 @@ public void testNoQueryGiven() throws Exception { .actionGet(); refresh(); // so, we indexed a string field, but now we try to score a num field - ActionFuture response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(linearDecayFunction("num", 1, 0.5)).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + assertResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().query( + functionScoreQuery(linearDecayFunction("num", 1, 0.5)).scoreMode(FunctionScoreQuery.ScoreMode.MULTIPLY) + ) ) - ) + ), + response -> {} ); - response.actionGet(); } public void testMultiFieldOptions() throws Exception { @@ -1099,41 +1155,47 @@ public void testMultiFieldOptions() throws Exception { indexRandom(true, doc1, doc2); - ActionFuture response = client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))); - SearchResponse sr = response.actionGet(); - assertSearchHits(sr, "1", "2"); - SearchHits sh = sr.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { + assertSearchHits(response, "1", "2"); + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (2))); + }); List lonlat = new ArrayList<>(); lonlat.add(20f); lonlat.add(10f); - response = client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)) + assertResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)) + ) ) - ) + ), + response -> { + assertSearchHits(response, "1", "2"); + SearchHits sh = response.getHits(); + + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + } ); - sr = response.actionGet(); - assertSearchHits(sr, "1", "2"); - sh = sr.getHits(); - - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); - response = client().search( - new SearchRequest(new String[] 
{}).source( - searchSource().query( - functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)) + assertResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)) + ) ) - ) + ), + response -> { + assertSearchHits(response, "1", "2"); + SearchHits sh = response.getHits(); + + assertThat(sh.getAt(0).getId(), equalTo("2")); + assertThat(sh.getAt(1).getId(), equalTo("1")); + } ); - sr = response.actionGet(); - assertSearchHits(sr, "1", "2"); - sh = sr.getHits(); - - assertThat(sh.getAt(0).getId(), equalTo("2")); - assertThat(sh.getAt(1).getId(), equalTo("1")); // Now test AVG and SUM @@ -1149,30 +1211,36 @@ public void testMultiFieldOptions() throws Exception { .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()); indexRandom(true, doc1, doc2); - response = client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)) + assertResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query( + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)) + ) ) - ) + ), + response -> { + assertSearchHits(response, "1", "2"); + SearchHits sh = response.getHits(); + + assertThat(sh.getAt(0).getId(), equalTo("2")); + assertThat(sh.getAt(1).getId(), equalTo("1")); + assertThat(1.0 - sh.getAt(0).getScore(), closeTo((1.0 - sh.getAt(1).getScore()) / 3.0, 1.e-6d)); + } ); - sr = response.actionGet(); - assertSearchHits(sr, "1", "2"); - sh = sr.getHits(); - - assertThat(sh.getAt(0).getId(), equalTo("2")); - assertThat(sh.getAt(1).getId(), equalTo("1")); - assertThat(1.0 - sh.getAt(0).getScore(), closeTo((1.0 - sh.getAt(1).getScore()) / 3.0, 1.e-6d)); - response = client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)) + assertResponse( + client().search( + new SearchRequest(new String[] {}).source( + searchSource().query( + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)) + ) ) - ) + ), + response -> { + assertSearchHits(response, "1", "2"); + SearchHits sh = response.getHits(); + assertThat((double) (sh.getAt(0).getScore()), closeTo((sh.getAt(1).getScore()), 1.e-6d)); + } ); - sr = response.actionGet(); - assertSearchHits(sr, "1", "2"); - sh = sr.getHits(); - assertThat((double) (sh.getAt(0).getScore()), closeTo((sh.getAt(1).getScore()), 1.e-6d)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index e9ce09f7455a2..6353c34491326 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.settings.Settings; @@ -41,12 +40,13 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -112,7 +112,7 @@ protected Collection> nodePlugins() { return Arrays.asList(ExplainableScriptPlugin.class); } - public void testExplainScript() throws InterruptedException, IOException { + public void testExplainScript() throws InterruptedException, IOException, ExecutionException { List indexRequests = new ArrayList<>(); for (int i = 0; i < 20; i++) { indexRequests.add( @@ -124,28 +124,30 @@ public void testExplainScript() throws InterruptedException, IOException { indexRandom(true, true, indexRequests); client().admin().indices().prepareRefresh().get(); ensureYellow(); - SearchResponse response = client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().explain(true) - .query( - functionScoreQuery( - termQuery("text", "text"), - scriptFunction(new Script(ScriptType.INLINE, "test", "explainable_script", Collections.emptyMap())) - ).boostMode(CombineFunction.REPLACE) - ) - ) - ).actionGet(); - - assertNoFailures(response); - SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); - int idCounter = 19; - for (SearchHit hit : hits.getHits()) { - assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); - assertThat(hit.getExplanation().toString(), containsString(Double.toString(idCounter))); - assertThat(hit.getExplanation().getDetails().length, equalTo(2)); - idCounter--; - } + assertNoFailuresAndResponse( + client().search( + new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery( + termQuery("text", "text"), + scriptFunction(new Script(ScriptType.INLINE, "test", "explainable_script", Collections.emptyMap())) + ).boostMode(CombineFunction.REPLACE) + ) + ) + ), + response -> { + SearchHits hits = response.getHits(); + assertThat(hits.getTotalHits().value, equalTo(20L)); + int idCounter = 19; + for (SearchHit hit : hits.getHits()) { + assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); + assertThat(hit.getExplanation().toString(), containsString(Double.toString(idCounter))); + assertThat(hit.getExplanation().getDetails().length, equalTo(2)); + idCounter--; + } + } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java index 61cccfdf114b1..238f5b873a8dd 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -9,9 +9,9 @@ package org.elasticsearch.search.functionscore; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.io.IOException; @@ -20,8 +20,8 @@ import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; /** @@ -88,10 +88,11 @@ public void testFieldValueFactor() throws IOException { // doc 3 doesn't have a "test" field, so an exception will be thrown try { - SearchResponse response = prepareSearch("test").setExplain(randomBoolean()) - .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("test"))) - .get(); - assertFailures(response); + assertResponse( + prepareSearch("test").setExplain(randomBoolean()) + .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("test"))), + ElasticsearchAssertions::assertFailures + ); } catch (SearchPhaseExecutionException e) { // We are expecting an exception, because 3 has no field } @@ -111,30 +112,32 @@ public void testFieldValueFactor() throws IOException { ); // field is not mapped but we're defaulting it to 100 so all documents should have the same score - SearchResponse response = prepareSearch("test").setExplain(randomBoolean()) - .setQuery( - functionScoreQuery( - matchAllQuery(), - fieldValueFactorFunction("notmapped").modifier(FieldValueFactorFunction.Modifier.RECIPROCAL).missing(100) - ) - ) - .get(); - assertEquals(response.getHits().getAt(0).getScore(), response.getHits().getAt(2).getScore(), 0); + assertResponse( + prepareSearch("test").setExplain(randomBoolean()) + .setQuery( + functionScoreQuery( + matchAllQuery(), + fieldValueFactorFunction("notmapped").modifier(FieldValueFactorFunction.Modifier.RECIPROCAL).missing(100) + ) + ), + response -> assertEquals(response.getHits().getAt(0).getScore(), response.getHits().getAt(2).getScore(), 0) + ); client().prepareIndex("test").setId("2").setSource("test", -1, "body", "foo").get(); refresh(); // -1 divided by 0 is infinity, which should provoke an exception. 
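The exception-path tests in the DecayFunctionScoreIT hunks above get the same treatment in miniature: a `try { ...actionGet(); fail(...); } catch (...)` block becomes a single `expectThrows` call, so no response object is ever created on an unexpectedly successful path. A method-body sketch of that conversion, with a hypothetical `runInvalidSearch()` standing in for the concrete `client().search(...)` call in each test:

```java
// Old shape: fail() is easy to forget, and a search that unexpectedly
// succeeds leaves its response unreleased.
try {
    runInvalidSearch().actionGet();
    fail("Expected SearchPhaseExecutionException");
} catch (SearchPhaseExecutionException e) {
    assertThat(e.getMessage(), is("all shards failed"));
}

// New shape: the exception is asserted and returned in one step.
SearchPhaseExecutionException e = expectThrows(
    SearchPhaseExecutionException.class,
    () -> runInvalidSearch().actionGet()
);
assertThat(e.getMessage(), is("all shards failed"));
```

The FunctionScoreFieldValueIT hunk that resumes below keeps an outer try/catch for a different reason: as its own comment notes, the bad query may surface either as shard failures in a response or as a thrown exception, depending on where it executes.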
         try {
-            response = prepareSearch("test").setExplain(randomBoolean())
-                .setQuery(
-                    functionScoreQuery(
-                        simpleQueryStringQuery("foo"),
-                        fieldValueFactorFunction("test").modifier(FieldValueFactorFunction.Modifier.RECIPROCAL).factor(0)
-                    )
-                )
-                .get();
-            assertFailures(response);
+            assertResponse(
+                prepareSearch("test").setExplain(randomBoolean())
+                    .setQuery(
+                        functionScoreQuery(
+                            simpleQueryStringQuery("foo"),
+                            fieldValueFactorFunction("test").modifier(FieldValueFactorFunction.Modifier.RECIPROCAL).factor(0)
+                        )
+                    ),
+                ElasticsearchAssertions::assertFailures
+            );
         } catch (SearchPhaseExecutionException e) {
             // This is fine, the query will throw an exception if executed
             // locally, instead of just having failures
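Where the old code fetched a response only to call assertFailures on it, the rewritten try blocks pass the assertion itself as the consumer (ElasticsearchAssertions::assertFailures); the catch remains because the failure may instead surface as a SearchPhaseExecutionException before any response exists. A small, purely illustrative reminder that a method reference and a lambda are interchangeable as the consumer (hypothetical names, plain Java, no Elasticsearch types):

import java.util.function.Consumer;

class ConsumerShapes {
    static void expectShardFailures(String response) {
        if (response.contains("failed") == false) throw new AssertionError("expected shard failures");
    }

    static final Consumer<String> AS_METHOD_REF = ConsumerShapes::expectShardFailures;
    static final Consumer<String> AS_LAMBDA = response -> expectShardFailures(response);
}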
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java
index e32abeb481a2a..d2f68d8dd1909 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java
@@ -41,6 +41,8 @@
 import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -76,7 +78,7 @@ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
         }
     }
 
-    public void testScriptScoresNested() throws IOException {
+    public void testScriptScoresNested() throws Exception {
         createIndex(INDEX);
         index(INDEX, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject());
         refresh();
@@ -84,39 +86,46 @@ public void testScriptScoresNested() throws IOException {
         Script scriptOne = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1", Collections.emptyMap());
         Script scriptTwo = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get score value", Collections.emptyMap());
 
-        SearchResponse response = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(
-                    functionScoreQuery(
-                        functionScoreQuery(functionScoreQuery(scriptFunction(scriptOne)), scriptFunction(scriptTwo)),
-                        scriptFunction(scriptTwo)
+        assertNoFailuresAndResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(
+                        functionScoreQuery(
+                            functionScoreQuery(functionScoreQuery(scriptFunction(scriptOne)), scriptFunction(scriptTwo)),
+                            scriptFunction(scriptTwo)
+                        )
                     )
                 )
-            )
-        ).actionGet();
-        assertNoFailures(response);
-        assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f));
+            ),
+            response -> assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f))
+        );
     }
 
-    public void testScriptScoresWithAgg() throws IOException {
+    public void testScriptScoresWithAgg() throws Exception {
         createIndex(INDEX);
         index(INDEX, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject());
         refresh();
 
         Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get score value", Collections.emptyMap());
 
-        SearchResponse response = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(functionScoreQuery(scriptFunction(script))).aggregation(terms("score_agg").script(script))
-            )
-        ).actionGet();
-        assertNoFailures(response);
-        assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f));
-        assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0"));
-        assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L));
+        assertNoFailuresAndResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(functionScoreQuery(scriptFunction(script))).aggregation(terms("score_agg").script(script))
+                )
+            ),
+            response -> {
+                assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f));
+                assertThat(
+                    ((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(),
+                    equalTo("1.0")
+                );
+                assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L));
+            }
+        );
     }
 
-    public void testMinScoreFunctionScoreBasic() throws IOException {
+    public void testMinScoreFunctionScoreBasic() throws Exception {
         float score = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, ESTestCase::randomFloat);
         float minScore = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, ESTestCase::randomFloat);
         index(
@@ -130,34 +139,42 @@ public void testMinScoreFunctionScoreBasic() throws IOException {
         ensureYellow();
 
         Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['random_score']", Collections.emptyMap());
-        SearchResponse searchResponse = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore))
-            )
-        ).actionGet();
-        if (score < minScore) {
-            assertThat(searchResponse.getHits().getTotalHits().value, is(0L));
-        } else {
-            assertThat(searchResponse.getHits().getTotalHits().value, is(1L));
-        }
+        assertResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore))
+                )
+            ),
+            response -> {
+                if (score < minScore) {
+                    assertThat(response.getHits().getTotalHits().value, is(0L));
+                } else {
+                    assertThat(response.getHits().getTotalHits().value, is(1L));
+                }
+            }
+        );
 
-        searchResponse = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(
-                    functionScoreQuery(
-                        new MatchAllQueryBuilder(),
-                        new FilterFunctionBuilder[] {
-                            new FilterFunctionBuilder(scriptFunction(script)),
-                            new FilterFunctionBuilder(scriptFunction(script)) }
-                    ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore)
+        assertResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(
+                        functionScoreQuery(
+                            new MatchAllQueryBuilder(),
+                            new FilterFunctionBuilder[] {
+                                new FilterFunctionBuilder(scriptFunction(script)),
+                                new FilterFunctionBuilder(scriptFunction(script)) }
+                        ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore)
+                    )
                 )
-            )
-        ).actionGet();
-        if (score < minScore) {
-            assertThat(searchResponse.getHits().getTotalHits().value, is(0L));
-        } else {
-            assertThat(searchResponse.getHits().getTotalHits().value, is(1L));
-        }
+            ),
+            response -> {
+                if (score < minScore) {
+                    assertThat(response.getHits().getTotalHits().value, is(0L));
+                } else {
+                    assertThat(response.getHits().getTotalHits().value, is(1L));
+                }
+            }
+        );
     }
 
     public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOException, ExecutionException, InterruptedException {
@@ -178,26 +195,33 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept
             numMatchingDocs = numDocs;
         }
 
-        SearchResponse searchResponse = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore)).size(numDocs)
-            )
-        ).actionGet();
-        assertMinScoreSearchResponses(numDocs, searchResponse, numMatchingDocs);
-
-        searchResponse = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().query(
-                    functionScoreQuery(
-                        new MatchAllQueryBuilder(),
-                        new FilterFunctionBuilder[] {
-                            new FilterFunctionBuilder(scriptFunction(script)),
-                            new FilterFunctionBuilder(scriptFunction(script)) }
-                    ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore)
-                ).size(numDocs)
-            )
-        ).actionGet();
-        assertMinScoreSearchResponses(numDocs, searchResponse, numMatchingDocs);
+        final int finalNumMatchingDocs = numMatchingDocs;
+
+        assertResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore)).size(numDocs)
+                )
+            ),
+            response -> assertMinScoreSearchResponses(numDocs, response, finalNumMatchingDocs)
+        );
+
+        assertResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().query(
+                        functionScoreQuery(
+                            new MatchAllQueryBuilder(),
+                            new FilterFunctionBuilder[] {
+                                new FilterFunctionBuilder(scriptFunction(script)),
+                                new FilterFunctionBuilder(scriptFunction(script)) }
+                        ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore)
+                    ).size(numDocs)
+                )
+            ),
+            response -> assertMinScoreSearchResponses(numDocs, response, finalNumMatchingDocs)
+        );
+
     }
 
     protected void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) {
@@ -216,35 +240,38 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int
         index("test", "1", jsonBuilder().startObject().field("text", "test text").endObject());
         refresh();
 
-        SearchResponse termQuery = client().search(
-            new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))
-        ).get();
-        assertNoFailures(termQuery);
-        assertThat(termQuery.getHits().getTotalHits().value, equalTo(1L));
-        float termQueryScore = termQuery.getHits().getAt(0).getScore();
-
+        float[] termQueryScore = new float[1];
+        assertNoFailuresAndResponse(
+            client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                termQueryScore[0] = response.getHits().getAt(0).getScore();
+            }
+        );
         for (CombineFunction combineFunction : CombineFunction.values()) {
-            testMinScoreApplied(combineFunction, termQueryScore);
+            testMinScoreApplied(combineFunction, termQueryScore[0]);
         }
     }
 
     protected void testMinScoreApplied(CombineFunction boostMode, float expectedScore) throws InterruptedException, ExecutionException {
-        SearchResponse response = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(0.1f))
-            )
-        ).get();
-        assertNoFailures(response);
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore));
-
-        response = client().search(
-            new SearchRequest(new String[] {}).source(
-                searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f))
-            )
-        ).get();
-
-        assertNoFailures(response);
-        assertThat(response.getHits().getTotalHits().value, equalTo(0L));
+        assertNoFailuresAndResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(0.1f))
+                )
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore));
+            }
+        );
+        assertNoFailuresAndResponse(
+            client().search(
+                new SearchRequest(new String[] {}).source(
+                    searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f))
+                )
+            ),
+            response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L))
+        );
     }
 }
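Aside on two idioms the FunctionScoreIT hunks above introduce: `final int finalNumMatchingDocs = numMatchingDocs;` and `float[] termQueryScore = new float[1];` both exist because Java lambdas may only capture effectively final locals. A plain-Java illustration of both (no Elasticsearch types involved):

class CaptureIdioms {
    static float example(int numMatchingDocs) {
        numMatchingDocs++;                                 // reassigned, so no longer effectively final
        final int finalNumMatchingDocs = numMatchingDocs;  // snapshot the value so the lambda can capture it
        float[] termQueryScore = new float[1];             // one-element array: lets the lambda write a result out
        Runnable consumer = () -> termQueryScore[0] = finalNumMatchingDocs;
        consumer.run();
        return termQueryScore[0];
    }
}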
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java
index 5c9c54a0d3b19..396af7e8501cf 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java
@@ -11,10 +11,8 @@
 import org.apache.lucene.search.Explanation;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
-import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -29,7 +27,6 @@
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -40,6 +37,7 @@
 import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 
@@ -81,19 +79,19 @@ public void testPlugin() throws Exception {
         client().admin().indices().prepareRefresh().get();
         DecayFunctionBuilder<?> gfb = new CustomDistanceScoreBuilder("num1", "2013-05-28", "+1d");
 
-        ActionFuture<SearchResponse> response = client().search(
-            new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH)
-                .source(searchSource().explain(false).query(functionScoreQuery(termQuery("test", "value"), gfb)))
+        assertNoFailuresAndResponse(
+            client().search(
+                new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH)
+                    .source(searchSource().explain(false).query(functionScoreQuery(termQuery("test", "value"), gfb)))
+            ),
+            response -> {
+                SearchHits sh = response.getHits();
+                assertThat(sh.getHits().length, equalTo(2));
+                assertThat(sh.getAt(0).getId(), equalTo("1"));
+                assertThat(sh.getAt(1).getId(), equalTo("2"));
+            }
         );
-        SearchResponse sr = response.actionGet();
-        ElasticsearchAssertions.assertNoFailures(sr);
-        SearchHits sh = sr.getHits();
-
-        assertThat(sh.getHits().length, equalTo(2));
-        assertThat(sh.getAt(0).getId(), equalTo("1"));
-        assertThat(sh.getAt(1).getId(), equalTo("2"));
-
     }
 
     public static class CustomDistanceScorePlugin extends Plugin implements SearchPlugin {
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
index 14df03bb86e8d..f191f627dcd7f 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
@@ -49,6 +49,8 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFourthHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
@@ -72,29 +74,31 @@ public void testEnforceWindowSize() {
         int numShards = getNumShards("test").numPrimaries;
         for (int j = 0; j < iters; j++) {
-            SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchAllQuery())
-                .setRescorer(
-                    new QueryRescorerBuilder(
-                        functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(
-                            CombineFunction.REPLACE
-                        ).queryName("hello world")
-                    ).setQueryWeight(0.0f).setRescoreQueryWeight(1.0f),
-                    1
-                )
-                .setSize(randomIntBetween(2, 10))
-                .get();
-            assertNoFailures(searchResponse);
-            assertFirstHit(searchResponse, hasScore(100.f));
-            int numDocsWith100AsAScore = 0;
-            for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
-                float score = searchResponse.getHits().getHits()[i].getScore();
-                if (score == 100f) {
-                    numDocsWith100AsAScore += 1;
+            assertNoFailuresAndResponse(
+                prepareSearch().setQuery(QueryBuilders.matchAllQuery())
+                    .setRescorer(
+                        new QueryRescorerBuilder(
+                            functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(
+                                CombineFunction.REPLACE
+                            ).queryName("hello world")
+                        ).setQueryWeight(0.0f).setRescoreQueryWeight(1.0f),
+                        1
+                    )
+                    .setSize(randomIntBetween(2, 10)),
+                response -> {
+                    assertFirstHit(response, hasScore(100.f));
+                    int numDocsWith100AsAScore = 0;
+                    for (int i = 0; i < response.getHits().getHits().length; i++) {
+                        float score = response.getHits().getHits()[i].getScore();
+                        if (score == 100f) {
+                            numDocsWith100AsAScore += 1;
+                        }
+                    }
+                    assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                    // we cannot assert that they are equal since some shards might not have docs at all
+                    assertThat(numDocsWith100AsAScore, lessThanOrEqualTo(numShards));
                 }
-            }
-            assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-            // we cannot assert that they are equal since some shards might not have docs at all
-            assertThat(numDocsWith100AsAScore, lessThanOrEqualTo(numShards));
+            );
         }
     }
 
@@ -121,39 +125,41 @@ public void testRescorePhrase() throws Exception {
             .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree")
             .get();
         refresh();
-        SearchResponse searchResponse = prepareSearch().setQuery(
-            QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)
-        )
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)).setRescoreQueryWeight(2),
-                5
-            )
-            .get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
-        assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2"));
-
-        searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
-            .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(3)), 5)
-            .get();
-
-        assertHitCount(searchResponse, 3);
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("2"));
-        assertThirdHit(searchResponse, hasId("3"));
-
-        searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
-            .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown")), 5)
-            .get();
-
-        assertHitCount(searchResponse, 3);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("2"));
-        assertThirdHit(searchResponse, hasId("3"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)).setRescoreQueryWeight(2),
+                    5
+                ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
+                assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+                assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            }
+        );
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(3)), 5),
+            response -> {
+                assertHitCount(response, 3);
+                assertFirstHit(response, hasId("1"));
+                assertSecondHit(response, hasId("2"));
+                assertThirdHit(response, hasId("3"));
+            }
+        );
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown")), 5),
+            response -> {
+                assertHitCount(response, 3);
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("1"));
+                assertSecondHit(response, hasId("2"));
+                assertThirdHit(response, hasId("3"));
+            }
+        );
     }
 
     public void testMoreDocs() throws Exception {
@@ -189,62 +195,61 @@ public void testMoreDocs() throws Exception {
         client().prepareIndex("test").setId("11").setSource("field1", "2st street boston massachusetts").get();
         client().prepareIndex("test").setId("12").setSource("field1", "3st street boston massachusetts").get();
         indicesAdmin().prepareRefresh("test").get();
-        SearchResponse searchResponse = prepareSearch().setQuery(
-            QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)
-        )
-            .setFrom(0)
-            .setSize(5)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
-                    .setRescoreQueryWeight(2.0f),
-                20
-            )
-            .get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(5));
-        assertHitCount(searchResponse, 9);
-        assertFirstHit(searchResponse, hasId("2"));
-        assertSecondHit(searchResponse, hasId("6"));
-        assertThirdHit(searchResponse, hasId("3"));
-
-        searchResponse = prepareSearch().setQuery(
-            QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)
-        )
-            .setFrom(0)
-            .setSize(5)
-            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
-                    .setRescoreQueryWeight(2.0f),
-                20
-            )
-            .get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(5));
-        assertHitCount(searchResponse, 9);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("2"));
-        assertSecondHit(searchResponse, hasId("6"));
-        assertThirdHit(searchResponse, hasId("3"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
+                .setFrom(0)
+                .setSize(5)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
+                        .setRescoreQueryWeight(2.0f),
+                    20
+                ),
+            response -> {
+                assertThat(response.getHits().getHits().length, equalTo(5));
+                assertHitCount(response, 9);
+                assertFirstHit(response, hasId("2"));
+                assertSecondHit(response, hasId("6"));
+                assertThirdHit(response, hasId("3"));
+            }
+        );
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
+                .setFrom(0)
+                .setSize(5)
+                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
+                        .setRescoreQueryWeight(2.0f),
+                    20
+                ),
+            response -> {
+                assertThat(response.getHits().getHits().length, equalTo(5));
+                assertHitCount(response, 9);
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("2"));
+                assertSecondHit(response, hasId("6"));
+                assertThirdHit(response, hasId("3"));
+            }
+        );
 
         // Make sure non-zero from works:
-        searchResponse = prepareSearch().setQuery(
-            QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)
-        )
-            .setFrom(2)
-            .setSize(5)
-            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
-                    .setRescoreQueryWeight(2.0f),
-                20
-            )
-            .get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(5));
-        assertHitCount(searchResponse, 9);
-        assertThat(searchResponse.getHits().getMaxScore(), greaterThan(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("3"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
+                .setFrom(2)
+                .setSize(5)
+                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
+                        .setRescoreQueryWeight(2.0f),
+                    20
+                ),
+            response -> {
+                assertThat(response.getHits().getHits().length, equalTo(5));
+                assertHitCount(response, 9);
+                assertThat(response.getHits().getMaxScore(), greaterThan(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("3"));
+            }
+        );
     }
 
     // Tests a rescore window smaller than number of hits:
@@ -272,56 +277,59 @@ public void testSmallRescoreWindow() throws Exception {
         client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get();
         indicesAdmin().prepareRefresh("test").get();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
-            .setFrom(0)
-            .setSize(5)
-            .get();
-        assertThat(searchResponse.getHits().getHits().length, equalTo(4));
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("3"));
-        assertSecondHit(searchResponse, hasId("6"));
-        assertThirdHit(searchResponse, hasId("1"));
-        assertFourthHit(searchResponse, hasId("2"));
+        assertResponse(prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts")).setFrom(0).setSize(5), response -> {
+            assertThat(response.getHits().getHits().length, equalTo(4));
+            assertHitCount(response, 4);
+            assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+            assertFirstHit(response, hasId("3"));
+            assertSecondHit(response, hasId("6"));
+            assertThirdHit(response, hasId("1"));
+            assertFourthHit(response, hasId("2"));
+        });
 
         // Now, rescore only top 2 hits w/ proximity:
-        searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
-            .setFrom(0)
-            .setSize(5)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
-                    .setRescoreQueryWeight(2.0f),
-                2
-            )
-            .get();
-        // Only top 2 hits were re-ordered:
-        assertThat(searchResponse.getHits().getHits().length, equalTo(4));
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("6"));
-        assertSecondHit(searchResponse, hasId("3"));
-        assertThirdHit(searchResponse, hasId("1"));
-        assertFourthHit(searchResponse, hasId("2"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
+                .setFrom(0)
+                .setSize(5)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
+                        .setRescoreQueryWeight(2.0f),
+                    2
+                ),
+            response -> {
+                // Only top 2 hits were re-ordered:
+                assertThat(response.getHits().getHits().length, equalTo(4));
+                assertHitCount(response, 4);
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("6"));
+                assertSecondHit(response, hasId("3"));
+                assertThirdHit(response, hasId("1"));
+                assertFourthHit(response, hasId("2"));
+            }
+        );
 
         // Now, rescore only top 3 hits w/ proximity:
-        searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
-            .setFrom(0)
-            .setSize(5)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
-                    .setRescoreQueryWeight(2.0f),
-                3
-            )
-            .get();
-
-        // Only top 3 hits were re-ordered:
-        assertThat(searchResponse.getHits().getHits().length, equalTo(4));
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("6"));
-        assertThirdHit(searchResponse, hasId("3"));
-        assertFourthHit(searchResponse, hasId("2"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
+                .setFrom(0)
+                .setSize(5)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f)
+                        .setRescoreQueryWeight(2.0f),
+                    3
+                ),
+            response -> {
+                // Only top 3 hits were re-ordered:
+                assertThat(response.getHits().getHits().length, equalTo(4));
+                assertHitCount(response, 4);
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("1"));
+                assertSecondHit(response, hasId("6"));
+                assertThirdHit(response, hasId("3"));
+                assertFourthHit(response, hasId("2"));
+            }
+        );
     }
 
     // Tests a rescorer that penalizes the scores:
@@ -349,35 +357,37 @@ public void testRescorerMadeScoresWorse() throws Exception {
         client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get();
         indicesAdmin().prepareRefresh("test").get();
 
-        SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR))
-            .setFrom(0)
-            .setSize(5)
-            .get();
-        assertThat(searchResponse.getHits().getHits().length, equalTo(4));
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("3"));
-        assertSecondHit(searchResponse, hasId("6"));
-        assertThirdHit(searchResponse, hasId("1"));
-        assertFourthHit(searchResponse, hasId("2"));
-
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)).setFrom(0).setSize(5),
+            response -> {
+                assertThat(response.getHits().getHits().length, equalTo(4));
+                assertHitCount(response, 4);
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("3"));
+                assertSecondHit(response, hasId("6"));
+                assertThirdHit(response, hasId("1"));
+                assertFourthHit(response, hasId("2"));
+            }
+        );
         // Now, penalizing rescore (nothing matches the rescore query):
-        searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR))
-            .setFrom(0)
-            .setSize(5)
-            .setRescorer(
-                new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(1.0f)
-                    .setRescoreQueryWeight(-1f),
-                3
-            )
-            .get();
-
-        // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead:
-        assertThat(searchResponse.getHits().getMaxScore(), equalTo(searchResponse.getHits().getHits()[0].getScore()));
-        assertFirstHit(searchResponse, hasId("3"));
-        assertSecondHit(searchResponse, hasId("2"));
-        assertThirdHit(searchResponse, hasId("6"));
-        assertFourthHit(searchResponse, hasId("1"));
+        assertResponse(
+            prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR))
+                .setFrom(0)
+                .setSize(5)
+                .setRescorer(
+                    new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(1.0f)
+                        .setRescoreQueryWeight(-1f),
+                    3
+                ),
+            response -> {
+                // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead:
+                assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+                assertFirstHit(response, hasId("3"));
+                assertSecondHit(response, hasId("2"));
+                assertThirdHit(response, hasId("6"));
+                assertFourthHit(response, hasId("1"));
+            }
+        );
     }
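The testEquivalence hunk that follows needs two live responses at once, so it nests one assertResponse inside the other: the outer consumer holds the plain response while the inner one obtains the rescored response and runs assertEquivalent against both. A self-contained schematic of that shape, with Strings standing in for SearchResponse (the names and stub helper below are placeholders, not ES API):

import java.util.function.Consumer;

class NestedResponseSketch {
    static void assertResponse(String response, Consumer<String> consumer) {
        consumer.accept(response); // stand-in: run assertions, then the real helper would release the response
    }

    public static void main(String[] args) {
        assertResponse("plain", plain ->
            assertResponse("rescored", rescored ->
                // both values are in scope here, mirroring plain/rescored in testEquivalence
                System.out.println(plain + " vs " + rescored)
            )
        );
    }
}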
 
     // Comparator that sorts hits and rescored hits in the same way.
@@ -430,43 +440,46 @@ public void testEquivalence() throws Exception {
             int rescoreWindow = between(1, 3) * resultSize;
             String intToEnglish = English.intToEnglish(between(0, numDocs - 1));
             String query = intToEnglish.split(" ")[0];
-            SearchResponse rescored = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
-                .setPreference("test") // ensure we hit the same shards for tie-breaking
-                .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
-                .setFrom(0)
-                .setSize(resultSize)
-                .setRescorer(
-                    new QueryRescorerBuilder(constantScoreQuery(matchPhraseQuery("field1", intToEnglish).slop(3))).setQueryWeight(1.0f)
-                        // no weight - so we basically use the same score as the actual query
-                        .setRescoreQueryWeight(0.0f),
-                    rescoreWindow
-                )
-                .get();
-
-            SearchResponse plain = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
-                .setPreference("test") // ensure we hit the same shards for tie-breaking
-                .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
-                .setFrom(0)
-                .setSize(resultSize)
-                .get();
-
-            // check equivalence
-            assertEquivalent(query, plain, rescored);
-            rescored = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
-                .setPreference("test") // ensure we hit the same shards for tie-breaking
-                .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
-                .setFrom(0)
-                .setSize(resultSize)
-                .setRescorer(
-                    new QueryRescorerBuilder(constantScoreQuery(matchPhraseQuery("field1", "not in the index").slop(3))).setQueryWeight(
-                        1.0f
-                    ).setRescoreQueryWeight(1.0f),
-                    rescoreWindow
-                )
-                .get();
-            // check equivalence
-            assertEquivalent(query, plain, rescored);
+            assertResponse(
+                prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
+                    .setPreference("test") // ensure we hit the same shards for tie-breaking
+                    .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
+                    .setFrom(0)
+                    .setSize(resultSize),
+                plain -> {
+                    assertResponse(
+                        prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
+                            .setPreference("test") // ensure we hit the same shards for tie-breaking
+                            .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
+                            .setFrom(0)
+                            .setSize(resultSize)
+                            .setRescorer(
+                                new QueryRescorerBuilder(constantScoreQuery(matchPhraseQuery("field1", intToEnglish).slop(3)))
+                                    .setQueryWeight(1.0f)
+                                    // no weight - so we basically use the same score as the actual query
+                                    .setRescoreQueryWeight(0.0f),
+                                rescoreWindow
+                            ),
+                        rescored -> assertEquivalent(query, plain, rescored)
+                    ); // check equivalence
+
+                    assertResponse(
+                        prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
+                            .setPreference("test") // ensure we hit the same shards for tie-breaking
+                            .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
+                            .setFrom(0)
+                            .setSize(resultSize)
+                            .setRescorer(
+                                new QueryRescorerBuilder(constantScoreQuery(matchPhraseQuery("field1", "not in the index").slop(3)))
+                                    .setQueryWeight(1.0f)
+                                    .setRescoreQueryWeight(1.0f),
+                                rescoreWindow
+                            ),
+                        rescored -> assertEquivalent(query, plain, rescored)
+                    ); // check equivalence
+                }
+            );
         }
     }
 
@@ -495,39 +508,42 @@ public void testExplain() throws Exception {
         refresh();
 
         {
-            SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
-                .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
-                .setRescorer(
-                    new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)).setQueryWeight(0.5f)
-                        .setRescoreQueryWeight(0.4f),
-                    5
-                )
-                .setExplain(true)
-                .get();
-            assertHitCount(searchResponse, 3);
-            assertFirstHit(searchResponse, hasId("1"));
-            assertSecondHit(searchResponse, hasId("2"));
-            assertThirdHit(searchResponse, hasId("3"));
-
-            for (int i = 0; i < 3; i++) {
-                assertThat(searchResponse.getHits().getAt(i).getExplanation(), notNullValue());
-                assertThat(searchResponse.getHits().getAt(i).getExplanation().isMatch(), equalTo(true));
-                assertThat(searchResponse.getHits().getAt(i).getExplanation().getDetails().length, equalTo(2));
-                assertThat(searchResponse.getHits().getAt(i).getExplanation().getDetails()[0].isMatch(), equalTo(true));
-                if (i == 2) {
-                    assertThat(searchResponse.getHits().getAt(i).getExplanation().getDetails()[1].getValue(), equalTo(0.5f));
-                } else {
-                    assertThat(searchResponse.getHits().getAt(i).getExplanation().getDescription(), equalTo("sum of:"));
-                    assertThat(
-                        searchResponse.getHits().getAt(i).getExplanation().getDetails()[0].getDetails()[1].getValue(),
-                        equalTo(0.5f)
-                    );
-                    assertThat(
-                        searchResponse.getHits().getAt(i).getExplanation().getDetails()[1].getDetails()[1].getValue(),
-                        equalTo(0.4f)
-                    );
+            assertResponse(
+                prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+                    .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                    .setRescorer(
+                        new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)).setQueryWeight(0.5f)
+                            .setRescoreQueryWeight(0.4f),
+                        5
+                    )
+                    .setExplain(true),
+                response -> {
+                    assertHitCount(response, 3);
+                    assertFirstHit(response, hasId("1"));
+                    assertSecondHit(response, hasId("2"));
+                    assertThirdHit(response, hasId("3"));
+
+                    for (int i = 0; i < 3; i++) {
+                        assertThat(response.getHits().getAt(i).getExplanation(), notNullValue());
+                        assertThat(response.getHits().getAt(i).getExplanation().isMatch(), equalTo(true));
+                        assertThat(response.getHits().getAt(i).getExplanation().getDetails().length, equalTo(2));
+                        assertThat(response.getHits().getAt(i).getExplanation().getDetails()[0].isMatch(), equalTo(true));
+                        if (i == 2) {
+                            assertThat(response.getHits().getAt(i).getExplanation().getDetails()[1].getValue(), equalTo(0.5f));
+                        } else {
+                            assertThat(response.getHits().getAt(i).getExplanation().getDescription(), equalTo("sum of:"));
+                            assertThat(
+                                response.getHits().getAt(i).getExplanation().getDetails()[0].getDetails()[1].getValue(),
+                                equalTo(0.5f)
+                            );
+                            assertThat(
+                                response.getHits().getAt(i).getExplanation().getDetails()[1].getDetails()[1].getValue(),
+                                equalTo(0.4f)
+                            );
+                        }
+                    }
                 }
-            }
+            );
         }
 
         String[] scoreModes = new String[] { "max", "min", "avg", "total", "multiply", "" };
@@ -540,21 +556,26 @@ public void testExplain() throws Exception {
                 if ("".equals(scoreModes[innerMode]) == false) {
                     innerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[innerMode]));
                 }
-
-                SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
-                    .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
-                    .setRescorer(innerRescoreQuery, 5)
-                    .setExplain(true)
-                    .get();
-                assertHitCount(searchResponse, 3);
-                assertFirstHit(searchResponse, hasId("1"));
-                assertSecondHit(searchResponse, hasId("2"));
-                assertThirdHit(searchResponse, hasId("3"));
-
-                for (int j = 0; j < 3; j++) {
-                    assertThat(searchResponse.getHits().getAt(j).getExplanation().getDescription(), equalTo(descriptionModes[innerMode]));
-                }
-
+                final int finalInnerMode = innerMode;
+                assertResponse(
+                    prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+                        .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                        .setRescorer(innerRescoreQuery, 5)
+                        .setExplain(true),
+                    response -> {
+                        assertHitCount(response, 3);
+                        assertFirstHit(response, hasId("1"));
+                        assertSecondHit(response, hasId("2"));
+                        assertThirdHit(response, hasId("3"));
+
+                        for (int j = 0; j < 3; j++) {
+                            assertThat(
+                                response.getHits().getAt(j).getExplanation().getDescription(),
+                                equalTo(descriptionModes[finalInnerMode])
+                            );
+                        }
+                    }
+                );
                 for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) {
                     QueryRescorerBuilder outerRescoreQuery = new QueryRescorerBuilder(matchQuery("field1", "the quick brown").boost(4.0f))
                         .setQueryWeight(0.5f)
@@ -563,23 +584,29 @@ public void testExplain() throws Exception {
                     if ("".equals(scoreModes[outerMode]) == false) {
                         outerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[outerMode]));
                     }
-
-                    searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
-                        .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
-                        .addRescorer(innerRescoreQuery, 5)
-                        .addRescorer(outerRescoreQuery.windowSize(10))
-                        .setExplain(true)
-                        .get();
-                    assertHitCount(searchResponse, 3);
-                    assertFirstHit(searchResponse, hasId("1"));
-                    assertSecondHit(searchResponse, hasId("2"));
-                    assertThirdHit(searchResponse, hasId("3"));
-
-                    for (int j = 0; j < 3; j++) {
-                        Explanation explanation = searchResponse.getHits().getAt(j).getExplanation();
-                        assertThat(explanation.getDescription(), equalTo(descriptionModes[outerMode]));
-                        assertThat(explanation.getDetails()[0].getDetails()[0].getDescription(), equalTo(descriptionModes[innerMode]));
-                    }
+                    final int finalOuterMode = outerMode;
+                    assertResponse(
+                        prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+                            .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
+                            .addRescorer(innerRescoreQuery, 5)
+                            .addRescorer(outerRescoreQuery.windowSize(10))
+                            .setExplain(true),
+                        response -> {
+                            assertHitCount(response, 3);
+                            assertFirstHit(response, hasId("1"));
+                            assertSecondHit(response, hasId("2"));
+                            assertThirdHit(response, hasId("3"));
+
+                            for (int j = 0; j < 3; j++) {
+                                Explanation explanation = response.getHits().getAt(j).getExplanation();
+                                assertThat(explanation.getDescription(), equalTo(descriptionModes[finalOuterMode]));
+                                assertThat(
+                                    explanation.getDetails()[0].getDetails()[0].getDescription(),
+                                    equalTo(descriptionModes[finalInnerMode])
+                                );
+                            }
+                        }
+                    );
                 }
             }
         }
@@ -617,58 +644,66 @@ public void testScoring() throws Exception {
                 if ("".equals(scoreMode) == false) {
                     rescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreMode));
                 }
-
-                SearchResponse rescored = prepareSearch().setPreference("test") // ensure we hit the same shards for tie-breaking
-                    .setFrom(0)
-                    .setSize(10)
-                    .setQuery(query)
-                    .setRescorer(rescoreQuery, 50)
-                    .get();
-
-                assertHitCount(rescored, 4);
-
-                assertThat(rescored.getHits().getMaxScore(), equalTo(rescored.getHits().getHits()[0].getScore()));
-                if ("total".equals(scoreMode) || "".equals(scoreMode)) {
-                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
-                    assertSecondHit(rescored, hasId(String.valueOf(i)));
-                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
-                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight + 7.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight + 5.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight + 0.0f * secondaryWeight));
-                } else if ("max".equals(scoreMode)) {
-                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
-                    assertSecondHit(rescored, hasId(String.valueOf(i)));
-                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
-                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(7.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight));
-                } else if ("min".equals(scoreMode)) {
-                    assertFirstHit(rescored, hasId(String.valueOf(i + 2)));
-                    assertSecondHit(rescored, hasId(String.valueOf(i + 1)));
-                    assertThirdHit(rescored, hasId(String.valueOf(i)));
-                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(5.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(3.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(2.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.0f * secondaryWeight));
-                } else if ("avg".equals(scoreMode)) {
-                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
-                    assertSecondHit(rescored, hasId(String.valueOf(i + 2)));
-                    assertThirdHit(rescored, hasId(String.valueOf(i)));
-                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo((3.0f * primaryWeight + 7.0f * secondaryWeight) / 2.0f));
-                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo((2.0f * primaryWeight + 5.0f * secondaryWeight) / 2.0f));
-                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo((0.2f * primaryWeight) / 2.0f));
-                } else if ("multiply".equals(scoreMode)) {
-                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
-                    assertSecondHit(rescored, hasId(String.valueOf(i)));
-                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
-                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight * 7.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight * 5.0f * secondaryWeight));
-                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
-                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight * 0.0f * secondaryWeight));
-                }
+                final int finalI = i;
+                assertResponse(
+                    prepareSearch().setPreference("test") // ensure we hit the same shards for tie-breaking
+                        .setFrom(0)
+                        .setSize(10)
+                        .setQuery(query)
+                        .setRescorer(rescoreQuery, 50),
+                    rescored -> {
+                        assertHitCount(rescored, 4);
+
+                        assertThat(rescored.getHits().getMaxScore(), equalTo(rescored.getHits().getHits()[0].getScore()));
+                        if ("total".equals(scoreMode) || "".equals(scoreMode)) {
+                            assertFirstHit(rescored, hasId(String.valueOf(finalI + 1)));
+                            assertSecondHit(rescored, hasId(String.valueOf(finalI)));
+                            assertThirdHit(rescored, hasId(String.valueOf(finalI + 2)));
+                            assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight + 7.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight + 5.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight + 0.0f * secondaryWeight));
+                        } else if ("max".equals(scoreMode)) {
+                            assertFirstHit(rescored, hasId(String.valueOf(finalI + 1)));
+                            assertSecondHit(rescored, hasId(String.valueOf(finalI)));
+                            assertThirdHit(rescored, hasId(String.valueOf(finalI + 2)));
+                            assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(7.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight));
+                        } else if ("min".equals(scoreMode)) {
+                            assertFirstHit(rescored, hasId(String.valueOf(finalI + 2)));
+                            assertSecondHit(rescored, hasId(String.valueOf(finalI + 1)));
+                            assertThirdHit(rescored, hasId(String.valueOf(finalI)));
+                            assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(5.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(3.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(2.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.0f * secondaryWeight));
+                        } else if ("avg".equals(scoreMode)) {
+                            assertFirstHit(rescored, hasId(String.valueOf(finalI + 1)));
+                            assertSecondHit(rescored, hasId(String.valueOf(finalI + 2)));
+                            assertThirdHit(rescored, hasId(String.valueOf(finalI)));
+                            assertThat(
+                                rescored.getHits().getHits()[0].getScore(),
+                                equalTo((3.0f * primaryWeight + 7.0f * secondaryWeight) / 2.0f)
+                            );
+                            assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * primaryWeight));
+                            assertThat(
+                                rescored.getHits().getHits()[2].getScore(),
+                                equalTo((2.0f * primaryWeight + 5.0f * secondaryWeight) / 2.0f)
+                            );
+                            assertThat(rescored.getHits().getHits()[3].getScore(), equalTo((0.2f * primaryWeight) / 2.0f));
+                        } else if ("multiply".equals(scoreMode)) {
+                            assertFirstHit(rescored, hasId(String.valueOf(finalI + 1)));
+                            assertSecondHit(rescored, hasId(String.valueOf(finalI)));
+                            assertThirdHit(rescored, hasId(String.valueOf(finalI + 2)));
+                            assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight * 7.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight * 5.0f * secondaryWeight));
+                            assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
+                            assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight * 0.0f * secondaryWeight));
+                        }
+                    }
+                );
             }
         }
     }
@@ -688,13 +723,16 @@ public void testMultipleRescores() throws Exception {
         // First set the rescore window large enough that both rescores take effect
         SearchRequestBuilder request = prepareSearch();
         request.addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, numDocs);
-        SearchResponse response = request.get();
-        assertFirstHit(response, hasId("7"));
-        assertSecondHit(response, hasId("8"));
+        assertResponse(request, response -> {
+            assertFirstHit(response, hasId("7"));
+            assertSecondHit(response, hasId("8"));
+        });
 
         // Now squash the second rescore window so it never gets to see a seven
-        response = request.setSize(1).clearRescorers().addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, 1).get();
-        assertFirstHit(response, hasId("8"));
+        assertResponse(
+            request.setSize(1).clearRescorers().addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, 1),
+            response -> assertFirstHit(response, hasId("8"))
+        );
         // We have no idea what the second hit will be because we didn't get a chance to look for seven
 
         // Now use one rescore to drag the number we're looking for into the window of another
@@ -709,11 +747,12 @@ public void testMultipleRescores() throws Exception {
             )
         ).setScoreMode(QueryRescoreMode.Total);
         request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10);
-        response = request.setSize(2).get();
-        assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
-        assertFirstHit(response, hasId("91"));
-        assertFirstHit(response, hasScore(2001.0f));
-        assertSecondHit(response, hasScore(1001.0f)); // Not sure which one it is but it is ninety something
+        assertResponse(request.setSize(2), response -> {
+            assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
+            assertFirstHit(response, hasId("91"));
+            assertFirstHit(response, hasScore(2001.0f));
+            assertSecondHit(response, hasScore(1001.0f)); // Not sure which one it is but it is ninety something
+        });
     }
 
     private int indexRandomNumbers(String analyzer) throws Exception {
@@ -797,14 +836,17 @@ public void testRescorePhaseWithInvalidSort() throws Exception {
         assertNotNull(exc.getCause());
         assertThat(exc.getCause().getMessage(), containsString("Cannot use [sort] option in conjunction with [rescore]."));
 
-        SearchResponse resp = prepareSearch().addSort(SortBuilders.scoreSort())
-            .setTrackScores(true)
-            .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50)
-            .get();
-        assertThat(resp.getHits().getTotalHits().value, equalTo(5L));
-        assertThat(resp.getHits().getHits().length, equalTo(5));
-        for (SearchHit hit : resp.getHits().getHits()) {
-            assertThat(hit.getScore(), equalTo(101f));
-        }
+        assertResponse(
+            prepareSearch().addSort(SortBuilders.scoreSort())
+                .setTrackScores(true)
+                .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(5L));
+                assertThat(response.getHits().getHits().length, equalTo(5));
+                for (SearchHit hit : response.getHits().getHits()) {
+                    assertThat(hit.getScore(), equalTo(101f));
+                }
+            }
+        );
     }
 }
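QueryRescorerIT above passes both SearchRequestBuilder instances (prepareSearch()...) and ActionFuture results (client().search(...)) to the same assertResponse name, which implies overloads along these lines; this is a hypothetical reconstruction, not the actual test-framework source:

import java.util.function.Consumer;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

class OverloadSketch {
    // Builder overload: execute, then defer to the future overload.
    static void assertResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
        assertResponse(builder.execute(), consumer);
    }

    static void assertResponse(ActionFuture<SearchResponse> future, Consumer<SearchResponse> consumer) {
        SearchResponse response = future.actionGet();
        try {
            consumer.accept(response);
        } finally {
            response.decRef(); // assumed release hook, as in the sketch after the first file
        }
    }
}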
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
index ef8ffcf0d806a..5109491c5faca 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.search.functionscore;
 
 import org.apache.lucene.util.ArrayUtil;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
@@ -37,6 +36,8 @@
 import static org.elasticsearch.script.MockScriptPlugin.NAME;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.containsString;
@@ -97,35 +98,39 @@ public void testConsistentHitsWithSameSeed() throws Exception {
             preference = randomRealisticUnicodeOfLengthBetween(1, 10);
         }
         int innerIters = scaledRandomIntBetween(2, 5);
-        SearchHit[] hits = null;
+        final SearchHit[][] hits = new SearchHit[1][];
         for (int i = 0; i < innerIters; i++) {
-            SearchResponse searchResponse = prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking
-                .setPreference(preference)
-                .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField("foo")))
-                .get();
-            assertThat(
-                "Failures " + Arrays.toString(searchResponse.getShardFailures()),
-                searchResponse.getShardFailures().length,
-                CoreMatchers.equalTo(0)
-            );
-            final int hitCount = searchResponse.getHits().getHits().length;
-            final SearchHit[] currentHits = searchResponse.getHits().getHits();
-            ArrayUtil.timSort(currentHits, (o1, o2) -> {
-                // for tie-breaking we have to resort here since if the score is
-                // identical we rely on collection order which might change.
-                int cmp = Float.compare(o1.getScore(), o2.getScore());
-                return cmp == 0 ? o1.getId().compareTo(o2.getId()) : cmp;
-            });
-            if (i == 0) {
-                assertThat(hits, nullValue());
-                hits = currentHits;
-            } else {
-                assertThat(hits.length, equalTo(searchResponse.getHits().getHits().length));
-                for (int j = 0; j < hitCount; j++) {
-                    assertThat("" + j, currentHits[j].getScore(), equalTo(hits[j].getScore()));
-                    assertThat("" + j, currentHits[j].getId(), equalTo(hits[j].getId()));
+            final int finalI = i;
+            assertResponse(
+                prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking
+                    .setPreference(preference)
+                    .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField("foo"))),
+                response -> {
+                    assertThat(
+                        "Failures " + Arrays.toString(response.getShardFailures()),
+                        response.getShardFailures().length,
+                        CoreMatchers.equalTo(0)
+                    );
+                    final int hitCount = response.getHits().getHits().length;
+                    final SearchHit[] currentHits = response.getHits().getHits();
+                    ArrayUtil.timSort(currentHits, (o1, o2) -> {
+                        // for tie-breaking we have to resort here since if the score is
+                        // identical we rely on collection order which might change.
+                        int cmp = Float.compare(o1.getScore(), o2.getScore());
+                        return cmp == 0 ? o1.getId().compareTo(o2.getId()) : cmp;
+                    });
+                    if (finalI == 0) {
+                        assertThat(hits[0], nullValue());
+                        hits[0] = currentHits;
+                    } else {
+                        assertThat(hits[0].length, equalTo(response.getHits().getHits().length));
+                        for (int j = 0; j < hitCount; j++) {
+                            assertThat("" + j, currentHits[j].getScore(), equalTo(hits[0][j].getScore()));
+                            assertThat("" + j, currentHits[j].getId(), equalTo(hits[0][j].getId()));
+                        }
+                    }
                 }
-            }
+            );
 
             // randomly change some docs to get them in different segments
             int numDocsToChange = randomIntBetween(20, 50);
@@ -165,73 +170,88 @@ public void testScoreAccessWithinScript() throws Exception {
 
         // Test for accessing _score
         Script script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score))", params);
-        SearchResponse resp = prepareSearch("test").setQuery(
-            functionScoreQuery(
-                matchQuery("body", "foo"),
-                new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) }
-            )
-        ).get();
-        assertNoFailures(resp);
-        SearchHit firstHit = resp.getHits().getAt(0);
-        assertThat(firstHit.getScore(), greaterThan(1f));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                functionScoreQuery(
+                    matchQuery("body", "foo"),
+                    new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
+                        new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
+                        new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) }
+                )
+            ),
+            response -> {
+                SearchHit firstHit = response.getHits().getAt(0);
+                assertThat(firstHit.getScore(), greaterThan(1f));
+            }
+        );
 
         // Test for accessing _score.intValue()
         script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.intValue()))", params);
-        resp = prepareSearch("test").setQuery(
-            functionScoreQuery(
-                matchQuery("body", "foo"),
-                new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) }
-            )
-        ).get();
-        assertNoFailures(resp);
-        firstHit = resp.getHits().getAt(0);
-        assertThat(firstHit.getScore(), greaterThan(1f));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                functionScoreQuery(
+                    matchQuery("body", "foo"),
+                    new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
+                        new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
+                        new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) }
+                )
+            ),
+            response -> {
+                SearchHit firstHit = response.getHits().getAt(0);
+                assertThat(firstHit.getScore(), greaterThan(1f));
+            }
+        );
 
         // Test for accessing _score.longValue()
         script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.longValue()))", params);
-        resp = prepareSearch("test").setQuery(
-            functionScoreQuery(
-                matchQuery("body", "foo"),
-                new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
-                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) }
-            )
-        ).get();
-        assertNoFailures(resp);
-        firstHit = resp.getHits().getAt(0);
-        assertThat(firstHit.getScore(), greaterThan(1f));
assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } + ) + ), + response -> { + SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getScore(), greaterThan(1f)); + } + ); // Test for accessing _score.floatValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.floatValue()))", params); - resp = prepareSearch("test").setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) - ).get(); - assertNoFailures(resp); - firstHit = resp.getHits().getAt(0); - assertThat(firstHit.getScore(), greaterThan(1f)); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } + ) + ), + response -> { + SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getScore(), greaterThan(1f)); + } + ); // Test for accessing _score.doubleValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.doubleValue()))", params); - resp = prepareSearch("test").setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) - ).get(); - assertNoFailures(resp); - firstHit = resp.getHits().getAt(0); - assertThat(firstHit.getScore(), greaterThan(1f)); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } + ) + ), + response -> { + SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getScore(), greaterThan(1f)); + } + ); } public void testSeedReportedInExplain() throws Exception { @@ -243,28 +263,33 @@ public void testSeedReportedInExplain() throws Exception { int seed = 12345678; - SearchResponse resp = prepareSearch("test").setQuery( - functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField(SeqNoFieldMapper.NAME)) - ).setExplain(true).get(); - assertNoFailures(resp); - assertEquals(1, resp.getHits().getTotalHits().value); - SearchHit firstHit = resp.getHits().getAt(0); - assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField(SeqNoFieldMapper.NAME))) + .setExplain(true), + response -> { + assertNoFailures(response); + assertEquals(1, response.getHits().getTotalHits().value); 
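// The assertNoFailuresAndResponse(...) helper used throughout these hunks
// plausibly composes the existing assertNoFailures(...) check with the
// response-consuming assertResponse(...) (see the sketch of assertResponse
// further down). The body below is inferred from the call sites in this diff,
// not copied from ElasticsearchAssertions — a sketch, not the implementation:
static void assertNoFailuresAndResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
    assertResponse(builder, response -> {
        assertNoFailures(response); // shard-failure check runs first
        consumer.accept(response);  // then the caller's own assertions
    });
}
// Under this reading, an explicit assertNoFailures(response) inside the
// consumer (as in the testSeedReportedInExplain hunk above) is harmless but
// redundant.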
+ SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); + } + ); } public void testNoDocs() throws Exception { createIndex("test"); ensureGreen(); - SearchResponse resp = prepareSearch("test").setQuery( - functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) - ).get(); - assertNoFailures(resp); - assertEquals(0, resp.getHits().getTotalHits().value); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) + ), + response -> assertEquals(0, response.getHits().getTotalHits().value) + ); - resp = prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())).get(); - assertNoFailures(resp); - assertEquals(0, resp.getHits().getTotalHits().value); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), + response -> assertEquals(0, response.getHits().getTotalHits().value) + ); } public void testScoreRange() throws Exception { @@ -280,14 +305,14 @@ public void testScoreRange() throws Exception { refresh(); int iters = scaledRandomIntBetween(10, 20); for (int i = 0; i < iters; ++i) { - SearchResponse searchResponse = prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), randomFunction())) - .setSize(docCount) - .get(); - - assertNoFailures(searchResponse); - for (SearchHit hit : searchResponse.getHits().getHits()) { - assertThat(hit.getScore(), allOf(greaterThanOrEqualTo(0.0f), lessThanOrEqualTo(1.0f))); - } + assertNoFailuresAndResponse( + prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), randomFunction())).setSize(docCount), + response -> { + for (SearchHit hit : response.getHits().getHits()) { + assertThat(hit.getScore(), allOf(greaterThanOrEqualTo(0.0f), lessThanOrEqualTo(1.0f))); + } + } + ); } } @@ -338,10 +363,10 @@ public void checkDistribution() throws Exception { for (int i = 0; i < count; i++) { - SearchResponse searchResponse = prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), new RandomScoreFunctionBuilder())) - .get(); - - matrix[Integer.valueOf(searchResponse.getHits().getAt(0).getId())]++; + assertResponse( + prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), new RandomScoreFunctionBuilder())), + response -> matrix[Integer.valueOf(response.getHits().getAt(0).getId())]++ + ); } int filled = 0; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 415de06030938..93891a12dd861 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; @@ -41,8 +40,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -225,26 +226,36 @@ public void testMoreLikeThisWithAliases() throws Exception { ); logger.info("Running moreLikeThis on beta shard"); - SearchResponse response = prepareSearch("beta").setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) - ).get(); - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), equalTo("3")); - + assertResponse( + prepareSearch("beta").setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + } + ); logger.info("Running moreLikeThis on release shard"); - response = prepareSearch("release").setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) - ).get(); - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertResponse( + prepareSearch("release").setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); logger.info("Running moreLikeThis on alias with node client"); - response = internalCluster().coordOnlyNodeClient() - .prepareSearch("beta") - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)) - .get(); - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + assertResponse( + internalCluster().coordOnlyNodeClient() + .prepareSearch("beta") + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + } + ); } // Issue #14944 @@ -267,11 +278,15 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { ).actionGet(); refresh(indexName); - SearchResponse response = prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1) - ).get(); - assertHitCount(response, 2L); - assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + assertResponse( + prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1) + ), + response -> { + assertHitCount(response, 2L); + assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + } + ); } public void testMoreLikeThisIssue2197() throws Exception { @@ -620,13 +635,14 @@ public void 
testMinimumShouldMatch() throws ExecutionException, InterruptedExcep .minDocFreq(1) .minimumShouldMatch(minimumShouldMatch); logger.info("Testing with minimum_should_match = {}", minimumShouldMatch); - SearchResponse response = prepareSearch("test").setQuery(mltQuery).get(); - assertNoFailures(response); - if (minimumShouldMatch.equals("0%")) { - assertHitCount(response, 10); - } else { - assertHitCount(response, 11 - i); - } + final int finalI = i; + assertNoFailuresAndResponse(prepareSearch("test").setQuery(mltQuery), response -> { + if (minimumShouldMatch.equals("0%")) { + assertHitCount(response, 10); + } else { + assertHitCount(response, 11 - finalI); + } + }); } } @@ -773,8 +789,7 @@ public void testWithRouting() throws IOException { ); moreLikeThisQueryBuilder.minTermFreq(1); moreLikeThisQueryBuilder.minDocFreq(1); - SearchResponse searchResponse = prepareSearch("index").setQuery(moreLikeThisQueryBuilder).get(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertHitCount(prepareSearch("index").setQuery(moreLikeThisQueryBuilder), 2L); } // Issue #29678 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 736796d73f164..f5f672c1fed9a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; @@ -37,7 +36,10 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -51,10 +53,8 @@ public void testSimpleNested() throws Exception { ensureGreen(); // check on no data, see it works - SearchResponse searchResponse = prepareSearch("test").get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch("test"), 0L); + assertHitCount(prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")), 0L); client().prepareIndex("test") .setId("1") @@ -83,26 +83,22 @@ public void testSimpleNested() throws Exception { // check the numDocs assertDocumentCount("test", 3); - searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")), 0L); // search for something that matches the nested doc, and see that we don't find the nested doc - searchResponse = prepareSearch("test").setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch("test"), 1L); + assertHitCount(prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")), 0L); // now, do a nested query - searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), + 1L + ); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) + .setSearchType(SearchType.DFS_QUERY_THEN_FETCH), + 1L + ); // add another doc, one that would match if it was not nested... @@ -128,40 +124,43 @@ public void testSimpleNested() throws Exception { refresh(); assertDocumentCount("test", 6); - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg + ) + ), + 1L + ); // filter - searchResponse = prepareSearch("test").setQuery( - boolQuery().must(matchAllQuery()) - .mustNot( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()) + .mustNot( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg + ) ) - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + ), + 1L + ); // check with type prefix - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + 
"nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg + ) + ), + 1L + ); // check delete, so all is gone... DeleteResponse deleteResponse = client().prepareDelete("test", "2").get(); @@ -170,10 +170,10 @@ public void testSimpleNested() throws Exception { refresh(); assertDocumentCount("test", 3); - searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), + 1L + ); } public void testMultiNested() throws Exception { @@ -238,83 +238,87 @@ public void testMultiNested() throws Exception { assertDocumentCount("test", 7); // do some multi nested queries - SearchResponse searchResponse = prepareSearch("test").setQuery( - nestedQuery("nested1", termQuery("nested1.field1", "1"), ScoreMode.Avg) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "3"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "4"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "4")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "4")) - .must(nestedQuery("nested1.nested2", 
termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), - ScoreMode.Avg - ) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.field1", "1"), ScoreMode.Avg)), + 1L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), + 1L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 1L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "3"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 1L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "4"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 0L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 0L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "4")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 1L + ); + + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "4")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), + ScoreMode.Avg + ) + ), + 0L + ); } // When IncludeNestedDocsQuery is wrapped in a FilteredQuery then a in-finite loop occurs b/c of a bug in @@ -421,14 +425,17 @@ public void testExplain() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = prepareSearch("test").setQuery( - nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total) - ).setExplain(true).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - Explanation explanation = searchResponse.getHits().getHits()[0].getExplanation(); - assertThat(explanation.getValue(), equalTo(searchResponse.getHits().getHits()[0].getScore())); - assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) + .setExplain(true), + response -> { + assertNoFailures(response); + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + Explanation explanation = response.getHits().getHits()[0].getExplanation(); + assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); + assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 
to 1")); + } + ); } public void testSimpleNestedSorting() throws Exception { @@ -504,33 +511,32 @@ public void testSimpleNestedSorting() throws Exception { .get(); refresh(); - SearchResponse searchResponse = prepareSearch("test") - - .setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedSort(new NestedSortBuilder("nested1"))) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4")); - - searchResponse = prepareSearch("test") - - .setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedSort(new NestedSortBuilder("nested1"))) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("5")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); + assertResponse( + prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) + .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedSort(new NestedSortBuilder("nested1"))), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4")); + } + ); + assertResponse( + prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) + .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedSort(new NestedSortBuilder("nested1"))), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("5")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); + } + ); } public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { @@ -628,16 +634,15 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { searchRequestBuilder.setScroll("10m"); } - SearchResponse searchResponse = searchRequestBuilder.get(); - - assertHitCount(searchResponse, 3); - 
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10")); - + assertResponse(searchRequestBuilder, response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10")); + }); searchRequestBuilder = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .addSort( SortBuilders.fieldSort("nested1.field1") @@ -650,16 +655,16 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { searchRequestBuilder.setScroll("10m"); } - searchResponse = searchRequestBuilder.get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("5")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); - client().prepareClearScroll().addScrollId("_all").get(); + assertResponse(searchRequestBuilder, response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("5")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); + client().prepareClearScroll().addScrollId("_all").get(); + }); } public void testNestedSortWithMultiLevelFiltering() throws Exception { @@ -788,101 +793,106 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { refresh(); // access id = 1, read, max value, asc, should use matt and shay - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("acl.operation.user.username") - .setNestedSort( - new NestedSortBuilder("acl").setFilter(QueryBuilders.termQuery("acl.access_id", "1")) - .setNestedSort( - new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "read")) - .setNestedSort(new NestedSortBuilder("acl.operation.user")) - ) - ) - .sortMode(SortMode.MAX) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - 
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("matt")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("shay")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("acl.operation.user.username") + .setNestedSort( + new NestedSortBuilder("acl").setFilter(QueryBuilders.termQuery("acl.access_id", "1")) + .setNestedSort( + new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "read")) + .setNestedSort(new NestedSortBuilder("acl.operation.user")) + ) + ) + .sortMode(SortMode.MAX) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("matt")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("shay")); + } + ); // access id = 1, read, min value, asc, should now use adrien and luca - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("acl.operation.user.username") - .setNestedSort( - new NestedSortBuilder("acl").setFilter(QueryBuilders.termQuery("acl.access_id", "1")) - .setNestedSort( - new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "read")) - .setNestedSort(new NestedSortBuilder("acl.operation.user")) - ) - ) - .sortMode(SortMode.MIN) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("adrien")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("luca")); - - // execute, by matt or luca, by user id, sort missing first - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("acl.operation.user.id") - .setNestedSort( - new NestedSortBuilder("acl").setNestedSort( - new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "execute")) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("acl.operation.user.username") + .setNestedSort( + new NestedSortBuilder("acl").setFilter(QueryBuilders.termQuery("acl.access_id", "1")) .setNestedSort( - new NestedSortBuilder("acl.operation.user").setFilter( - QueryBuilders.termsQuery("acl.operation.user.username", "matt", "luca") - ) + new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "read")) + .setNestedSort(new NestedSortBuilder("acl.operation.user")) ) ) - ) - .missing("_first") - .sortMode(SortMode.MIN) - .order(SortOrder.DESC) - ) - .get(); - - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); // missing first - 
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("1")); - + .sortMode(SortMode.MIN) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("adrien")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("luca")); + } + ); + // execute, by matt or luca, by user id, sort missing first + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("acl.operation.user.id") + .setNestedSort( + new NestedSortBuilder("acl").setNestedSort( + new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "execute")) + .setNestedSort( + new NestedSortBuilder("acl.operation.user").setFilter( + QueryBuilders.termsQuery("acl.operation.user.username", "matt", "luca") + ) + ) + ) + ) + .missing("_first") + .sortMode(SortMode.MIN) + .order(SortOrder.DESC) + ), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); // missing first + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("1")); + } + ); // execute, by matt or luca, by username, sort missing last (default) - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("acl.operation.user.username") - .setNestedSort( - new NestedSortBuilder("acl").setNestedSort( - new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "execute")) - .setNestedSort( - new NestedSortBuilder("acl.operation.user").setFilter( - QueryBuilders.termsQuery("acl.operation.user.username", "matt", "luca") + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("acl.operation.user.username") + .setNestedSort( + new NestedSortBuilder("acl").setNestedSort( + new NestedSortBuilder("acl.operation").setFilter(QueryBuilders.termQuery("acl.operation.name", "execute")) + .setNestedSort( + new NestedSortBuilder("acl.operation.user").setFilter( + QueryBuilders.termsQuery("acl.operation.user.username", "matt", "luca") + ) ) - ) + ) ) - ) - .sortMode(SortMode.MIN) - .order(SortOrder.DESC) - ) - .get(); - - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("luca")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); // missing last + .sortMode(SortMode.MIN) + .order(SortOrder.DESC) + ), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("luca")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("1")); // missing last + } + ); } // https://github.com/elastic/elasticsearch/issues/31554 
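// Why this refactor likely pays off (hedged — the motivation is not stated in
// this diff): SearchResponse is ref-counted, and tests that call .get() and
// hold the response are responsible for releasing it. A wrapper of roughly
// this shape runs the request, exposes the response only inside the lambda,
// and releases it afterwards even when an assertion throws:
static <R extends ActionResponse> void assertResponse(ActionRequestBuilder<?, R> builder, Consumer<R> consumer) {
    R response = builder.get();
    try {
        consumer.accept(response); // caller's assertions run while the response is retained
    } finally {
        response.decRef();         // always released, even on assertion failure
    }
}
// This would also explain single-purpose variants like
// assertHitCountAndNoFailures(builder, expectedCount), which let call sites
// avoid touching the response object at all.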
@@ -944,22 +954,25 @@ public void testLeakingSortValues() throws Exception { refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(termQuery("_id", 2)) - .addSort( - SortBuilders.fieldSort("nested1.nested2.sortVal") - .setNestedSort( - new NestedSortBuilder("nested1").setNestedSort( - new NestedSortBuilder("nested1.nested2").setFilter(termQuery("nested1.nested2.nested2_keyword", "nested2_bar")) + assertResponse( + prepareSearch().setQuery(termQuery("_id", 2)) + .addSort( + SortBuilders.fieldSort("nested1.nested2.sortVal") + .setNestedSort( + new NestedSortBuilder("nested1").setNestedSort( + new NestedSortBuilder("nested1.nested2").setFilter( + termQuery("nested1.nested2.nested2_keyword", "nested2_bar") + ) + ) ) - ) - ) - .get(); - - assertHitCount(searchResponse, 1); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); - + ), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); + } + ); } public void testSortNestedWithNestedFilter() throws Exception { @@ -1126,215 +1139,236 @@ public void testSortNestedWithNestedFilter() throws Exception { refresh(); // Without nested filter - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child")) - .order(SortOrder.ASC) - ) - .get(); - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("-3")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("-2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("-1")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort(new NestedSortBuilder("parent.child")) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("-3")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("-2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("-1")); + } + ); // With nested filter NestedSortBuilder nestedSort = new NestedSortBuilder("parent.child"); nestedSort.setFilter(QueryBuilders.termQuery("parent.child.filter", true)); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)) - .get(); - 
assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); // Nested path should be automatically detected, expect same results as above search request - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); nestedSort.setFilter(QueryBuilders.termQuery("parent.filter", false)); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("parent.parent_values").setNestedSort(nestedSort).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - 
assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort( - new NestedSortBuilder("parent").setFilter(QueryBuilders.termQuery("parent.filter", false)) - .setNestedSort(new NestedSortBuilder("parent.child")) - ) - .sortMode(SortMode.MAX) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("6")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(SortBuilders.fieldSort("parent.parent_values").setNestedSort(nestedSort).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort( + new NestedSortBuilder("parent").setFilter(QueryBuilders.termQuery("parent.filter", false)) + .setNestedSort(new NestedSortBuilder("parent.child")) + ) + .sortMode(SortMode.MAX) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("3")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("4")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("6")); + } + ); // Check if closest nested type is resolved - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_obj.value") - .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 3); - 
assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_obj.value") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); // Sort mode: sum - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child")) - .sortMode(SortMode.SUM) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("7")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("11")); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child")) - .sortMode(SortMode.SUM) - .order(SortOrder.DESC) - ) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("11")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("7")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort(new NestedSortBuilder("parent.child")) + .sortMode(SortMode.SUM) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + 
assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("7")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("11")); + } + ); + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort(new NestedSortBuilder("parent.child")) + .sortMode(SortMode.SUM) + .order(SortOrder.DESC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("11")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("7")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); + } + ); // Sort mode: sum with filter - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) - .sortMode(SortMode.SUM) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .sortMode(SortMode.SUM) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); // Sort mode: avg - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child")) - .sortMode(SortMode.AVG) - .order(SortOrder.ASC) - ) - .get(); - - 
assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child")) - .sortMode(SortMode.AVG) - .order(SortOrder.DESC) - ) - .get(); - - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("1")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort(new NestedSortBuilder("parent.child")) + .sortMode(SortMode.AVG) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort(new NestedSortBuilder("parent.child")) + .sortMode(SortMode.AVG) + .order(SortOrder.DESC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("3")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("1")); + } + ); // Sort mode: avg with filter - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) - .sortMode(SortMode.AVG) - .order(SortOrder.ASC) - ) - .get(); - - assertHitCount(searchResponse, 3); - 
assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .sortMode(SortMode.AVG) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + } + ); } // Issue #9305 @@ -1482,27 +1516,30 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { assertTrue(indexResponse2.getShardInfo().getSuccessful() > 0); refresh(); - SearchResponse searchResponse = prepareSearch("test").addSort( - SortBuilders.fieldSort("users.first").setNestedSort(new NestedSortBuilder("users")).order(SortOrder.ASC) - ) - .addSort( - SortBuilders.fieldSort("users.first") - .order(SortOrder.ASC) - .setNestedSort( - new NestedSortBuilder("users").setFilter( - nestedQuery("users.workstations", termQuery("users.workstations.stationid", "s5"), ScoreMode.Avg) - ) - ) + assertNoFailuresAndResponse( + prepareSearch("test").addSort( + SortBuilders.fieldSort("users.first").setNestedSort(new NestedSortBuilder("users")).order(SortOrder.ASC) ) - .get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("fname1")); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[1].toString(), equalTo("fname1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("fname1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[1].toString(), equalTo("fname3")); + .addSort( + SortBuilders.fieldSort("users.first") + .order(SortOrder.ASC) + .setNestedSort( + new NestedSortBuilder("users").setFilter( + nestedQuery("users.workstations", termQuery("users.workstations.stationid", "s5"), ScoreMode.Avg) + ) + ) + ), + response -> { + assertNoFailures(response); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("fname1")); + assertThat(response.getHits().getAt(0).getSortValues()[1].toString(), equalTo("fname1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("1")); + 
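All of the conversions in this part of the change follow one pattern: instead of calling .get() and holding the SearchResponse in a local variable, the request builder and the assertions are handed to a helper from org.elasticsearch.test.hamcrest.ElasticsearchAssertions (assertResponse here, with assertNoFailuresAndResponse and assertHitCountAndNoFailures appearing further on), which runs the search, applies the assertions, and releases the response afterwards. The following is a minimal sketch of what such a helper has to do, assuming a ref-counted SearchResponse that is released via decRef(); it is not the actual ElasticsearchAssertions implementation, and the class and method names are made up for illustration:

    import java.util.function.Consumer;

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.action.search.SearchResponse;

    final class ResponseAssertionSketch {
        // Illustrative only: run the search, apply the caller's assertions, and
        // always release the response, even when an assertion fails.
        static void assertResponseSketch(SearchRequestBuilder request, Consumer<SearchResponse> assertions) {
            SearchResponse response = request.get();
            try {
                assertions.accept(response);
            } finally {
                response.decRef(); // assumed release hook; the real helper may track leaks differently
            }
        }
    }

Because the response can no longer escape the callback, forgetting to release it becomes impossible by construction, which is why every converted test now ends in a trailing lambda.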
assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("fname1")); + assertThat(response.getHits().getAt(1).getSortValues()[1].toString(), equalTo("fname3")); + } + ); } public void testCheckFixedBitSetCache() throws Exception { @@ -1546,11 +1583,10 @@ public void testCheckFixedBitSetCache() throws Exception { assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L)); // only when querying with nested the fixed bitsets are loaded - SearchResponse searchResponse = prepareSearch("test").setQuery( - nestedQuery("array1", termQuery("array1.field1", "value1"), ScoreMode.Avg) - ).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(nestedQuery("array1", termQuery("array1.field1", "value1"), ScoreMode.Avg)), + 5L + ); } clusterStatsResponse = clusterAdmin().prepareClusterStats().get(); assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), greaterThan(0L)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java index 9219641f1d3bf..4832964427540 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.nested; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.vectors.KnnSearchBuilder; @@ -18,6 +17,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -66,10 +66,11 @@ public void testSimpleNested() throws Exception { assertThat(getResponse.getSourceAsBytes(), notNullValue()); refresh(); - SearchResponse searchResponse = prepareSearch("test").setKnnSearch( - List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null)) - ).setAllowPartialSearchResults(false).get(); - assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + assertResponse( + prepareSearch("test").setKnnSearch(List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null))) + .setAllowPartialSearchResults(false), + response -> assertThat(response.getHits().getHits().length, greaterThan(0)) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index 526d523bb0638..560806a68c908 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.profile.aggregation; import org.elasticsearch.action.index.IndexRequestBuilder; -import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -42,7 +41,7 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -122,110 +121,113 @@ protected void setupSuiteScopeCluster() throws Exception { } public void testSimpleProfile() { - SearchResponse response = prepareSearch("idx").setProfile(true) - .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getProfiledChildren().size(), equalTo(0)); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map breakdown = histoAggResult.getTimeBreakdown(); - assertThat(breakdown, notNullValue()); - assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(breakdown.get(COLLECT), greaterThan(0L)); - assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); - assertThat(breakdown.get(REDUCE), equalTo(0L)); - assertMap( - histoAggResult.getDebugInfo(), - matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) - ); - } + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(true).addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, 
notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getProfiledChildren().size(), equalTo(0)); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map breakdown = histoAggResult.getTimeBreakdown(); + assertThat(breakdown, notNullValue()); + assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(breakdown.get(COLLECT), greaterThan(0L)); + assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); + assertThat(breakdown.get(REDUCE), equalTo(0L)); + assertMap( + histoAggResult.getDebugInfo(), + matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) + ); + } + } + ); } public void testMultiLevelProfile() { - SearchResponse response = prepareSearch("idx").setProfile(true) - .addAggregation( - histogram("histo").field(NUMBER_FIELD) - .interval(1L) - .subAggregation( - terms("terms").field(TAG_FIELD) - .order(BucketOrder.aggregation("avg", false)) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - ) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map histoBreakdown = histoAggResult.getTimeBreakdown(); - assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); - assertMap( - histoAggResult.getDebugInfo(), - matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) - ); - - ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); - assertThat(termsAggResult, notNullValue()); - assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); - assertThat(termsAggResult.getTime(), greaterThan(0L)); - Map termsBreakdown = termsAggResult.getTimeBreakdown(); - assertThat(termsBreakdown, notNullValue()); - assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(termsBreakdown.get(REDUCE), equalTo(0L)); - 
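The profiler tests converted below all repeat the same traversal: SearchResponse#getProfileResults() returns one SearchProfileShardResult per shard (hence the size check against getNumShards("idx").numPrimaries), each shard result exposes an AggregationProfileShardResult, and every ProfileResult inside it is a node in a tree that mirrors the aggregation tree of the request. The REDUCE entry is asserted to be 0L throughout because these are shard-level profiles and the reduce phase only runs later, on the coordinating node. As a reading aid, here is a small illustrative helper, not part of the test suite, that prints the same tree these assertions walk by hand, using only getters that appear in the tests:

    import org.elasticsearch.search.profile.ProfileResult;

    final class ProfileTreeDump {
        // Illustrative only: recursively print an aggregation profile tree,
        // one node per aggregator, with its per-phase time breakdown.
        static void dump(ProfileResult node, int depth) {
            String indent = "  ".repeat(depth);
            System.out.printf("%s%s [%s] took %d ns%n", indent, node.getQueryName(), node.getLuceneDescription(), node.getTime());
            node.getTimeBreakdown().forEach((phase, nanos) -> System.out.printf("%s  %s = %d%n", indent, phase, nanos));
            for (ProfileResult child : node.getProfiledChildren()) {
                dump(child, depth + 1);
            }
        }
    }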
assertRemapTermsDebugInfo(termsAggResult); - assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1)); - - ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); - assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); - assertThat(avgAggResult.getTime(), greaterThan(0L)); - Map avgBreakdown = avgAggResult.getTimeBreakdown(); - assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); - assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - } + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(true) + .addAggregation( + histogram("histo").field(NUMBER_FIELD) + .interval(1L) + .subAggregation( + terms("terms").field(TAG_FIELD) + .order(BucketOrder.aggregation("avg", false)) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + ) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map histoBreakdown = histoAggResult.getTimeBreakdown(); + assertThat(histoBreakdown, notNullValue()); + assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); + assertMap( + histoAggResult.getDebugInfo(), + matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) + ); + + ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); + assertThat(termsAggResult, notNullValue()); + assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); + assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); + assertThat(termsAggResult.getTime(), greaterThan(0L)); + Map termsBreakdown = termsAggResult.getTimeBreakdown(); + assertThat(termsBreakdown, notNullValue()); + assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(termsBreakdown.get(BUILD_AGGREGATION), 
greaterThan(0L)); + assertThat(termsBreakdown.get(REDUCE), equalTo(0L)); + assertRemapTermsDebugInfo(termsAggResult); + assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1)); + + ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); + assertThat(avgAggResult, notNullValue()); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); + assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); + assertThat(avgAggResult.getTime(), greaterThan(0L)); + Map avgBreakdown = avgAggResult.getTimeBreakdown(); + assertThat(avgBreakdown, notNullValue()); + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); + assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); + } + } + ); } private void assertRemapTermsDebugInfo(ProfileResult termsAggResult, String... deferredAggregators) { @@ -243,375 +245,386 @@ private void assertRemapTermsDebugInfo(ProfileResult termsAggResult, String... d } public void testMultiLevelProfileBreadthFirst() { - SearchResponse response = prepareSearch("idx").setProfile(true) - .addAggregation( - histogram("histo").field(NUMBER_FIELD) - .interval(1L) - .subAggregation( - terms("terms").collectMode(SubAggCollectionMode.BREADTH_FIRST) - .field(TAG_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - ) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map histoBreakdown = histoAggResult.getTimeBreakdown(); - assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); - assertMap( - histoAggResult.getDebugInfo(), - matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) - ); - assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1)); - - ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); - assertThat(termsAggResult, notNullValue()); - assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - 
assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); - assertThat(termsAggResult.getTime(), greaterThan(0L)); - Map termsBreakdown = termsAggResult.getTimeBreakdown(); - assertThat(termsBreakdown, notNullValue()); - assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(termsBreakdown.get(REDUCE), equalTo(0L)); - assertRemapTermsDebugInfo(termsAggResult, "avg"); - assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1)); - - ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); - assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); - assertThat(avgAggResult.getTime(), greaterThan(0L)); - Map avgBreakdown = avgAggResult.getTimeBreakdown(); - assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); - assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - } + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(true) + .addAggregation( + histogram("histo").field(NUMBER_FIELD) + .interval(1L) + .subAggregation( + terms("terms").collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TAG_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + ) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map histoBreakdown = histoAggResult.getTimeBreakdown(); + assertThat(histoBreakdown, notNullValue()); + assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); + assertMap( + histoAggResult.getDebugInfo(), + matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) + ); + assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1)); + + ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); + 
assertThat(termsAggResult, notNullValue()); + assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); + assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); + assertThat(termsAggResult.getTime(), greaterThan(0L)); + Map termsBreakdown = termsAggResult.getTimeBreakdown(); + assertThat(termsBreakdown, notNullValue()); + assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(termsBreakdown.get(REDUCE), equalTo(0L)); + assertRemapTermsDebugInfo(termsAggResult, "avg"); + assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1)); + + ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); + assertThat(avgAggResult, notNullValue()); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); + assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); + assertThat(avgAggResult.getTime(), greaterThan(0L)); + Map avgBreakdown = avgAggResult.getTimeBreakdown(); + assertThat(avgBreakdown, notNullValue()); + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); + assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); + } + } + ); } public void testDiversifiedAggProfile() { - SearchResponse response = prepareSearch("idx").setProfile(true) - .addAggregation( - diversifiedSampler("diversify").shardSize(10) - .field(STRING_FIELD) - .maxDocsPerValue(2) - .subAggregation(max("max").field(NUMBER_FIELD)) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult diversifyAggResult = aggProfileResultsList.get(0); - assertThat(diversifyAggResult, notNullValue()); - assertThat(diversifyAggResult.getQueryName(), equalTo(DiversifiedOrdinalsSamplerAggregator.class.getSimpleName())); - assertThat(diversifyAggResult.getLuceneDescription(), equalTo("diversify")); - assertThat(diversifyAggResult.getTime(), greaterThan(0L)); - Map diversifyBreakdown = diversifyAggResult.getTimeBreakdown(); - assertThat(diversifyBreakdown, notNullValue()); - assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(diversifyBreakdown.get(POST_COLLECTION), greaterThan(0L)); - 
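The extra "avg" argument passed to assertRemapTermsDebugInfo in the breadth-first test above, and the .entry(DEFERRED, List.of("max")) expectation in the diversified-sampler test being converted here, both come from deferred collection: a terms aggregation in breadth_first mode and a (diversified) sampler first select their top buckets and only afterwards replay the matching documents into their sub-aggregations, so the profiler lists those children as deferred instead of timing them inline. In terms of the raw debug map, the assertion amounts to roughly the following, assuming the DEFERRED constant in this test class stands for the "deferred_aggregators" key:

    // Illustrative only: a deferring aggregator names the sub-aggregations whose
    // collection it postponed until after bucket pruning. Uses the test's
    // termsAggResult; "deferred_aggregators" is an assumed key.
    @SuppressWarnings("unchecked")
    List<String> deferred = (List<String>) termsAggResult.getDebugInfo().get("deferred_aggregators");
    assertThat(deferred, equalTo(List.of("avg")));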
assertThat(diversifyBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(diversifyBreakdown.get(REDUCE), equalTo(0L)); - assertMap(diversifyAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0)).entry(DEFERRED, List.of("max"))); - - ProfileResult maxAggResult = diversifyAggResult.getProfiledChildren().get(0); - assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); - assertThat(maxAggResult.getTime(), greaterThan(0L)); - Map maxBreakdown = maxAggResult.getTimeBreakdown(); - assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(diversifyBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); - assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - } + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(true) + .addAggregation( + diversifiedSampler("diversify").shardSize(10) + .field(STRING_FIELD) + .maxDocsPerValue(2) + .subAggregation(max("max").field(NUMBER_FIELD)) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult diversifyAggResult = aggProfileResultsList.get(0); + assertThat(diversifyAggResult, notNullValue()); + assertThat(diversifyAggResult.getQueryName(), equalTo(DiversifiedOrdinalsSamplerAggregator.class.getSimpleName())); + assertThat(diversifyAggResult.getLuceneDescription(), equalTo("diversify")); + assertThat(diversifyAggResult.getTime(), greaterThan(0L)); + Map diversifyBreakdown = diversifyAggResult.getTimeBreakdown(); + assertThat(diversifyBreakdown, notNullValue()); + assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(diversifyBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(diversifyBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(diversifyBreakdown.get(REDUCE), equalTo(0L)); + assertMap( + diversifyAggResult.getDebugInfo(), + matchesMap().entry(BUILT_BUCKETS, greaterThan(0)).entry(DEFERRED, List.of("max")) + ); + + ProfileResult maxAggResult = diversifyAggResult.getProfiledChildren().get(0); + assertThat(maxAggResult, notNullValue()); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); + 
assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); + assertThat(maxAggResult.getTime(), greaterThan(0L)); + Map maxBreakdown = maxAggResult.getTimeBreakdown(); + assertThat(maxBreakdown, notNullValue()); + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); + assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); + } + } + ); } public void testComplexProfile() { - SearchResponse response = prepareSearch("idx").setProfile(true) - .addAggregation( - histogram("histo").field(NUMBER_FIELD) - .interval(1L) - .subAggregation( - terms("tags").field(TAG_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - ) - .subAggregation( - terms("strings").field(STRING_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - .subAggregation( - terms("tags").field(TAG_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - ) - ) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map histoBreakdown = histoAggResult.getTimeBreakdown(); - assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(histoBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(histoBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); - assertMap( - histoAggResult.getDebugInfo(), - matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) - ); - assertThat(histoAggResult.getProfiledChildren().size(), equalTo(2)); - - Map histoAggResultSubAggregations = histoAggResult.getProfiledChildren() - .stream() - .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); - - ProfileResult tagsAggResult = histoAggResultSubAggregations.get("tags"); - assertThat(tagsAggResult,
notNullValue()); - assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(tagsAggResult.getTime(), greaterThan(0L)); - Map tagsBreakdown = tagsAggResult.getTimeBreakdown(); - assertThat(tagsBreakdown, notNullValue()); - assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(tagsBreakdown.get(REDUCE), equalTo(0L)); - assertRemapTermsDebugInfo(tagsAggResult); - assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); - - Map tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren() - .stream() - .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); - - ProfileResult avgAggResult = tagsAggResultSubAggregations.get("avg"); - assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getTime(), greaterThan(0L)); - Map avgBreakdown = avgAggResult.getTimeBreakdown(); - assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); - assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - - ProfileResult maxAggResult = tagsAggResultSubAggregations.get("max"); - assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getTime(), greaterThan(0L)); - Map maxBreakdown = maxAggResult.getTimeBreakdown(); - assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); - assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - - ProfileResult stringsAggResult = histoAggResultSubAggregations.get("strings"); - assertThat(stringsAggResult, notNullValue()); - assertThat(stringsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(stringsAggResult.getTime(), greaterThan(0L)); - Map stringsBreakdown = stringsAggResult.getTimeBreakdown(); - assertThat(stringsBreakdown, notNullValue()); - assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(stringsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(stringsBreakdown.get(COLLECT), 
greaterThan(0L)); - assertThat(stringsBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(stringsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(stringsBreakdown.get(REDUCE), equalTo(0L)); - assertRemapTermsDebugInfo(stringsAggResult); - assertThat(stringsAggResult.getProfiledChildren().size(), equalTo(3)); - - Map stringsAggResultSubAggregations = stringsAggResult.getProfiledChildren() - .stream() - .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); - - avgAggResult = stringsAggResultSubAggregations.get("avg"); - assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getTime(), greaterThan(0L)); - avgBreakdown = avgAggResult.getTimeBreakdown(); - assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); - assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - - maxAggResult = stringsAggResultSubAggregations.get("max"); - assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getTime(), greaterThan(0L)); - maxBreakdown = maxAggResult.getTimeBreakdown(); - assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); - assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - - tagsAggResult = stringsAggResultSubAggregations.get("tags"); - assertThat(tagsAggResult, notNullValue()); - assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); - assertThat(tagsAggResult.getTime(), greaterThan(0L)); - tagsBreakdown = tagsAggResult.getTimeBreakdown(); - assertThat(tagsBreakdown, notNullValue()); - assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(tagsBreakdown.get(REDUCE), equalTo(0L)); - assertRemapTermsDebugInfo(tagsAggResult); - assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); - - tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren() - .stream() - .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); - - avgAggResult = 
tagsAggResultSubAggregations.get("avg"); - assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getTime(), greaterThan(0L)); - avgBreakdown = avgAggResult.getTimeBreakdown(); - assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); - assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - - maxAggResult = tagsAggResultSubAggregations.get("max"); - assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getTime(), greaterThan(0L)); - maxBreakdown = maxAggResult.getTimeBreakdown(); - assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); - assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); - assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); - assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); - assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); - assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); - assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - } + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(true) + .addAggregation( + histogram("histo").field(NUMBER_FIELD) + .interval(1L) + .subAggregation( + terms("tags").field(TAG_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + ) + .subAggregation( + terms("strings").field(STRING_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + .subAggregation( + terms("tags").field(TAG_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + ) + ) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("NumericHistogramAggregator")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map histoBreakdown = histoAggResult.getTimeBreakdown(); + assertThat(histoBreakdown, notNullValue()); + 
assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(histoBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(histoBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(histoBreakdown.get(REDUCE), equalTo(0L)); + assertMap( + histoAggResult.getDebugInfo(), + matchesMap().entry(TOTAL_BUCKETS, greaterThan(0L)).entry(BUILT_BUCKETS, greaterThan(0)) + ); + assertThat(histoAggResult.getProfiledChildren().size(), equalTo(2)); + + Map histoAggResultSubAggregations = histoAggResult.getProfiledChildren() + .stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + ProfileResult tagsAggResult = histoAggResultSubAggregations.get("tags"); + assertThat(tagsAggResult, notNullValue()); + assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); + assertThat(tagsAggResult.getTime(), greaterThan(0L)); + Map tagsBreakdown = tagsAggResult.getTimeBreakdown(); + assertThat(tagsBreakdown, notNullValue()); + assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(tagsBreakdown.get(REDUCE), equalTo(0L)); + assertRemapTermsDebugInfo(tagsAggResult); + assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); + + Map tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren() + .stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + ProfileResult avgAggResult = tagsAggResultSubAggregations.get("avg"); + assertThat(avgAggResult, notNullValue()); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); + assertThat(avgAggResult.getTime(), greaterThan(0L)); + Map avgBreakdown = avgAggResult.getTimeBreakdown(); + assertThat(avgBreakdown, notNullValue()); + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); + assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); + + ProfileResult maxAggResult = tagsAggResultSubAggregations.get("max"); + assertThat(maxAggResult, notNullValue()); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); + assertThat(maxAggResult.getTime(), greaterThan(0L)); + Map maxBreakdown = maxAggResult.getTimeBreakdown(); + assertThat(maxBreakdown, notNullValue()); + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); + 
assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); + assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); + + ProfileResult stringsAggResult = histoAggResultSubAggregations.get("strings"); + assertThat(stringsAggResult, notNullValue()); + assertThat(stringsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); + assertThat(stringsAggResult.getTime(), greaterThan(0L)); + Map stringsBreakdown = stringsAggResult.getTimeBreakdown(); + assertThat(stringsBreakdown, notNullValue()); + assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(stringsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(stringsBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(stringsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(stringsBreakdown.get(REDUCE), equalTo(0L)); + assertRemapTermsDebugInfo(stringsAggResult); + assertThat(stringsAggResult.getProfiledChildren().size(), equalTo(3)); + + Map stringsAggResultSubAggregations = stringsAggResult.getProfiledChildren() + .stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + avgAggResult = stringsAggResultSubAggregations.get("avg"); + assertThat(avgAggResult, notNullValue()); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); + assertThat(avgAggResult.getTime(), greaterThan(0L)); + avgBreakdown = avgAggResult.getTimeBreakdown(); + assertThat(avgBreakdown, notNullValue()); + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); + assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); + + maxAggResult = stringsAggResultSubAggregations.get("max"); + assertThat(maxAggResult, notNullValue()); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); + assertThat(maxAggResult.getTime(), greaterThan(0L)); + maxBreakdown = maxAggResult.getTimeBreakdown(); + assertThat(maxBreakdown, notNullValue()); + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); + assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); + + tagsAggResult = stringsAggResultSubAggregations.get("tags"); + assertThat(tagsAggResult, notNullValue()); + assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); + 
assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); + assertThat(tagsAggResult.getTime(), greaterThan(0L)); + tagsBreakdown = tagsAggResult.getTimeBreakdown(); + assertThat(tagsBreakdown, notNullValue()); + assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(tagsBreakdown.get(REDUCE), equalTo(0L)); + assertRemapTermsDebugInfo(tagsAggResult); + assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); + + tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren() + .stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + avgAggResult = tagsAggResultSubAggregations.get("avg"); + assertThat(avgAggResult, notNullValue()); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); + assertThat(avgAggResult.getTime(), greaterThan(0L)); + avgBreakdown = avgAggResult.getTimeBreakdown(); + assertThat(avgBreakdown, notNullValue()); + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(avgBreakdown.get(REDUCE), equalTo(0L)); + assertMap(avgAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); + + maxAggResult = tagsAggResultSubAggregations.get("max"); + assertThat(maxAggResult, notNullValue()); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); + assertThat(maxAggResult.getTime(), greaterThan(0L)); + maxBreakdown = maxAggResult.getTimeBreakdown(); + assertThat(maxBreakdown, notNullValue()); + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); + assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); + assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L)); + assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); + assertThat(maxBreakdown.get(REDUCE), equalTo(0L)); + assertMap(maxAggResult.getDebugInfo(), matchesMap().entry(BUILT_BUCKETS, greaterThan(0))); + assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); + } + } + ); } public void testNoProfile() { - SearchResponse response = prepareSearch("idx").setProfile(false) - .addAggregation( - histogram("histo").field(NUMBER_FIELD) - .interval(1L) - .subAggregation( - terms("tags").field(TAG_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - ) - .subAggregation( - terms("strings").field(STRING_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - .subAggregation( - terms("tags").field(TAG_FIELD) - .subAggregation(avg("avg").field(NUMBER_FIELD)) - .subAggregation(max("max").field(NUMBER_FIELD)) - ) - ) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - 
assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(0)); + assertNoFailuresAndResponse( + prepareSearch("idx").setProfile(false) + .addAggregation( + histogram("histo").field(NUMBER_FIELD) + .interval(1L) + .subAggregation( + terms("tags").field(TAG_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + ) + .subAggregation( + terms("strings").field(STRING_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + .subAggregation( + terms("tags").field(TAG_FIELD) + .subAggregation(avg("avg").field(NUMBER_FIELD)) + .subAggregation(max("max").field(NUMBER_FIELD)) + ) + ) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(0)); + } + ); } /** @@ -634,62 +647,66 @@ public void testFilterByFilter() throws InterruptedException, IOException { } indexRandom(true, false, builders); - SearchResponse response = prepareSearch("dateidx").setProfile(true) - .addAggregation( - new DateHistogramAggregationBuilder("histo").field("date") - .calendarInterval(DateHistogramInterval.MONTH) - // Add a sub-agg so we don't get to use metadata. That's great and all, but it outputs less debugging info for us to - // verify. - .subAggregation(new MaxAggregationBuilder("m").field("date")) - ) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("DateHistogramAggregator.FromDateRange")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1)); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map breakdown = histoAggResult.getTimeBreakdown(); - assertThat(breakdown, notNullValue()); - assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); - assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); - assertThat(breakdown.get(COLLECT), equalTo(0L)); - assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); - assertThat(breakdown.get(REDUCE), equalTo(0L)); - assertMap( - histoAggResult.getDebugInfo(), - matchesMap().entry(BUILT_BUCKETS, greaterThan(0)) - .entry("delegate", "RangeAggregator.FromFilters") - .entry( - "delegate_debug", - matchesMap().entry("average_docs_per_range", equalTo(RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 2)) - .entry("ranges", 1) - .entry("delegate", "FilterByFilterAggregator") + assertNoFailuresAndResponse( + prepareSearch("dateidx").setProfile(true) + .addAggregation( + new DateHistogramAggregationBuilder("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + // Add a sub-agg so we don't get to use metadata. 
That's great and all, but it outputs less debugging info for us to + // verify. + .subAggregation(new MaxAggregationBuilder("m").field("date")) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("DateHistogramAggregator.FromDateRange")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1)); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map breakdown = histoAggResult.getTimeBreakdown(); + assertThat(breakdown, notNullValue()); + assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); + assertThat(breakdown.get(COLLECT), equalTo(0L)); + assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); + assertThat(breakdown.get(REDUCE), equalTo(0L)); + assertMap( + histoAggResult.getDebugInfo(), + matchesMap().entry(BUILT_BUCKETS, greaterThan(0)) + .entry("delegate", "RangeAggregator.FromFilters") .entry( "delegate_debug", - matchesMap().entry("segments_with_deleted_docs", greaterThanOrEqualTo(0)) - .entry("segments_with_doc_count_field", 0) - .entry("segments_counted", 0) - .entry("segments_collected", greaterThan(0)) + matchesMap().entry("average_docs_per_range", equalTo(RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 2)) + .entry("ranges", 1) + .entry("delegate", "FilterByFilterAggregator") .entry( - "filters", - matchesList().item(matchesMap().entry("query", "*:*").entry("segments_counted_in_constant_time", 0)) + "delegate_debug", + matchesMap().entry("segments_with_deleted_docs", greaterThanOrEqualTo(0)) + .entry("segments_with_doc_count_field", 0) + .entry("segments_counted", 0) + .entry("segments_collected", greaterThan(0)) + .entry( + "filters", + matchesList().item( + matchesMap().entry("query", "*:*").entry("segments_counted_in_constant_time", 0) + ) + ) ) ) - ) - ); - } + ); + } + } + ); } public void testDateHistogramFilterByFilterDisabled() throws InterruptedException, IOException { @@ -710,56 +727,60 @@ public void testDateHistogramFilterByFilterDisabled() throws InterruptedExceptio } indexRandom(true, false, builders); - SearchResponse response = prepareSearch("date_filter_by_filter_disabled").setProfile(true) - .addAggregation(new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); - assertNoFailures(response); - Map profileResults = response.getProfileResults(); - assertThat(profileResults, notNullValue()); - assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries)); - for (SearchProfileShardResult profileShardResult : profileResults.values()) { - assertThat(profileShardResult, notNullValue()); - AggregationProfileShardResult aggProfileResults = 
profileShardResult.getAggregationProfileResults(); - assertThat(aggProfileResults, notNullValue()); - List aggProfileResultsList = aggProfileResults.getProfileResults(); - assertThat(aggProfileResultsList, notNullValue()); - assertThat(aggProfileResultsList.size(), equalTo(1)); - ProfileResult histoAggResult = aggProfileResultsList.get(0); - assertThat(histoAggResult, notNullValue()); - assertThat(histoAggResult.getQueryName(), equalTo("DateHistogramAggregator.FromDateRange")); - assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); - assertThat(histoAggResult.getProfiledChildren().size(), equalTo(0)); - assertThat(histoAggResult.getTime(), greaterThan(0L)); - Map breakdown = histoAggResult.getTimeBreakdown(); - assertMap( - breakdown, - matchesMap().entry(INITIALIZE, greaterThan(0L)) - .entry(INITIALIZE + "_count", greaterThan(0L)) - .entry(BUILD_LEAF_COLLECTOR, greaterThan(0L)) - .entry(BUILD_LEAF_COLLECTOR + "_count", greaterThan(0L)) - .entry(COLLECT, greaterThan(0L)) - .entry(COLLECT + "_count", greaterThan(0L)) - .entry(POST_COLLECTION, greaterThan(0L)) - .entry(POST_COLLECTION + "_count", 1L) - .entry(BUILD_AGGREGATION, greaterThan(0L)) - .entry(BUILD_AGGREGATION + "_count", greaterThan(0L)) - .entry(REDUCE, 0L) - .entry(REDUCE + "_count", 0L) - ); - Map debug = histoAggResult.getDebugInfo(); - assertMap( - debug, - matchesMap().entry("delegate", "RangeAggregator.NoOverlap") - .entry("built_buckets", 1) - .entry( - "delegate_debug", - matchesMap().entry("ranges", 1) - .entry("average_docs_per_range", 10000.0) - .entry("singletons", greaterThan(0)) - .entry("non-singletons", 0) - ) - ); - } + assertNoFailuresAndResponse( + prepareSearch("date_filter_by_filter_disabled").setProfile(true) + .addAggregation( + new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH) + ), + response -> { + Map profileResults = response.getProfileResults(); + assertThat(profileResults, notNullValue()); + assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries)); + for (SearchProfileShardResult profileShardResult : profileResults.values()) { + assertThat(profileShardResult, notNullValue()); + AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); + assertThat(aggProfileResults, notNullValue()); + List aggProfileResultsList = aggProfileResults.getProfileResults(); + assertThat(aggProfileResultsList, notNullValue()); + assertThat(aggProfileResultsList.size(), equalTo(1)); + ProfileResult histoAggResult = aggProfileResultsList.get(0); + assertThat(histoAggResult, notNullValue()); + assertThat(histoAggResult.getQueryName(), equalTo("DateHistogramAggregator.FromDateRange")); + assertThat(histoAggResult.getLuceneDescription(), equalTo("histo")); + assertThat(histoAggResult.getProfiledChildren().size(), equalTo(0)); + assertThat(histoAggResult.getTime(), greaterThan(0L)); + Map breakdown = histoAggResult.getTimeBreakdown(); + assertMap( + breakdown, + matchesMap().entry(INITIALIZE, greaterThan(0L)) + .entry(INITIALIZE + "_count", greaterThan(0L)) + .entry(BUILD_LEAF_COLLECTOR, greaterThan(0L)) + .entry(BUILD_LEAF_COLLECTOR + "_count", greaterThan(0L)) + .entry(COLLECT, greaterThan(0L)) + .entry(COLLECT + "_count", greaterThan(0L)) + .entry(POST_COLLECTION, greaterThan(0L)) + .entry(POST_COLLECTION + "_count", 1L) + .entry(BUILD_AGGREGATION, greaterThan(0L)) + .entry(BUILD_AGGREGATION + "_count", greaterThan(0L)) + .entry(REDUCE, 0L) + .entry(REDUCE + "_count", 
0L) + ); + Map debug = histoAggResult.getDebugInfo(); + assertMap( + debug, + matchesMap().entry("delegate", "RangeAggregator.NoOverlap") + .entry("built_buckets", 1) + .entry( + "delegate_debug", + matchesMap().entry("ranges", 1) + .entry("average_docs_per_range", 10000.0) + .entry("singletons", greaterThan(0)) + .entry("non-singletons", 0) + ) + ); + } + } + ); } finally { updateClusterSettings(Settings.builder().putNull(SearchService.ENABLE_REWRITE_AGGS_TO_FILTER_BY_FILTER.getKey())); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java index f7b2b0f4443d3..0bc23d9bd331b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java @@ -10,7 +10,6 @@ import org.apache.lucene.tests.util.English; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.profile.ProfileResult; @@ -28,6 +27,7 @@ import static org.elasticsearch.search.profile.query.RandomQueryGenerator.randomQueryBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -67,53 +67,55 @@ public void testProfileDfs() throws Exception { for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(List.of(textField), List.of(numericField), numDocs, 3); logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q) - .setTrackTotalHits(true) - .setProfile(true) - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setKnnSearch( - randomList( - 2, - 5, - () -> new KnnSearchBuilder( - vectorField, - new float[] { randomFloat(), randomFloat(), randomFloat() }, - randomIntBetween(5, 10), - 50, - randomBoolean() ? null : randomFloat() + assertResponse( + prepareSearch().setQuery(q) + .setTrackTotalHits(true) + .setProfile(true) + .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setKnnSearch( + randomList( + 2, + 5, + () -> new KnnSearchBuilder( + vectorField, + new float[] { randomFloat(), randomFloat(), randomFloat() }, + randomIntBetween(5, 10), + 50, + randomBoolean() ? 
null : randomFloat() + ) ) - ) - ) - .get(); - - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shard : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); - assertThat(result.getTime(), greaterThan(0L)); - } - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); - } - SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = shard.getValue().getSearchProfileDfsPhaseResult(); - assertThat(searchProfileDfsPhaseResult, is(notNullValue())); - for (QueryProfileShardResult queryProfileShardResult : searchProfileDfsPhaseResult.getQueryProfileShardResult()) { - for (ProfileResult result : queryProfileShardResult.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); - assertThat(result.getTime(), greaterThan(0L)); + ), + response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); + for (Map.Entry shard : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + } + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); + assertThat(result.getTime(), greaterThan(0L)); + } + SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = shard.getValue().getSearchProfileDfsPhaseResult(); + assertThat(searchProfileDfsPhaseResult, is(notNullValue())); + for (QueryProfileShardResult queryProfileShardResult : searchProfileDfsPhaseResult.getQueryProfileShardResult()) { + for (ProfileResult result : queryProfileShardResult.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + } + CollectorResult result = queryProfileShardResult.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); + assertThat(result.getTime(), greaterThan(0L)); + } + ProfileResult statsResult = searchProfileDfsPhaseResult.getDfsShardResult(); + assertThat(statsResult.getQueryName(), equalTo("statistics")); } - CollectorResult result = queryProfileShardResult.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - ProfileResult statsResult = searchProfileDfsPhaseResult.getDfsShardResult(); - assertThat(statsResult.getQueryName(), equalTo("statistics")); - } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index e7b02faede9b1..9aa5a85dba973 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -30,6 +30,7 @@ import java.util.Set; import static org.elasticsearch.search.profile.query.RandomQueryGenerator.randomQueryBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -62,29 +63,26 @@ public void testProfileQuery() throws Exception { for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); logger.info("Query: {}", q); - - SearchResponse resp = prepareSearch().setQuery(q) - .setTrackTotalHits(true) - .setProfile(true) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .get(); - - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shard : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); - assertThat(result.getTime(), greaterThan(0L)); + assertResponse( + prepareSearch().setQuery(q).setTrackTotalHits(true).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), + response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); + for (Map.Entry shard : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); + assertThat(result.getTime(), greaterThan(0L)); + } } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } - + ); } } @@ -185,26 +183,26 @@ public void testSimpleMatch() throws Exception { QueryBuilder q = QueryBuilders.matchQuery("field1", "one"); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + Map p = response.getProfileResults(); + assertNotNull(p); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - Map p = resp.getProfileResults(); - assertNotNull(p); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + 
assertEquals(result.getQueryName(), "TermQuery"); + assertEquals(result.getLuceneDescription(), "field1:one"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertEquals(result.getQueryName(), "TermQuery"); - assertEquals(result.getLuceneDescription(), "field1:one"); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } /** @@ -226,45 +224,44 @@ public void testBool() throws Exception { .must(QueryBuilders.matchQuery("field1", "one")) .must(QueryBuilders.matchQuery("field1", "two")); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + Map p = response.getProfileResults(); + assertNotNull(p); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - Map p = resp.getProfileResults(); - assertNotNull(p); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertEquals(result.getQueryName(), "BooleanQuery"); + assertEquals(result.getLuceneDescription(), "+field1:one +field1:two"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + assertEquals(result.getProfiledChildren().size(), 2); + + // Check the children + List children = result.getProfiledChildren(); + assertEquals(children.size(), 2); + + ProfileResult childProfile = children.get(0); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:one"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + assertEquals(childProfile.getProfiledChildren().size(), 0); + + childProfile = children.get(1); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:two"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertEquals(result.getQueryName(), "BooleanQuery"); - assertEquals(result.getLuceneDescription(), "+field1:one +field1:two"); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); - 
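The testBool conversion here asserts on the profiled query tree one level at a time: the BooleanQuery parent first, then each TermQuery child. The same invariants can be checked for a whole tree with a short recursive walk; the ProfileNode record below is an illustrative stand-in mirroring ProfileResult's accessors (getQueryName, getLuceneDescription, getTime, getProfiledChildren), not the real class:

import java.util.List;

final class ProfileTreeChecks {
    // Illustrative node shape; ProfileResult exposes equivalent getters.
    record ProfileNode(String queryName, String luceneDescription, long timeNanos, List<ProfileNode> children) {}

    // Walk the profiled query tree and apply the per-node invariants the
    // tests above spell out level by level: every node is named, described,
    // and carries a positive timing.
    static void assertWellFormed(ProfileNode node) {
        if (node.queryName() == null || node.luceneDescription() == null) {
            throw new AssertionError("unnamed profile node: " + node);
        }
        if (node.timeNanos() <= 0L) {
            throw new AssertionError("non-positive time for " + node.queryName());
        }
        for (ProfileNode child : node.children()) {
            assertWellFormed(child);
        }
    }
}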
assertEquals(result.getProfiledChildren().size(), 2); - - // Check the children - List children = result.getProfiledChildren(); - assertEquals(children.size(), 2); - - ProfileResult childProfile = children.get(0); - assertEquals(childProfile.getQueryName(), "TermQuery"); - assertEquals(childProfile.getLuceneDescription(), "field1:one"); - assertThat(childProfile.getTime(), greaterThan(0L)); - assertNotNull(childProfile.getTimeBreakdown()); - assertEquals(childProfile.getProfiledChildren().size(), 0); - - childProfile = children.get(1); - assertEquals(childProfile.getQueryName(), "TermQuery"); - assertEquals(childProfile.getLuceneDescription(), "field1:two"); - assertThat(childProfile.getTime(), greaterThan(0L)); - assertNotNull(childProfile.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } - + }); } /** @@ -287,25 +284,25 @@ public void testEmptyBool() throws Exception { QueryBuilder q = QueryBuilders.boolQuery(); logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } /** @@ -332,25 +329,25 @@ public void testCollapsingBool() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - 
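Alongside the plain assertResponse wrapper, this diff leans on composed helpers: assertNoFailuresAndResponse (used in the aggregation profiler tests above) folds the old freestanding assertNoFailures(response) into the wrapper, and assertHitCountAndNoFailures (used later in ExistsIT) folds in assertHitCount as well. A sketch of how such compositions can layer, again with illustrative types rather than the real ElasticsearchAssertions signatures:

import java.util.function.Consumer;
import java.util.function.Supplier;

final class ComposedAssertions {
    // Illustrative response shape; the real SearchResponse is far richer.
    interface FakeSearchResponse {
        int failedShards();
        long totalHits();
        void decRef();
    }

    // Check for shard failures before running the caller's assertions,
    // still releasing the response on every exit path.
    static void assertNoFailuresAndResponse(Supplier<FakeSearchResponse> execute, Consumer<FakeSearchResponse> assertions) {
        FakeSearchResponse response = execute.get();
        try {
            if (response.failedShards() != 0) {
                throw new AssertionError("expected no shard failures but got " + response.failedShards());
            }
            assertions.accept(response);
        } finally {
            response.decRef();
        }
    }

    // Hit-count checking is just another consumer layered on top.
    static void assertHitCountAndNoFailures(Supplier<FakeSearchResponse> execute, long expectedHits) {
        assertNoFailuresAndResponse(execute, response -> {
            if (response.totalHits() != expectedHits) {
                throw new AssertionError("expected " + expectedHits + " hits but got " + response.totalHits());
            }
        });
    }
}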
assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } public void testBoosting() throws Exception { @@ -372,25 +369,25 @@ public void testBoosting() throws Exception { .negativeBoost(randomFloat()); logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } public void testDisMaxRange() throws Exception { @@ -412,25 +409,25 @@ public void testDisMaxRange() throws Exception { .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true)); logger.info("Query: {}", q); - SearchResponse resp = 
prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } public void testRange() throws Exception { @@ -451,25 +448,25 @@ public void testRange() throws Exception { logger.info("Query: {}", q.toString()); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + assertResponse(prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), response -> { + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); } - - CollectorResult result = 
searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + }); } public void testPhrase() throws Exception { @@ -492,36 +489,35 @@ public void testPhrase() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q) - .setIndices("test") - .setProfile(true) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .get(); + assertResponse( + prepareSearch().setQuery(q).setIndices("test").setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH), + response -> { + if (response.getShardFailures().length > 0) { + for (ShardSearchFailure f : response.getShardFailures()) { + logger.error("Shard search failure: {}", f); + } + fail(); + } - if (resp.getShardFailures().length > 0) { - for (ShardSearchFailure f : resp.getShardFailures()) { - logger.error("Shard search failure: {}", f); - } - fail(); - } + assertNotNull("Profile response element should not be null", response.getProfileResults()); + assertThat("Profile response should not be an empty array", response.getProfileResults().size(), not(0)); - assertNotNull("Profile response element should not be null", resp.getProfileResults()); - assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry shardResult : response.getProfileResults().entrySet()) { + for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { - for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { - for (ProfileResult result : searchProfiles.getQueryResults()) { - assertNotNull(result.getQueryName()); - assertNotNull(result.getLuceneDescription()); - assertThat(result.getTime(), greaterThan(0L)); - assertNotNull(result.getTimeBreakdown()); + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), is(not(emptyOrNullString()))); + assertThat(result.getTime(), greaterThan(0L)); + } } - - CollectorResult result = searchProfiles.getCollectorResult(); - assertThat(result.getName(), is(not(emptyOrNullString()))); - assertThat(result.getTime(), greaterThan(0L)); } - } + ); } /** @@ -543,8 +539,9 @@ public void testNoProfile() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = prepareSearch().setQuery(q).setProfile(false).get(); - assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); + assertResponse( + prepareSearch().setQuery(q).setProfile(false), + response -> assertThat("Profile response element should be an empty map", response.getProfileResults().size(), equalTo(0)) + ); } - } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index 099100a7a67e3..846696c81e288 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.explain.ExplainResponse; import 
org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; @@ -30,7 +29,9 @@ import static java.util.Collections.singletonMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; public class ExistsIT extends ESIntegTestCase { @@ -113,46 +114,46 @@ public void testExists() throws Exception { expected.put("vec", 2); final long numDocs = sources.length; - SearchResponse allDocs = prepareSearch("idx").setSize(sources.length).get(); - assertNoFailures(allDocs); - assertHitCount(allDocs, numDocs); - for (Map.Entry entry : expected.entrySet()) { - final String fieldName = entry.getKey(); - final int count = entry.getValue(); - // exists - SearchResponse resp = prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); - assertNoFailures(resp); - try { - assertEquals( - String.format( - Locale.ROOT, - "exists(%s, %d) mapping: %s response: %s", - fieldName, - count, - Strings.toString(mapping), - resp - ), - count, - resp.getHits().getTotalHits().value - ); - } catch (AssertionError e) { - for (SearchHit searchHit : allDocs.getHits()) { - final String index = searchHit.getIndex(); - final String id = searchHit.getId(); - final ExplainResponse explanation = client().prepareExplain(index, id) - .setQuery(QueryBuilders.existsQuery(fieldName)) - .get(); - logger.info( - "Explanation for [{}] / [{}] / [{}]: [{}]", - fieldName, - id, - searchHit.getSourceAsString(), - explanation.getExplanation() - ); - } - throw e; + assertNoFailuresAndResponse(prepareSearch("idx").setSize(sources.length), allDocs -> { + assertHitCount(allDocs, numDocs); + for (Map.Entry entry : expected.entrySet()) { + final String fieldName = entry.getKey(); + final int count = entry.getValue(); + // exists + assertNoFailuresAndResponse(prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)), response -> { + try { + assertEquals( + String.format( + Locale.ROOT, + "exists(%s, %d) mapping: %s response: %s", + fieldName, + count, + Strings.toString(mapping), + response + ), + count, + response.getHits().getTotalHits().value + ); + } catch (AssertionError e) { + for (SearchHit searchHit : allDocs.getHits()) { + final String index = searchHit.getIndex(); + final String id = searchHit.getId(); + final ExplainResponse explanation = client().prepareExplain(index, id) + .setQuery(QueryBuilders.existsQuery(fieldName)) + .get(); + logger.info( + "Explanation for [{}] / [{}] / [{}]: [{}]", + fieldName, + id, + searchHit.getSourceAsString(), + explanation.getExplanation() + ); + } + throw e; + } + }); } - } + }); } public void testFieldAlias() throws Exception { @@ -198,10 +199,7 @@ public void testFieldAlias() throws Exception { for (Map.Entry entry : expected.entrySet()) { String fieldName = entry.getKey(); int expectedCount = entry.getValue(); - - SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); - assertNoFailures(response); - assertHitCount(response, expectedCount); + 
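The testExists conversion above nests one response callback inside another: the outer allDocs response stays live for the explain-based diagnostics while each inner exists-query response is checked and released. The nesting generalizes to any case where two responses must be held side by side (testSingleField and testEquivalence in MultiMatchQueryIT below use it the same way); a sketch with hypothetical types:

import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;

final class NestedResponses {
    // Hypothetical stand-in for a ref-counted response type.
    interface Releasable {
        void decRef();
    }

    static <R extends Releasable> void withResponse(Supplier<R> execute, Consumer<R> body) {
        R response = execute.get();
        try {
            body.accept(response);
        } finally {
            response.decRef();
        }
    }

    // Keep two responses live at once by nesting the callbacks: the outer
    // response is still valid inside the inner block, and both are released
    // on the way out even if an assertion throws.
    static <R extends Releasable> void withBoth(Supplier<R> first, Supplier<R> second, BiConsumer<R, R> body) {
        withResponse(first, a -> withResponse(second, b -> body.accept(a, b)));
    }
}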
assertHitCountAndNoFailures(prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)), expectedCount); } } @@ -231,8 +229,6 @@ public void testFieldAliasWithNoDocValues() throws Exception { indexRequests.add(client().prepareIndex("idx").setSource("foo", 43)); indexRandom(true, false, indexRequests); - SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")).get(); - assertNoFailures(response); - assertHitCount(response, 2); + assertHitCountAndNoFailures(prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")), 2L); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java index 1e18c0ca3c59c..fd119d3145353 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.query.IntervalQueryBuilder; @@ -30,6 +29,7 @@ import static java.util.Collections.singletonMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; public class IntervalQueriesIT extends ESIntegTestCase { @@ -56,10 +56,11 @@ public void testEmptyIntervalsWithNestedMappings() throws InterruptedException { client().prepareIndex("nested").setId("3").setSource("text", "quick") ); - SearchResponse resp = prepareSearch("nested").setQuery( - new IntervalQueryBuilder("empty_text", new IntervalsSourceProvider.Match("an empty query", 0, true, null, null, null)) - ).get(); - assertEquals(0, resp.getFailedShards()); + assertNoFailures( + prepareSearch("nested").setQuery( + new IntervalQueryBuilder("empty_text", new IntervalsSourceProvider.Match("an empty query", 0, true, null, null, null)) + ) + ); } private static class EmptyAnalyzer extends Analyzer { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index f251ab5cb6269..26c2da7736f6c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; @@ -53,6 +54,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -267,72 +270,91 @@ private XContentBuilder createMapping() throws IOException { public void testDefaults() throws ExecutionException, InterruptedException { MatchQueryParser.Type type = MatchQueryParser.Type.BOOLEAN; - SearchResponse searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) - ) - ).get(); - Set topNIds = Sets.newHashSet("theone", "theother"); - for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { - topNIds.remove(searchResponse.getHits().getAt(i).getId()); - // very likely that we hit a random doc that has the same score so orders are random since - // the doc id is the tie-breaker - } - assertThat(topNIds, empty()); - assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); - - searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) - .type(type) - ) - ).get(); - assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); - assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); - - searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").operator(Operator.OR).type(type) - ) - ).get(); - assertFirstHit(searchResponse, hasId("theother")); - - searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) - ) - ).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("theone")); - - searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) - ) - ).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("theone")); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) + ) + ), + response -> { + Set topNIds = Sets.newHashSet("theone", "theother"); + for (int i = 0; i < response.getHits().getHits().length; i++) { + topNIds.remove(response.getHits().getAt(i).getId()); + // very likely that we hit a random doc that has the same score so orders are random since + // the doc id is the tie-breaker + } + assertThat(topNIds, empty()); + assertThat(response.getHits().getHits()[0].getScore(), greaterThan(response.getHits().getHits()[1].getScore())); + } + ); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) + .type(type) + ) + ), + response -> { + assertFirstHit(response, anyOf(hasId("theone"), hasId("theother"))); + 
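One consequence of the callback style shows up where the old code assigned a single SearchResponse variable from an if/else, as testEquivalence further down in this file did with SearchResponse left: lambdas only capture effectively-final locals, so the diff instead assigns the unexecuted SearchRequestBuilder (leftSearch) in the branch and wraps it exactly once. A compact sketch of that move, with placeholder types standing in for the real builder and response:

import java.util.function.Consumer;
import java.util.function.Supplier;

final class ConditionalRequestExample {
    static <R> void assertResponse(Supplier<R> execute, Consumer<R> assertions) {
        // Release handling is elided here; see the earlier wrapper sketch.
        assertions.accept(execute.get());
    }

    static void run(boolean tieBreak, Supplier<String> plainSearch, Supplier<String> tieBreakSearch) {
        // Pick the *unexecuted* request in the branch. The resulting local is
        // effectively final, so the assertion lambda may capture it, which
        // the old "assign the response in an if/else" shape could not allow.
        Supplier<String> leftSearch = tieBreak ? tieBreakSearch : plainSearch;
        assertResponse(leftSearch, left -> {
            // assertions over `left` go here
        });
    }
}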
assertThat(response.getHits().getHits()[0].getScore(), greaterThan(response.getHits().getHits()[1].getScore())); + } + ); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").operator(Operator.OR).type(type) + ) + ), + response -> assertFirstHit(response, hasId("theother")) + ); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) + ) + ), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("theone")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) + ) + ), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("theone")); + } + ); } public void testPhraseType() { - SearchResponse searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") - .operator(Operator.OR) - .type(MatchQueryParser.Type.PHRASE) - ) - ).get(); - assertFirstHit(searchResponse, hasId("ultimate2")); - assertHitCount(searchResponse, 1L); - - searchResponse = prepareSearch("test").setQuery( - randomizeType( - multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase").operator( - Operator.OR - ).type(MatchQueryParser.Type.PHRASE) - ) - ).get(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") + .operator(Operator.OR) + .type(MatchQueryParser.Type.PHRASE) + ) + ), + response -> { + assertFirstHit(response, hasId("ultimate2")); + assertHitCount(response, 1L); + } + ); + assertResponse( + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase").operator( + Operator.OR + ).type(MatchQueryParser.Type.PHRASE) + ) + ), + response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + ); assertSearchHitsWithoutFailures( prepareSearch("test").setQuery( @@ -348,14 +370,15 @@ public void testPhraseType() { } public void testSingleField() throws NoSuchFieldException, IllegalAccessException { - SearchResponse searchResponse = prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill"))).get(); - assertNoFailures(searchResponse); - assertFirstHit(searchResponse, hasId("theone")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill"))), + response -> assertFirstHit(response, hasId("theone")) + ); - searchResponse = prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill", "int-field")).analyzer("category")) - .get(); - assertNoFailures(searchResponse); - assertFirstHit(searchResponse, hasId("theone")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill", "int-field")).analyzer("category")), + response -> assertFirstHit(response, hasId("theone")) + ); String[] fields = { "full_name", @@ -393,34 +416,39 @@ public void testSingleField() throws 
NoSuchFieldException, IllegalAccessExceptio builder.append(RandomPicks.randomFrom(random(), query)).append(" "); } MultiMatchQueryBuilder multiMatchQueryBuilder = randomizeType(multiMatchQuery(builder.toString(), field)); - SearchResponse multiMatchResp = prepareSearch("test") - // id sort field is a tie, in case hits have the same score, - // the hits will be sorted the same consistently - .addSort("_score", SortOrder.DESC) - .addSort("id", SortOrder.ASC) - .setQuery(multiMatchQueryBuilder) - .get(); - MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); - - SearchResponse matchResp = prepareSearch("test") - // id tie sort - .addSort("_score", SortOrder.DESC) - .addSort("id", SortOrder.ASC) - .setQuery(matchQueryBuilder) - .get(); - assertThat( - "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + assertResponse( + prepareSearch("test") + // id sort field is a tie, in case hits have the same score, + // the hits will be sorted the same consistently + .addSort("_score", SortOrder.DESC) + .addSort("id", SortOrder.ASC) + .setQuery(multiMatchQueryBuilder), + multiMatchResp -> { + MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); + assertResponse( + prepareSearch("test") + // id tie sort + .addSort("_score", SortOrder.DESC) + .addSort("id", SortOrder.ASC) + .setQuery(matchQueryBuilder), + matchResp -> { + assertThat( + "field: " + field + " query: " + builder.toString(), + multiMatchResp.getHits().getTotalHits().value, + equalTo(matchResp.getHits().getTotalHits().value) + ); + SearchHits hits = multiMatchResp.getHits(); + if (field.startsWith("missing")) { + assertEquals(0, hits.getHits().length); + } + for (int j = 0; j < hits.getHits().length; j++) { + assertThat(hits.getHits()[j].getScore(), equalTo(matchResp.getHits().getHits()[j].getScore())); + assertThat(hits.getHits()[j].getId(), equalTo(matchResp.getHits().getHits()[j].getId())); + } + } + ); + } ); - SearchHits hits = multiMatchResp.getHits(); - if (field.startsWith("missing")) { - assertEquals(0, hits.getHits().length); - } - for (int j = 0; j < hits.getHits().length; j++) { - assertThat(hits.getHits()[j].getScore(), equalTo(matchResp.getHits().getHits()[j].getScore())); - assertThat(hits.getHits()[j].getId(), equalTo(matchResp.getHits().getHits()[j].getId())); - } } } @@ -435,23 +463,24 @@ public void testEquivalence() { MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? 
"category" : "categ*"); - SearchResponse left = prepareSearch("test").setSize(numDocs) - .addSort(SortBuilders.scoreSort()) - .addSort(SortBuilders.fieldSort("id")) - .setQuery(randomizeType(multiMatchQueryBuilder.operator(Operator.OR).type(type))) - .get(); - - SearchResponse right = prepareSearch("test").setSize(numDocs) - .addSort(SortBuilders.scoreSort()) - .addSort(SortBuilders.fieldSort("id")) - .setQuery( - disMaxQuery().add(matchQuery("full_name", "marvel hero captain america")) - .add(matchQuery("first_name", "marvel hero captain america")) - .add(matchQuery("last_name", "marvel hero captain america")) - .add(matchQuery("category", "marvel hero captain america")) + assertResponse( + prepareSearch("test").setSize(numDocs) + .addSort(SortBuilders.scoreSort()) + .addSort(SortBuilders.fieldSort("id")) + .setQuery(randomizeType(multiMatchQueryBuilder.operator(Operator.OR).type(type))), + left -> assertResponse( + prepareSearch("test").setSize(numDocs) + .addSort(SortBuilders.scoreSort()) + .addSort(SortBuilders.fieldSort("id")) + .setQuery( + disMaxQuery().add(matchQuery("full_name", "marvel hero captain america")) + .add(matchQuery("first_name", "marvel hero captain america")) + .add(matchQuery("last_name", "marvel hero captain america")) + .add(matchQuery("category", "marvel hero captain america")) + ), + right -> assertEquivalent("marvel hero captain america", left, right) ) - .get(); - assertEquivalent("marvel hero captain america", left, right); + ); } { @@ -461,64 +490,68 @@ public void testEquivalence() { MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("captain america", "*_name", randomBoolean() ? "category" : "categ*"); - SearchResponse left = prepareSearch("test").setSize(numDocs) - .addSort(SortBuilders.scoreSort()) - .addSort(SortBuilders.fieldSort("id")) - .setQuery( - randomizeType(multiMatchQueryBuilder.operator(op).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch).type(type)) - ) - .get(); - - SearchResponse right = prepareSearch("test").setSize(numDocs) - .addSort(SortBuilders.scoreSort()) - .addSort(SortBuilders.fieldSort("id")) - .setQuery( - boolQuery().minimumShouldMatch(minShouldMatch) - .should( - randomBoolean() - ? termQuery("full_name", "captain america") - : matchQuery("full_name", "captain america").operator(op) + assertResponse( + prepareSearch("test").setSize(numDocs) + .addSort(SortBuilders.scoreSort()) + .addSort(SortBuilders.fieldSort("id")) + .setQuery( + randomizeType( + multiMatchQueryBuilder.operator(op).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch).type(type) ) - .should(matchQuery("first_name", "captain america").operator(op)) - .should(matchQuery("last_name", "captain america").operator(op)) - .should(matchQuery("category", "captain america").operator(op)) + ), + left -> assertResponse( + prepareSearch("test").setSize(numDocs) + .addSort(SortBuilders.scoreSort()) + .addSort(SortBuilders.fieldSort("id")) + .setQuery( + boolQuery().minimumShouldMatch(minShouldMatch) + .should( + randomBoolean() + ? 
termQuery("full_name", "captain america")
+                                : matchQuery("full_name", "captain america").operator(op)
+                        )
+                        .should(matchQuery("first_name", "captain america").operator(op))
+                        .should(matchQuery("last_name", "captain america").operator(op))
+                        .should(matchQuery("category", "captain america").operator(op))
+                ),
+                right -> assertEquivalent("captain america", left, right)
             )
-                .get();
-            assertEquivalent("captain america", left, right);
+            );
         }
         {
             String minShouldMatch = randomBoolean() ? null : "" + between(0, 1);
-            SearchResponse left = prepareSearch("test").setSize(numDocs)
-                .addSort(SortBuilders.scoreSort())
-                .addSort(SortBuilders.fieldSort("id"))
-                .setQuery(
-                    randomizeType(
-                        multiMatchQuery("capta", "full_name", "first_name", "last_name", "category").type(
-                            MatchQueryParser.Type.PHRASE_PREFIX
-                        ).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch)
-                    )
-                )
-                .get();
-
-            SearchResponse right = prepareSearch("test").setSize(numDocs)
-                .addSort(SortBuilders.scoreSort())
-                .addSort(SortBuilders.fieldSort("id"))
-                .setQuery(
-                    boolQuery().minimumShouldMatch(minShouldMatch)
-                        .should(matchPhrasePrefixQuery("full_name", "capta"))
-                        .should(matchPhrasePrefixQuery("first_name", "capta"))
-                        .should(matchPhrasePrefixQuery("last_name", "capta"))
-                        .should(matchPhrasePrefixQuery("category", "capta"))
+            assertResponse(
+                prepareSearch("test").setSize(numDocs)
+                    .addSort(SortBuilders.scoreSort())
+                    .addSort(SortBuilders.fieldSort("id"))
+                    .setQuery(
+                        randomizeType(
+                            multiMatchQuery("capta", "full_name", "first_name", "last_name", "category").type(
+                                MatchQueryParser.Type.PHRASE_PREFIX
+                            ).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch)
+                        )
+                    ),
+                left -> assertResponse(
+                    prepareSearch("test").setSize(numDocs)
+                        .addSort(SortBuilders.scoreSort())
+                        .addSort(SortBuilders.fieldSort("id"))
+                        .setQuery(
+                            boolQuery().minimumShouldMatch(minShouldMatch)
+                                .should(matchPhrasePrefixQuery("full_name", "capta"))
+                                .should(matchPhrasePrefixQuery("first_name", "capta"))
+                                .should(matchPhrasePrefixQuery("last_name", "capta"))
+                                .should(matchPhrasePrefixQuery("category", "capta"))
+                    ),
+                    right -> assertEquivalent("capta", left, right)
                 )
-                .get();
-            assertEquivalent("capta", left, right);
+            );
         }
         {
             String minShouldMatch = randomBoolean() ? null : "" + between(0, 1);
-            SearchResponse left;
+            SearchRequestBuilder leftSearch;
             if (randomBoolean()) {
-                left = prepareSearch("test").setSize(numDocs)
+                leftSearch = prepareSearch("test").setSize(numDocs)
                     .addSort(SortBuilders.scoreSort())
                     .addSort(SortBuilders.fieldSort("id"))
                     .setQuery(
@@ -527,10 +560,9 @@ public void testEquivalence() {
                             MatchQueryParser.Type.PHRASE
                         ).minimumShouldMatch(minShouldMatch)
                     )
-                )
-                .get();
+                );
             } else {
-                left = prepareSearch("test").setSize(numDocs)
+                leftSearch = prepareSearch("test").setSize(numDocs)
                     .addSort(SortBuilders.scoreSort())
                     .addSort(SortBuilders.fieldSort("id"))
                     .setQuery(
@@ -539,163 +571,206 @@ public void testEquivalence() {
                             MatchQueryParser.Type.PHRASE
                         ).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch)
                     )
-                )
-                .get();
+                );
             }
-            SearchResponse right = prepareSearch("test").setSize(numDocs)
-                .addSort(SortBuilders.scoreSort())
-                .addSort(SortBuilders.fieldSort("id"))
-                .setQuery(
-                    boolQuery().minimumShouldMatch(minShouldMatch)
-                        .should(matchPhraseQuery("full_name", "captain america"))
-                        .should(matchPhraseQuery("first_name", "captain america"))
-                        .should(matchPhraseQuery("last_name", "captain america"))
-                        .should(matchPhraseQuery("category", "captain america"))
+            assertResponse(
+                leftSearch,
+                left -> assertResponse(
+                    prepareSearch("test").setSize(numDocs)
+                        .addSort(SortBuilders.scoreSort())
+                        .addSort(SortBuilders.fieldSort("id"))
+                        .setQuery(
+                            boolQuery().minimumShouldMatch(minShouldMatch)
+                                .should(matchPhraseQuery("full_name", "captain america"))
+                                .should(matchPhraseQuery("first_name", "captain america"))
+                                .should(matchPhraseQuery("last_name", "captain america"))
+                                .should(matchPhraseQuery("category", "captain america"))
+                    ),
+                    right -> assertEquivalent("captain america", left, right)
                 )
-                .get();
-            assertEquivalent("captain america", left, right);
+            );
         }
     }
 }

 public void testCrossFieldMode() throws ExecutionException, InterruptedException {
-        SearchResponse searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america", "full_name", "first_name", "last_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .operator(Operator.OR)
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).operator(Operator.OR)
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theother"));
-        assertSecondHit(searchResponse, hasId("theone"));
-        assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore()));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).operator(Operator.OR)
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theother"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).analyzer("category").lenient(true).operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).analyzer("category").lenient(true).operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).analyzer("category").lenient(true).operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america 15", "first_name", "last_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .lenient(true)
-                    .analyzer("category")
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("25 15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("25 15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("25 15", "first_name", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .analyzer("category")
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("25 15", "int-field", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .analyzer("category")
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("25 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .analyzer("category")
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).analyzer("category").operator(Operator.OR)
-            )
-        ).get();
-        assertFirstHit(searchResponse, hasId("theone"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america", "full_name", "first_name", "last_name").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).operator(Operator.OR)
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).operator(Operator.OR)
+                )
+            ),
+            response -> {
+                assertFirstHit(response, hasId("theother"));
+                assertSecondHit(response, hasId("theone"));
+                assertThat(response.getHits().getHits()[0].getScore(), greaterThan(response.getHits().getHits()[1].getScore()));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).operator(Operator.OR)
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theother"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).analyzer("category").lenient(true).operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).analyzer("category").lenient(true).operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).analyzer("category").lenient(true).operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america 15", "first_name", "last_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .lenient(true)
+                        .analyzer("category")
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("25 15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("25 15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("25 15", "first_name", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .analyzer("category")
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("25 15", "int-field", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .analyzer("category")
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("25 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .analyzer("category")
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );
+
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).analyzer("category").operator(Operator.OR)
+                )
+            ),
+            response -> assertFirstHit(response, hasId("theone"))
+        );

         // test group based on analyzer -- all fields are grouped into a cross field search
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).analyzer("category").operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).analyzer("category").operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
         // counter example
         assertHitCount(
             prepareSearch("test").setQuery(
@@ -721,83 +796,112 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException
         );

         // test if boosts work
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("the ultimate", "full_name", "first_name", "category").field("last_name", 10)
-                    .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
-                    .operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted
-        assertSecondHit(searchResponse, hasId("ultimate2"));
-        assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore()));
-
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("the ultimate", "full_name", "first_name", "category").field("last_name", 10)
+                        .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasId("ultimate1")); // has ultimate in the last_name and that is boosted
+                assertSecondHit(response, hasId("ultimate2"));
+                assertThat(response.getHits().getHits()[0].getScore(), greaterThan(response.getHits().getHits()[1].getScore()));
+            }
+        );
         // since we try to treat the matching fields as one field scores are very similar but we have a small bias towards the
         // more frequent field that acts as a tie-breaker internally
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").type(
-                    MultiMatchQueryBuilder.Type.CROSS_FIELDS
-                ).operator(Operator.AND)
-            )
-        ).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasId("ultimate2"));
-        assertSecondHit(searchResponse, hasId("ultimate1"));
-        assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore()));
-
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").type(
+                        MultiMatchQueryBuilder.Type.CROSS_FIELDS
+                    ).operator(Operator.AND)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasId("ultimate2"));
+                assertSecondHit(response, hasId("ultimate1"));
+                assertThat(response.getHits().getHits()[0].getScore(), greaterThan(response.getHits().getHits()[1].getScore()));
+            }
+        );
         // Test group based on numeric fields
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("15", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
+        assertResponse(
+            prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("15", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
         // Two numeric fields together caused trouble at one point!
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("theone"));
-
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("alpha 15", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true))
-        ).get();
-        /*
-         * Doesn't find the one because "alpha 15" isn't a number and we don't
-         * break on spaces.
-         */
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("ultimate1"));
-
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("theone"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("alpha 15", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true)
+                )
+            ),
+            response -> {
+                /*
+                 * Doesn't find the one because "alpha 15" isn't a number and we don't
+                 * break on spaces.
+                 */
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("ultimate1"));
+            }
+        );
         // Lenient wasn't always properly lenient with two numeric fields
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(
-                multiMatchQuery("alpha 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true)
-            )
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("ultimate1"));
-
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(
+                    multiMatchQuery("alpha 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                        .lenient(true)
+                )
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("ultimate1"));
+            }
+        );
         // Check that cross fields works with date fields
-        searchResponse = prepareSearch("test").setQuery(
-            randomizeType(multiMatchQuery("now", "f*", "date").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).lenient(true)
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("nowHero"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                randomizeType(multiMatchQuery("now", "f*", "date").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).lenient(true)
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertFirstHit(response, hasId("nowHero"));
+            }
+        );
     }

     /**
@@ -819,14 +923,21 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution
         builders.add(client().prepareIndex(idx).setId("2").setSource("title", "bar", "body", "foo"));
         indexRandom(true, false, builders);

-        SearchResponse searchResponse = prepareSearch(idx).setExplain(true)
-            .setQuery(multiMatchQuery("foo").field("title", 100).field("body").fuzziness(Fuzziness.ZERO))
-            .get();
-        SearchHit[] hits = searchResponse.getHits().getHits();
-        assertNotEquals("both documents should be on different shards", hits[0].getShard().getShardId(), hits[1].getShard().getShardId());
-        assertEquals("1", hits[0].getId());
-        assertEquals("2", hits[1].getId());
-        assertThat(hits[0].getScore(), greaterThan(hits[1].getScore()));
+        assertResponse(
+            prepareSearch(idx).setExplain(true)
+                .setQuery(multiMatchQuery("foo").field("title", 100).field("body").fuzziness(Fuzziness.ZERO)),
+            response -> {
+                SearchHit[] hits = response.getHits().getHits();
+                assertNotEquals(
+                    "both documents should be on different shards",
+                    hits[0].getShard().getShardId(),
+                    hits[1].getShard().getShardId()
+                );
+                assertEquals("1", hits[0].getId());
+                assertEquals("2", hits[1].getId());
+                assertThat(hits[0].getScore(), greaterThan(hits[1].getScore()));
+            }
+        );
     }

     private static void assertEquivalent(String query, SearchResponse left, SearchResponse right) {
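Every hunk in these test files applies the same mechanical rewrite: a `SearchResponse resp = request.get(); <assertions>` sequence becomes `assertResponse(request, resp -> { <assertions> })`, so the response is consumed inside a callback instead of being held in a local. A minimal sketch of the idea behind such a helper follows. It is an illustration only: the real helper lives in `ElasticsearchAssertions` and operates on `ActionRequestBuilder` and ref-counted response types, while `RefCounted` and the `Supplier`-based signature below are stand-ins invented for this sketch.

    import java.util.function.Consumer;
    import java.util.function.Supplier;

    // Illustrative only: execute the request, hand the response to the
    // assertions, and always release it afterwards.
    final class AssertResponseSketch {
        interface RefCounted {
            void decRef(); // release one reference to the response
        }

        static <R extends RefCounted> void assertResponse(Supplier<R> execute, Consumer<R> assertions) {
            R response = execute.get();      // run the search, analogous to SearchRequestBuilder.get()
            try {
                assertions.accept(response); // run the test's assertions against the response
            } finally {
                response.decRef();           // released even if an assertion throws
            }
        }
    }

Under that assumption, the try/finally is the point of the whole refactoring: assertions that used to run after `.get()` could leave the response unreleased when they failed, while the callback form cannot.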
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java
index 882e18eb593aa..f101106917184 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java
@@ -10,7 +10,6 @@
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.search.SearchHit;
@@ -28,7 +27,8 @@
 import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
@@ -49,17 +49,18 @@ public void testBasicAllQuery() throws Exception {
         reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz"));
         indexRandom(true, false, reqs);

-        SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get();
-        assertHitCount(resp, 2L);
-        assertHits(resp.getHits(), "1", "3");
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("bar")).get();
-        assertHitCount(resp, 2L);
-        assertHits(resp.getHits(), "1", "3");
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("Bar")).get();
-        assertHitCount(resp, 3L);
-        assertHits(resp.getHits(), "1", "2", "3");
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo")), response -> {
+            assertHitCount(response, 2L);
+            assertHits(response.getHits(), "1", "3");
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("bar")), response -> {
+            assertHitCount(response, 2L);
+            assertHits(response.getHits(), "1", "3");
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("Bar")), response -> {
+            assertHitCount(response, 3L);
+            assertHits(response.getHits(), "1", "2", "3");
+        });
     }

     public void testWithDate() throws Exception {
@@ -68,21 +69,22 @@ public void testWithDate() throws Exception {
         reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01"));
         indexRandom(true, false, reqs);

-        SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo bar")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get();
-        assertHits(resp.getHits(), "1");
-        assertHitCount(resp, 1L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo bar")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), response -> {
+            assertHits(response.getHits(), "1");
+            assertHitCount(response, 1L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
     }

     public void testWithLotsOfTypes() throws Exception {
@@ -95,21 +97,22 @@ public void testWithLotsOfTypes() throws Exception {
         );
         indexRandom(true, false, reqs);

-        SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo bar")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get();
-        assertHits(resp.getHits(), "1");
-        assertHitCount(resp, 1L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")).get();
-        assertHits(resp.getHits(), "1", "2");
-        assertHitCount(resp, 2L);
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo bar")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), response -> {
+            assertHits(response.getHits(), "1");
+            assertHitCount(response, 1L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")), response -> {
+            assertHits(response.getHits(), "1", "2");
+            assertHitCount(response, 2L);
+        });
     }

     public void testDocWithAllTypes() throws Exception {
@@ -118,32 +121,20 @@ public void testDocWithAllTypes() throws Exception {
         reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON));
         indexRandom(true, false, reqs);

-        SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("Bar")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("Baz")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("19")).get();
-        assertHits(resp.getHits(), "1");
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("Bar")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("Baz")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("19")), response -> assertHits(response.getHits(), "1"));
         // nested doesn't match because it's hidden
-        resp = prepareSearch("test").setQuery(queryStringQuery("1476383971")).get();
-        assertHits(resp.getHits(), "1");
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("1476383971")), response -> assertHits(response.getHits(), "1"));
         // bool doesn't match
-        resp = prepareSearch("test").setQuery(queryStringQuery("7")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("23")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("1293")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("42")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("1.7")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("1.5")).get();
-        assertHits(resp.getHits(), "1");
-        resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")).get();
-        assertHits(resp.getHits(), "1");
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("7")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("23")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("1293")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("42")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("1.7")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("1.5")), response -> assertHits(response.getHits(), "1"));
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")), response -> assertHits(response.getHits(), "1"));
         // binary doesn't match
         // suggest doesn't match
         // geo_point doesn't match
@@ -156,17 +147,18 @@ public void testKeywordWithWhitespace() throws Exception {
         reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar"));
         indexRandom(true, false, reqs);

-        SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get();
-        assertHits(resp.getHits(), "3");
-        assertHitCount(resp, 1L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("bar")).get();
-        assertHits(resp.getHits(), "2", "3");
-        assertHitCount(resp, 2L);
-
-        resp = prepareSearch("test").setQuery(queryStringQuery("Foo Bar")).get();
-        assertHits(resp.getHits(), "1", "2", "3");
-        assertHitCount(resp, 3L);
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo")), response -> {
+            assertHits(response.getHits(), "3");
+            assertHitCount(response, 1L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("bar")), response -> {
+            assertHits(response.getHits(), "2", "3");
+            assertHitCount(response, 2L);
+        });
+        assertResponse(prepareSearch("test").setQuery(queryStringQuery("Foo Bar")), response -> {
+            assertHits(response.getHits(), "1", "2", "3");
+            assertHitCount(response, 3L);
+        });
     }

     public void testAllFields() throws Exception {
@@ -182,9 +174,10 @@ public void testAllFields() throws Exception {

         assertHitCount(prepareSearch("test_1").setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.AND)), 0L);

-        SearchResponse resp = prepareSearch("test_1").setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.OR)).get();
-        assertHits(resp.getHits(), "1");
-        assertHitCount(resp, 1L);
+        assertResponse(prepareSearch("test_1").setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.OR)), response -> {
+            assertHits(response.getHits(), "1");
+            assertHitCount(response, 1L);
+        });
     }

     public void testPhraseQueryOnFieldWithNoPositions() throws Exception {
@@ -227,11 +220,10 @@ public void testFieldAlias() throws Exception {
         indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three"));
         indexRandom(true, false, indexRequests);

-        SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")).get();
-
-        assertNoFailures(response);
-        assertHitCount(response, 2);
-        assertHits(response.getHits(), "2", "3");
+        assertNoFailuresAndResponse(prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")), response -> {
+            assertHitCount(response, 2);
+            assertHits(response.getHits(), "2", "3");
+        });
     }

     public void testFieldAliasWithEmbeddedFieldNames() throws Exception {
@@ -241,11 +233,10 @@ public void testFieldAliasWithEmbeddedFieldNames() throws Exception {
         indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three"));
         indexRandom(true, false, indexRequests);

-        SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")).get();
-
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertHits(response.getHits(), "3");
+        assertNoFailuresAndResponse(prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")), response -> {
+            assertHitCount(response, 1);
+            assertHits(response.getHits(), "3");
+        });
     }

     public void testFieldAliasWithWildcardField() throws Exception {
@@ -255,11 +246,10 @@ public void testFieldAliasWithWildcardField() throws Exception {
         indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three"));
         indexRandom(true, false, indexRequests);

-        SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")).get();
-
-        assertNoFailures(response);
-        assertHitCount(response, 2);
-        assertHits(response.getHits(), "2", "3");
+        assertNoFailuresAndResponse(prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")), response -> {
+            assertHitCount(response, 2);
+            assertHits(response.getHits(), "2", "3");
+        });
     }

     public void testFieldAliasOnDisallowedFieldType() throws Exception {
@@ -269,11 +259,10 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception {

         // The wildcard field matches aliases for both a text and geo_point field.
         // By default, the geo_point field should be ignored when building the query.
-        SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get();
-
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertHits(response.getHits(), "1");
+        assertNoFailuresAndResponse(prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")), response -> {
+            assertHitCount(response, 1);
+            assertHits(response.getHits(), "1");
+        });
     }

     private void assertHits(SearchHits hits, String... ids) {
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java
index c9c7c2a56eea9..95ad5560aacd9 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java
@@ -9,7 +9,6 @@
 package org.elasticsearch.search.query;

 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -32,6 +31,7 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit;
@@ -73,17 +73,21 @@ public void testScriptScore() {
         Map<String, Object> params = new HashMap<>();
         params.put("param1", 0.1);
         Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", params);
-        SearchResponse resp = prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)).get();
-        assertNoFailures(resp);
-        assertOrderedSearchHits(resp, "10", "8", "6", "4", "2");
-        assertFirstHit(resp, hasScore(1.0f));
-        assertSecondHit(resp, hasScore(0.8f));
-        assertThirdHit(resp, hasScore(0.6f));
+        assertNoFailuresAndResponse(
+            prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)),
+            response -> {
+                assertOrderedSearchHits(response, "10", "8", "6", "4", "2");
+                assertFirstHit(response, hasScore(1.0f));
+                assertSecondHit(response, hasScore(0.8f));
+                assertThirdHit(response, hasScore(0.6f));
+            }
+        );

         // applying min score
-        resp = prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script).setMinScore(0.6f)).get();
-        assertNoFailures(resp);
-        assertOrderedSearchHits(resp, "10", "8", "6");
+        assertNoFailuresAndResponse(
+            prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script).setMinScore(0.6f)),
+            response -> assertOrderedSearchHits(response, "10", "8", "6")
+        );
     }

     public void testScriptScoreBoolQuery() {
@@ -98,11 +102,11 @@ public void testScriptScoreBoolQuery() {
         params.put("param1", 0.1);
         Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", params);
         QueryBuilder boolQuery = boolQuery().should(matchQuery("field1", "text1")).should(matchQuery("field1", "text10"));
-        SearchResponse resp = prepareSearch("test-index").setQuery(scriptScoreQuery(boolQuery, script)).get();
-        assertNoFailures(resp);
-        assertOrderedSearchHits(resp, "10", "1");
-        assertFirstHit(resp, hasScore(1.0f));
-        assertSecondHit(resp, hasScore(0.1f));
+        assertNoFailuresAndResponse(prepareSearch("test-index").setQuery(scriptScoreQuery(boolQuery, script)), response -> {
+            assertOrderedSearchHits(response, "10", "1");
+            assertFirstHit(response, hasScore(1.0f));
+            assertSecondHit(response, hasScore(0.1f));
+        });
     }

     // test that when the internal query is rewritten script_score works well
@@ -118,9 +122,10 @@ public void testRewrittenQuery() {
         RangeQueryBuilder rangeQB = new RangeQueryBuilder("field1").from("2019-01-01"); // the query should be rewritten to from:null
         Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", Map.of("param1", 0.1));
-        SearchResponse resp = prepareSearch("test-index2").setQuery(scriptScoreQuery(rangeQB, script)).get();
-        assertNoFailures(resp);
-        assertOrderedSearchHits(resp, "3", "2", "1");
+        assertNoFailuresAndResponse(
+            prepareSearch("test-index2").setQuery(scriptScoreQuery(rangeQB, script)),
+            response -> assertOrderedSearchHits(response, "3", "2", "1")
+        );
     }

     public void testDisallowExpensiveQueries() {
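One detail worth noting before the SearchQueryIT hunks below: when two consecutive assertResponse callbacks have to share a value (testConstantScoreQuery compares the hit total of the second search against the first), the diff uses a one-element array, `final long[] constantScoreTotalHits = new long[1]`. Java lambdas can only capture effectively final locals, so the array reference stays final while its single cell is mutated. A self-contained sketch of the pattern, with all names invented for the demo:

    import java.util.function.Consumer;

    // Stand-alone illustration: a one-element array is effectively final,
    // so a lambda may capture it and still mutate the value it holds.
    public class CapturedHolderDemo {
        public static void main(String[] args) {
            final long[] totalHits = new long[1];                       // mutable holder, reference never reassigned
            Consumer<Long> firstCallback = hits -> totalHits[0] = hits; // writes into the holder
            Consumer<Long> secondCallback = hits -> {
                if (totalHits[0] != hits) {                             // reads what the first callback stored
                    throw new AssertionError("expected " + totalHits[0] + " but got " + hits);
                }
            };
            firstCallback.accept(42L);
            secondCallback.accept(42L);
            System.out.println("hit counts matched: " + totalHits[0]);
        }
    }

An AtomicLong would express the same thing; the array is simply the lighter-weight idiom in test code.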
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java
index 918746021f381..072922da54798 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -17,7 +17,6 @@
 import org.apache.lucene.util.AttributeSource;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.common.document.DocumentField;
@@ -106,6 +105,8 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
@@ -208,36 +209,47 @@ public void testConstantScoreQuery() throws Exception {
             client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")
         );

-        SearchResponse searchResponse = prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get();
-        assertHitCount(searchResponse, 2L);
-        for (SearchHit searchHit : searchResponse.getHits().getHits()) {
-            assertThat(searchHit, hasScore(1.0f));
-        }
-
-        searchResponse = prepareSearch("test").setQuery(
-            boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat()))
-        ).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore()));
-
-        prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore()));
-
-        searchResponse = prepareSearch("test").setQuery(
-            constantScoreQuery(
-                boolQuery().must(matchAllQuery())
-                    .must(
-                        constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat()))
-                    )
-            )
-        ).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore()));
-        for (SearchHit searchHit : searchResponse.getHits().getHits()) {
-            assertThat(searchHit, hasScore(1.0f));
-        }
-
+        assertResponse(prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))), response -> {
+            assertHitCount(response, 2L);
+            for (SearchHit searchHit : response.getHits().getHits()) {
+                assertThat(searchHit, hasScore(1.0f));
+            }
+        });
+        assertResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat()))
+            ),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasScore(response.getHits().getAt(1).getScore()));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasScore(response.getHits().getAt(1).getScore()));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(
+                constantScoreQuery(
+                    boolQuery().must(matchAllQuery())
+                        .must(
+                            constantScoreQuery(matchQuery("field1", "quick")).boost(
+                                1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat())
+                            )
+                        )
+                )
+            ),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasScore(response.getHits().getAt(1).getScore()));
+                for (SearchHit searchHit : response.getHits().getHits()) {
+                    assertThat(searchHit, hasScore(1.0f));
+                }
+            }
+        );
         int num = scaledRandomIntBetween(100, 200);
         IndexRequestBuilder[] builders = new IndexRequestBuilder[num];
         for (int i = 0; i < builders.length; i++) {
@@ -249,24 +261,30 @@ public void testConstantScoreQuery() throws Exception {
         int queryRounds = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < queryRounds; i++) {
             MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num)));
-            searchResponse = prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num).get();
-            long totalHits = searchResponse.getHits().getTotalHits().value;
-            SearchHits hits = searchResponse.getHits();
-            for (SearchHit searchHit : hits) {
-                assertThat(searchHit, hasScore(1.0f));
-            }
-            searchResponse = prepareSearch("test_1").setQuery(
-                boolQuery().must(matchAllQuery())
-                    .must(constantScoreQuery(matchQuery).boost(1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat())))
-            ).setSize(num).get();
-            hits = searchResponse.getHits();
-            assertThat(hits.getTotalHits().value, equalTo(totalHits));
-            if (totalHits > 1) {
-                float expected = hits.getAt(0).getScore();
+            final long[] constantScoreTotalHits = new long[1];
+            assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> {
+                constantScoreTotalHits[0] = response.getHits().getTotalHits().value;
+                SearchHits hits = response.getHits();
                 for (SearchHit searchHit : hits) {
-                    assertThat(searchHit, hasScore(expected));
+                    assertThat(searchHit, hasScore(1.0f));
                 }
-            }
+            });
+            assertResponse(
+                prepareSearch("test_1").setQuery(
+                    boolQuery().must(matchAllQuery())
+                        .must(constantScoreQuery(matchQuery).boost(1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat())))
+                ).setSize(num),
+                response -> {
+                    SearchHits hits = response.getHits();
+                    assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0]));
+                    if (constantScoreTotalHits[0] > 1) {
+                        float expected = hits.getAt(0).getScore();
+                        for (SearchHit searchHit : hits) {
+                            assertThat(searchHit, hasScore(expected));
+                        }
+                    }
+                }
+            );
         }
     }

@@ -283,12 +301,14 @@ public void testAllDocsQueryString() throws InterruptedException, ExecutionExcep
         for (int i = 0; i < iters; i++) {
             assertHitCount(prepareSearch("test").setQuery(queryStringQuery("*:*^10.0").boost(10.0f)), 2L);

-            SearchResponse searchResponse = prepareSearch("test").setQuery(
-                boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchAllQuery()))
-            ).get();
-            assertHitCount(searchResponse, 2L);
-            assertThat((double) searchResponse.getHits().getAt(0).getScore(), closeTo(2.0, 0.1));
-            assertThat((double) searchResponse.getHits().getAt(1).getScore(), closeTo(2.0, 0.1));
+            assertResponse(
+                prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchAllQuery()))),
+                response -> {
+                    assertHitCount(response, 2L);
+                    assertThat((double) response.getHits().getAt(0).getScore(), closeTo(2.0, 0.1));
+                    assertThat((double) response.getHits().getAt(1).getScore(), closeTo(2.0, 0.1));
+                }
+            );
         }
     }

@@ -525,14 +545,15 @@ public void testMatchQueryNumeric() throws Exception {
             client().prepareIndex("test").setId("3").setSource("long", 3L, "double", 3.0d)
         );

-        SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("long", "1")).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
-        searchResponse = prepareSearch().setQuery(matchQuery("double", "2")).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("2"));
-        expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
+        assertResponse(prepareSearch().setQuery(matchQuery("long", "1")), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch().setQuery(matchQuery("double", "2")), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("2"));
+            expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
+        });
     }

     public void testMatchQueryFuzzy() throws Exception {
@@ -594,10 +615,10 @@ public void testMultiMatchQuery() throws Exception {
         builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f).operator(Operator.AND); // Operator only applies on terms
                                                                                                           // inside a field! Fields are
                                                                                                           // always OR-ed together.
-        SearchResponse searchResponse = prepareSearch().setQuery(builder).get();
-        assertHitCount(searchResponse, 2L);
-        assertSearchHits(searchResponse, "3", "1");
-
+        assertResponse(prepareSearch().setQuery(builder), response -> {
+            assertHitCount(response, 2L);
+            assertSearchHits(response, "3", "1");
+        });
         // Test lenient
         client().prepareIndex("test").setId("3").setSource("field1", "value7", "field2", "value8", "field4", 5).get();
         refresh();
@@ -607,19 +628,23 @@ public void testMultiMatchQuery() throws Exception {
         // when the number for shards is randomized and we expect failures
         // we can either run into partial or total failures depending on the current number of shards
         Matcher<String> reasonMatcher = containsString("NumberFormatException: For input string: \"value1\"");
-        ShardSearchFailure[] shardFailures;
         try {
-            prepareSearch().setQuery(builder).get();
-            shardFailures = searchResponse.getShardFailures();
-            assertThat("Expected shard failures, got none", shardFailures, not(emptyArray()));
+            assertResponse(prepareSearch().setQuery(builder), response -> {
+                ShardSearchFailure[] shardFailures = response.getShardFailures();
+                assertThat("Expected shard failures, got none", shardFailures, not(emptyArray()));
+                for (ShardSearchFailure shardSearchFailure : shardFailures) {
+                    assertThat(shardSearchFailure.status(), equalTo(RestStatus.BAD_REQUEST));
+                    assertThat(shardSearchFailure.reason(), reasonMatcher);
+                }
+            });
+
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
-            shardFailures = e.shardFailures();
-        }
-
-        for (ShardSearchFailure shardSearchFailure : shardFailures) {
-            assertThat(shardSearchFailure.status(), equalTo(RestStatus.BAD_REQUEST));
-            assertThat(shardSearchFailure.reason(), reasonMatcher);
+            ShardSearchFailure[] shardFailures = e.shardFailures();
+            for (ShardSearchFailure shardSearchFailure : shardFailures) {
+                assertThat(shardSearchFailure.status(), equalTo(RestStatus.BAD_REQUEST));
+                assertThat(shardSearchFailure.reason(), reasonMatcher);
+            }
         }

         builder.lenient(true);
@@ -672,35 +697,36 @@ public void testMultiMatchQueryMinShouldMatch() {
         MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2");

         multiMatchQuery.minimumShouldMatch("70%");
-        SearchResponse searchResponse = prepareSearch().setQuery(multiMatchQuery).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch().setQuery(multiMatchQuery), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         multiMatchQuery.minimumShouldMatch("30%");
-        searchResponse = prepareSearch().setQuery(multiMatchQuery).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("2"));
-
+        assertResponse(prepareSearch().setQuery(multiMatchQuery), response -> {
+            assertHitCount(response, 2L);
+            assertFirstHit(response, hasId("1"));
+            assertSecondHit(response, hasId("2"));
+        });
         multiMatchQuery.minimumShouldMatch("70%");
-        searchResponse = prepareSearch().setQuery(multiMatchQuery).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch().setQuery(multiMatchQuery), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         multiMatchQuery.minimumShouldMatch("30%");
-        searchResponse = prepareSearch().setQuery(multiMatchQuery).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("2"));
-
+        assertResponse(prepareSearch().setQuery(multiMatchQuery), response -> {
+            assertHitCount(response, 2L);
+            assertFirstHit(response, hasId("1"));
+            assertSecondHit(response, hasId("2"));
+        });
         multiMatchQuery = multiMatchQuery("value1 value2 bar", "field1");
         multiMatchQuery.minimumShouldMatch("100%");
         assertHitCount(prepareSearch().setQuery(multiMatchQuery), 0L);

         multiMatchQuery.minimumShouldMatch("70%");
-        searchResponse = prepareSearch().setQuery(multiMatchQuery).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
+        assertResponse(prepareSearch().setQuery(multiMatchQuery), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         // Min should match > # optional clauses returns no docs.
         multiMatchQuery = multiMatchQuery("value1 value2 value3", "field1", "field2");
         multiMatchQuery.minimumShouldMatch("4");
@@ -715,10 +741,10 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws
         BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1"))
             .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3));
-        SearchResponse searchResponse = prepareSearch().setQuery(boolQuery).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch().setQuery(boolQuery), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         boolQuery = boolQuery().must(termQuery("field1", "value1"))
             .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(1))
             // Only one should clause is defined, returns no docs.
@@ -728,10 +754,10 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws
         boolQuery = boolQuery().should(termQuery("field1", "value1"))
             .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3))
             .minimumShouldMatch(1);
-        searchResponse = prepareSearch().setQuery(boolQuery).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch().setQuery(boolQuery), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         boolQuery = boolQuery().must(termQuery("field1", "value1"))
             .must(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3));
         assertHitCount(prepareSearch().setQuery(boolQuery), 0L);
@@ -743,10 +769,10 @@ public void testFuzzyQueryString() {
         client().prepareIndex("test").setId("2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get();
         refresh();

-        SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("str:kimcy~1")).get();
-        assertNoFailures(searchResponse);
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
+        assertNoFailuresAndResponse(prepareSearch().setQuery(queryStringQuery("str:kimcy~1")), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
     }

     @TestIssueLogging(
@@ -764,15 +790,14 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException {
             client().prepareIndex("test").setId("2").setSource("important", "nothing important", "less_important", "phrase match")
         );

-        SearchResponse searchResponse = prepareSearch().setQuery(
-            queryStringQuery("\"phrase match\"").field("important", boost).field("less_important")
-        ).get();
-        assertHitCount(searchResponse, 2L);
-        assertFirstHit(searchResponse, hasId("1"));
-        assertSecondHit(searchResponse, hasId("2"));
-        assertThat(
-            (double) searchResponse.getHits().getAt(0).getScore(),
-            closeTo(boost * searchResponse.getHits().getAt(1).getScore(), .1)
+        assertResponse(
+            prepareSearch().setQuery(queryStringQuery("\"phrase match\"").field("important", boost).field("less_important")),
+            response -> {
+                assertHitCount(response, 2L);
+                assertFirstHit(response, hasId("1"));
+                assertSecondHit(response, hasId("2"));
+                assertThat((double) response.getHits().getAt(0).getScore(), closeTo(boost * response.getHits().getAt(1).getScore(), .1));
+            }
         );
     }

@@ -782,16 +807,16 @@ public void testSpecialRangeSyntaxInQueryString() {
         client().prepareIndex("test").setId("2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get();
         refresh();

-        SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("num:>19")).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("2"));
-
+        assertResponse(prepareSearch().setQuery(queryStringQuery("num:>19")), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("2"));
+        });
         assertHitCount(prepareSearch().setQuery(queryStringQuery("num:>20")), 0L);

-        searchResponse = prepareSearch().setQuery(queryStringQuery("num:>=20")).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("2"));
-
+        assertResponse(prepareSearch().setQuery(queryStringQuery("num:>=20")), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("2"));
+        });
         assertHitCount(prepareSearch().setQuery(queryStringQuery("num:>11")), 2L);
         assertHitCount(prepareSearch().setQuery(queryStringQuery("num:<20")), 1L);
         assertHitCount(prepareSearch().setQuery(queryStringQuery("num:<=20")), 2L);
@@ -996,25 +1021,26 @@ public void testBasicQueryById() throws Exception {
         client().prepareIndex("test").setId("3").setSource("field1", "value3").get();
         refresh();

-        SearchResponse searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2")).get();
-        assertHitCount(searchResponse, 2L);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(2));
-
-        searchResponse = prepareSearch().setQuery(idsQuery().addIds("1")).get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(1));
-
-        searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2")).get();
-        assertHitCount(searchResponse, 2L);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(2));
-
-        searchResponse = prepareSearch().setQuery(idsQuery().addIds("1")).get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(1));
-
-        searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get();
-        assertHitCount(searchResponse, 3L);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+        assertResponse(prepareSearch().setQuery(idsQuery().addIds("1", "2")), response -> {
+            assertHitCount(response, 2L);
+            assertThat(response.getHits().getHits().length, equalTo(2));
+        });
+        assertResponse(prepareSearch().setQuery(idsQuery().addIds("1")), response -> {
+            assertHitCount(response, 1L);
+            assertThat(response.getHits().getHits().length, equalTo(1));
+        });
+        assertResponse(prepareSearch().setQuery(idsQuery().addIds("1", "2")), response -> {
+            assertHitCount(response, 2L);
+            assertThat(response.getHits().getHits().length, equalTo(2));
+        });
+        assertResponse(prepareSearch().setQuery(idsQuery().addIds("1")), response -> {
+            assertHitCount(response, 1L);
+            assertThat(response.getHits().getHits().length, equalTo(1));
+        });
+        assertResponse(prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")), response -> {
+            assertHitCount(response, 3L);
+            assertThat(response.getHits().getHits().length, equalTo(3));
+        });
     }

     public void testNumericTermsAndRanges() throws Exception {
@@ -1051,86 +1077,106 @@ public void testNumericTermsAndRanges() throws Exception {
             .get();
         refresh();

-        SearchResponse searchResponse;
         logger.info("--> term query on 1");
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_byte", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_short", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_integer", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_long", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_float", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termQuery("num_double", 1)).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_byte", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_short", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_integer", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_long", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_float", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termQuery("num_double", 1)), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         logger.info("--> terms query on 1");
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_byte", new int[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_short", new int[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_integer", new int[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_long", new int[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_float", new double[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(termsQuery("num_double", new double[] { 1 })).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_byte", new int[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_short", new int[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_integer", new int[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_long", new int[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_float", new double[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(termsQuery("num_double", new double[] { 1 })), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         logger.info("--> term filter on 1");
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_byte", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_short", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_integer", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_long", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_float", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_double", 1))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_byte", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_short", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_integer", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_long", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_float", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_double", 1))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
         logger.info("--> terms filter on 1");
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_byte", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_short", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_integer", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_long", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_float", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
-        searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_double", new int[] { 1 }))).get();
-        assertHitCount(searchResponse, 1L);
-        assertFirstHit(searchResponse, hasId("1"));
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_byte", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_short", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_integer", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_long", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_float", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
+        assertResponse(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_double", new int[] { 1 }))), response -> {
+            assertHitCount(response, 1L);
+            assertFirstHit(response, hasId("1"));
+        });
     }

     public void testNumericRangeFilter_2826() throws Exception {
@@ -1238,8 +1284,7 @@ public void testIntervals() throws InterruptedException {
               }
             }
           }""";
-        SearchResponse response = prepareSearch("test").setQuery(wrapperQuery(json)).get();
-        assertHitCount(response, 1L);
+        assertHitCount(prepareSearch("test").setQuery(wrapperQuery(json)), 1L);
     }

     // see #2994
@@ -1437,12 +1482,16 @@ public void testMinScore() throws ExecutionException, InterruptedException {
         client().prepareIndex("test").setId("4").setSource("score", 0.5).get();
         refresh();

-        SearchResponse searchResponse = prepareSearch("test").setQuery(
-            functionScoreQuery(ScoreFunctionBuilders.fieldValueFactorFunction("score").missing(1.0)).setMinScore(1.5f)
-        ).get();
-        assertHitCount(searchResponse, 2);
-        assertFirstHit(searchResponse, hasId("3"));
-        assertSecondHit(searchResponse, hasId("1"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                functionScoreQuery(ScoreFunctionBuilders.fieldValueFactorFunction("score").missing(1.0)).setMinScore(1.5f)
+            ),
+            response -> {
+                assertHitCount(response, 2);
+                assertFirstHit(response, hasId("3"));
+                assertSecondHit(response, hasId("1"));
+            }
+        );
     }

     public void testQueryStringWithSlopAndFields() {
@@ -1506,65 +1555,96 @@ public void testRangeQueryWithTimeZone() throws Exception {
                 .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4)
         );

-        SearchResponse searchResponse = prepareSearch("test").setQuery(
-            QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00")
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getId(), is("1"));
-        searchResponse = prepareSearch("test").setQuery(
-            QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00")
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getId(), is("2"));
-        searchResponse = prepareSearch("test").setQuery(
-            QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00")
-        ).get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
-
+        assertResponse(
+            prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00")),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getId(), is("1"));
+            }
+        );
+        assertResponse(
+            prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00")),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getId(), is("2"));
+            }
+
); + assertResponse( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00")), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("3")); + } + ); // We explicitly define a time zone in the from/to dates so whatever the time zone is, it won't be used - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("1")); - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("2")); - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("1")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("2")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("3")); + } + ); // We define a time zone to be applied to the filter and from/to have no time zone - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("1")); - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("2")); - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T04:00:00").to("2014-01-01T04:59:00").timeZone("+03:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00") - ).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - - searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("now/d-1d").timeZone("+01:00")).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), is("4")); + assertResponse( + prepareSearch("test").setQuery( + 
QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("1")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("2")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T04:00:00").to("2014-01-01T04:59:00").timeZone("+03:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("3")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00") + ), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("3")); + } + ); + assertResponse(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("now/d-1d").timeZone("+01:00")), response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("4")); + }); } /** @@ -1636,23 +1716,27 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE public void testQueryStringParserCache() throws Exception { createIndex("test"); indexRandom(true, false, client().prepareIndex("test").setId("1").setSource("nameTokens", "xyz")); - - SearchResponse response = prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)) - .get(); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - - float first = response.getHits().getAt(0).getScore(); + final float[] first = new float[1]; + assertResponse( + prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + first[0] = response.getHits().getAt(0).getScore(); + } + ); for (int i = 0; i < 100; i++) { - response = prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - float actual = response.getHits().getAt(0).getScore(); - assertThat(i + " expected: " + first + " actual: " + actual, Float.compare(first, actual), equalTo(0)); + final int finalI = i; + assertResponse( + prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + float actual = response.getHits().getAt(0).getScore(); + assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); + } + ); } } @@ -1666,8 +1750,7 @@ public void testRangeQueryRangeFields_24744() throws Exception { refresh(); RangeQueryBuilder range = new 
RangeQueryBuilder("int_range").relation("intersects").from(Integer.MIN_VALUE).to(Integer.MAX_VALUE); - SearchResponse searchResponse = prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); + assertHitCount(prepareSearch("test").setQuery(range), 1L); } public void testNestedQueryWithFieldAlias() throws Exception { @@ -1732,17 +1815,16 @@ public void testFieldAliasesForMetaFields() throws Exception { indexRandom(true, false, indexRequest); updateClusterSettings(Settings.builder().put(IndicesService.INDICES_ID_FIELD_DATA_ENABLED_SETTING.getKey(), true)); try { - SearchResponse searchResponse = prepareSearch().setQuery(termQuery("routing-alias", "custom")) - .addDocValueField("id-alias") - .get(); - assertHitCount(searchResponse, 1L); + assertResponse(prepareSearch().setQuery(termQuery("routing-alias", "custom")).addDocValueField("id-alias"), response -> { + assertHitCount(response, 1L); - SearchHit hit = searchResponse.getHits().getAt(0); - assertEquals(2, hit.getFields().size()); - assertTrue(hit.getFields().containsKey("id-alias")); + SearchHit hit = response.getHits().getAt(0); + assertEquals(2, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("id-alias")); - DocumentField field = hit.getFields().get("id-alias"); - assertThat(field.getValue().toString(), equalTo("1")); + DocumentField field = hit.getFields().get("id-alias"); + assertThat(field.getValue().toString(), equalTo("1")); + }); } finally { // unset cluster setting updateClusterSettings(Settings.builder().putNull(IndicesService.INDICES_ID_FIELD_DATA_ENABLED_SETTING.getKey())); @@ -1886,11 +1968,12 @@ public void testFetchIdFieldQuery() { ensureGreen(); refresh(); - SearchResponse response = prepareSearch("test").addFetchField("_id").setSize(docCount).get(); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(docCount, hits.length); - for (SearchHit hit : hits) { - assertNotNull(hit.getFields().get("_id").getValue()); - } + assertResponse(prepareSearch("test").addFetchField("_id").setSize(docCount), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(docCount, hits.length); + for (SearchHit hit : hits) { + assertNotNull(hit.getFields().get("_id").getValue()); + } + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 78d98b76b9bc8..cc41ac0089a51 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; @@ -49,7 +48,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -89,24 +89,31 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept // Tests boost value setting. In this case doc 1 should always be ranked above the other // two matches. - SearchResponse searchResponse = prepareSearch().setQuery( - boolQuery().should(simpleQueryStringQuery("\"foo bar\"").boost(10.0f)).should(termQuery("body", "eggplant")) - ).get(); - assertHitCount(searchResponse, 2L); - assertFirstHit(searchResponse, hasId("3")); - + assertResponse( + prepareSearch().setQuery( + boolQuery().should(simpleQueryStringQuery("\"foo bar\"").boost(10.0f)).should(termQuery("body", "eggplant")) + ), + response -> { + assertHitCount(response, 2L); + assertFirstHit(response, hasId("3")); + } + ); assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar").defaultOperator(Operator.AND)), "3"); assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("\"quux baz\" +(eggplant | spaghetti)")), "4", "5"); assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("eggplants").analyzer("mock_snowball")), "4"); - searchResponse = prepareSearch().setQuery( - simpleQueryStringQuery("spaghetti").field("body", 1000.0f).field("otherbody", 2.0f).queryName("myquery") - ).get(); - assertHitCount(searchResponse, 2L); - assertFirstHit(searchResponse, hasId("5")); - assertSearchHits(searchResponse, "5", "6"); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("myquery")); + assertResponse( + prepareSearch().setQuery( + simpleQueryStringQuery("spaghetti").field("body", 1000.0f).field("otherbody", 2.0f).queryName("myquery") + ), + response -> { + assertHitCount(response, 2L); + assertFirstHit(response, hasId("5")); + assertSearchHits(response, "5", "6"); + assertThat(response.getHits().getAt(0).getMatchedQueries()[0], equalTo("myquery")); + } + ); assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("spaghetti").field("*body")), "5", "6"); } @@ -281,12 +288,14 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte ); refresh(); - SearchResponse searchResponse = prepareSearch().setAllowPartialSearchResults(true) - .setQuery(simpleQueryStringQuery("foo").field("field")) - .get(); - assertFailures(searchResponse); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); + assertResponse( + prepareSearch().setAllowPartialSearchResults(true).setQuery(simpleQueryStringQuery("foo").field("field")), + response -> { + assertFailures(response); + assertHitCount(response, 1L); + assertSearchHits(response, "1"); + } + ); assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo").field("field").lenient(true)), "1"); } @@ -373,17 +382,18 @@ public void testBasicAllQuery() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); - SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); - assertHitCount(resp, 2L); - assertHits(resp.getHits(), "1", 
"3"); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); - assertHitCount(resp, 2L); - assertHits(resp.getHits(), "1", "3"); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); - assertHitCount(resp, 3L); - assertHits(resp.getHits(), "1", "2", "3"); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), response -> { + assertHitCount(response, 2L); + assertHits(response.getHits(), "1", "3"); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("bar")), response -> { + assertHitCount(response, 2L); + assertHits(response.getHits(), "1", "3"); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")), response -> { + assertHitCount(response, 3L); + assertHits(response.getHits(), "1", "2", "3"); + }); } public void testWithDate() throws Exception { @@ -396,21 +406,22 @@ public void testWithDate() throws Exception { reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); - SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); - assertHits(resp.getHits(), "1"); - assertHitCount(resp, 1L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")), response -> { + assertHits(response.getHits(), "1"); + assertHitCount(response, 1L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); } public void testWithLotsOfTypes() throws Exception { @@ -427,21 +438,22 @@ public void testWithLotsOfTypes() throws Exception { ); indexRandom(true, false, reqs); - SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); - assertHits(resp.getHits(), "1"); - assertHitCount(resp, 1L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")).get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); + 
assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")), response -> { + assertHits(response.getHits(), "1"); + assertHitCount(response, 1L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")), response -> { + assertHits(response.getHits(), "1", "2"); + assertHitCount(response, 2L); + }); } public void testDocWithAllTypes() throws Exception { @@ -454,39 +466,35 @@ public void testDocWithAllTypes() throws Exception { reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); - SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("19")).get(); - assertHits(resp.getHits(), "1"); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("19")), response -> assertHits(response.getHits(), "1")); // nested doesn't match because it's hidden - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")).get(); - assertHits(resp.getHits(), "1"); + assertResponse( + prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")), + response -> assertHits(response.getHits(), "1") + ); // bool doesn't match - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("7")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("23")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1293")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("42")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")).get(); - assertHits(resp.getHits(), "1"); - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")).get(); - assertHits(resp.getHits(), "1"); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("7")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("23")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1293")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("42")), response -> assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")), response -> 
assertHits(response.getHits(), "1")); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")), response -> assertHits(response.getHits(), "1")); + assertResponse( + prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")), + response -> assertHits(response.getHits(), "1") + ); // binary doesn't match // suggest doesn't match // geo_point doesn't match // geo_shape doesn't match - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)).get(); - assertHits(resp.getHits(), "1"); + assertResponse( + prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)), + response -> assertHits(response.getHits(), "1") + ); } public void testKeywordWithWhitespace() throws Exception { @@ -500,13 +508,14 @@ public void testKeywordWithWhitespace() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); - SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); - assertHits(resp.getHits(), "3"); - assertHitCount(resp, 1L); - - resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); - assertHits(resp.getHits(), "2", "3"); - assertHitCount(resp, 2L); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), response -> { + assertHits(response.getHits(), "3"); + assertHitCount(response, 1L); + }); + assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("bar")), response -> { + assertHits(response.getHits(), "2", "3"); + assertHitCount(response, 2L); + }); } public void testAllFieldsWithSpecifiedLeniency() throws Exception { @@ -536,11 +545,10 @@ public void testFieldAlias() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")).get(); - - assertNoFailures(response); - assertHitCount(response, 2); - assertHits(response.getHits(), "2", "3"); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")), response -> { + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + }); } public void testFieldAliasWithWildcardField() throws Exception { @@ -554,11 +562,10 @@ public void testFieldAliasWithWildcardField() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")).get(); - - assertNoFailures(response); - assertHitCount(response, 2); - assertHits(response.getHits(), "2", "3"); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")), response -> { + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + }); } public void testFieldAliasOnDisallowedFieldType() throws Exception { @@ -572,11 +579,10 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { // The wildcard field matches aliases for both a text and boolean field. // By default, the boolean field should be ignored when building the query. 
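Note: the assertion helpers this patch migrates to, assertResponse and assertNoFailuresAndResponse, are only imported in these files; their bodies never appear in the diff. What follows is a minimal sketch of the shape they plausibly have, assuming the consumer form exists to scope the response's lifetime (obtain it, run the caller's assertions, then release it). The decRef() release step and the exact signatures are assumptions, not taken from this patch.

import java.util.function.Consumer;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;

final class AssertionHelperSketch {
    // Obtain the response, hand it to the caller's assertions, then release it
    // exactly once, even when an assertion throws.
    static void assertResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
        SearchResponse response = builder.get(); // the same blocking call the replaced code made
        try {
            consumer.accept(response); // assertions run while the response is still valid
        } finally {
            response.decRef(); // assumed release step; the point of the consumer pattern
        }
    }

    // Plausibly just composes the failure check with the consumer form, matching
    // the replaced assertNoFailures(response)-then-assert sequence.
    static void assertNoFailuresAndResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
        assertResponse(builder, response -> {
            assertNoFailures(response); // the check the old code ran explicitly
            consumer.accept(response);
        });
    }
}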
- SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get(); - - assertNoFailures(response); - assertHitCount(response, 1); - assertHits(response.getHits(), "1"); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")), response -> { + assertHitCount(response, 1); + assertHits(response.getHits(), "1"); + }); } private void assertHits(SearchHits hits, String... ids) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 1d13bea9e0639..ee87b868f280d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -32,6 +31,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -67,21 +67,25 @@ public void testStopOneNodePreferenceWithRedState() throws IOException { "_prefer_nodes:somenode,server2" }; for (String pref : preferences) { logger.info("--> Testing out preference={}", pref); - SearchResponse searchResponse = prepareSearch().setSize(0).setPreference(pref).get(); - assertThat(RestStatus.OK, equalTo(searchResponse.status())); - assertThat(pref, searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); - searchResponse = prepareSearch().setPreference(pref).get(); - assertThat(RestStatus.OK, equalTo(searchResponse.status())); - assertThat(pref, searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); + assertResponse(prepareSearch().setSize(0).setPreference(pref), response -> { + assertThat(RestStatus.OK, equalTo(response.status())); + assertThat(pref, response.getFailedShards(), greaterThanOrEqualTo(0)); + }); + assertResponse(prepareSearch().setPreference(pref), response -> { + assertThat(RestStatus.OK, equalTo(response.status())); + assertThat(pref, response.getFailedShards(), greaterThanOrEqualTo(0)); + }); } // _only_local is a stricter preference, we need to send the request to a data node - SearchResponse searchResponse = dataNodeClient().prepareSearch().setSize(0).setPreference("_only_local").get(); - assertThat(RestStatus.OK, equalTo(searchResponse.status())); - assertThat("_only_local", searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); - searchResponse = dataNodeClient().prepareSearch().setPreference("_only_local").get(); - assertThat(RestStatus.OK, equalTo(searchResponse.status())); - assertThat("_only_local", searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); + 
assertResponse(dataNodeClient().prepareSearch().setSize(0).setPreference("_only_local"), response -> { + assertThat(RestStatus.OK, equalTo(response.status())); + assertThat("_only_local", response.getFailedShards(), greaterThanOrEqualTo(0)); + }); + assertResponse(dataNodeClient().prepareSearch().setPreference("_only_local"), response -> { + assertThat(RestStatus.OK, equalTo(response.status())); + assertThat("_only_local", response.getFailedShards(), greaterThanOrEqualTo(0)); + }); } public void testNoPreferenceRandom() { @@ -97,12 +101,16 @@ public void testNoPreferenceRandom() { refresh(); final Client client = internalCluster().smartClient(); - SearchResponse searchResponse = client.prepareSearch("test").setQuery(matchAllQuery()).get(); - String firstNodeId = searchResponse.getHits().getAt(0).getShard().getNodeId(); - searchResponse = client.prepareSearch("test").setQuery(matchAllQuery()).get(); - String secondNodeId = searchResponse.getHits().getAt(0).getShard().getNodeId(); - - assertThat(firstNodeId, not(equalTo(secondNodeId))); + assertResponse( + client.prepareSearch("test").setQuery(matchAllQuery()), + first -> assertResponse( + client.prepareSearch("test").setQuery(matchAllQuery()), + second -> assertThat( + first.getHits().getAt(0).getShard().getNodeId(), + not(equalTo(second.getHits().getAt(0).getShard().getNodeId())) + ) + ) + ); } public void testSimplePreference() { @@ -112,14 +120,20 @@ public void testSimplePreference() { client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertResponse( + prepareSearch().setQuery(matchAllQuery()), + response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + ); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setPreference("_local").get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), + response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + ); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setPreference("1234").get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), + response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + ); } public void testThatSpecifyingNonExistingNodesReturnsUsefulError() { @@ -188,9 +202,10 @@ public void testNodesOnlyRandom() { private void assertSearchOnRandomNodes(SearchRequestBuilder request) { Set<String> hitNodes = new HashSet<>(); for (int i = 0; i < 2; i++) { - SearchResponse searchResponse = request.get(); - assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); - hitNodes.add(searchResponse.getHits().getAt(0).getShard().getNodeId()); + assertResponse(request, response -> { + assertThat(response.getHits().getHits().length, greaterThan(0)); + hitNodes.add(response.getHits().getAt(0).getShard().getNodeId()); + }); } assertThat(hitNodes.size(), greaterThan(1)); } @@ -259,8 +274,9 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { } private static void assertSearchesSpecificNode(String index, String customPreference, String nodeId) { - final SearchResponse searchResponse = prepareSearch(index).setQuery(matchAllQuery()).setPreference(customPreference).get(); -
assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getShard().getNodeId(), equalTo(nodeId)); + assertResponse(prepareSearch(index).setQuery(matchAllQuery()).setPreference(customPreference), response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getShard().getNodeId(), equalTo(nodeId)); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 35ea9614d182a..1362b0166a709 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.OperationRouting; @@ -23,6 +22,7 @@ import java.util.Set; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -49,18 +49,18 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. Set<String> nodeIds = new HashSet<>(); - SearchResponse searchResponse = client.prepareSearch().setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - nodeIds.add(searchResponse.getHits().getAt(0).getShard().getNodeId()); - - searchResponse = client.prepareSearch().setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - nodeIds.add(searchResponse.getHits().getAt(0).getShard().getNodeId()); - - searchResponse = client.prepareSearch().setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - nodeIds.add(searchResponse.getHits().getAt(0).getShard().getNodeId()); - + assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); + }); + assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); + }); + assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); + }); assertEquals(3, nodeIds.size()); // Now after more searches, we should select a node with the lowest ARS rank.
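A side effect of the consumer rewrite, visible in the nodeIds collection above and in the "final float[] first" and "final int finalI = i" conversions earlier in the patch: the assertion code now runs inside a lambda, and Java lambdas can only capture local variables that are effectively final. Copying a loop index into a fresh final local, writing through a one-element array, and mutating a captured collection are the standard workarounds. A small self-contained illustration (generic Java, not from the patch):

import java.util.HashSet;
import java.util.Set;
import java.util.function.Consumer;

final class LambdaCaptureSketch {
    // Stand-in for assertResponse: just invokes the consumer with a value.
    static void run(Consumer<String> consumer) {
        consumer.accept("node-1");
    }

    public static void main(String[] args) {
        // A loop variable is reassigned, so it is not effectively final; copy it
        // into a fresh final local first (the diff's "final int finalI = i" idiom).
        for (int i = 0; i < 3; i++) {
            final int finalI = i;
            run(value -> System.out.println("iteration " + finalI + " saw " + value));
        }

        // Captured references stay usable: collecting into a set (like nodeIds and
        // hitNodes above) or writing through a one-element array (the diff's
        // "final float[] first = new float[1]" idiom) both mutate state the lambda
        // can reach without reassigning the captured variable itself.
        Set<String> seen = new HashSet<>();
        final float[] first = new float[1];
        run(value -> {
            seen.add(value);
            first[0] = value.length();
        });
        System.out.println(seen + " first=" + first[0]);
    }
}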
@@ -78,13 +78,14 @@ public void testNodeSelection() { assertNotNull(nodeStats); assertEquals(3, nodeStats.getAdaptiveSelectionStats().getComputedStats().size()); - searchResponse = client.prepareSearch().setQuery(matchAllQuery()).get(); - String selectedNodeId = searchResponse.getHits().getAt(0).getShard().getNodeId(); - double selectedRank = nodeStats.getAdaptiveSelectionStats().getRanks().get(selectedNodeId); + assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { + String selectedNodeId = response.getHits().getAt(0).getShard().getNodeId(); + double selectedRank = nodeStats.getAdaptiveSelectionStats().getRanks().get(selectedNodeId); - for (Map.Entry<String, Double> entry : nodeStats.getAdaptiveSelectionStats().getRanks().entrySet()) { - double rank = entry.getValue(); - assertThat(rank, greaterThanOrEqualTo(selectedRank)); - } + for (Map.Entry<String, Double> entry : nodeStats.getAdaptiveSelectionStats().getRanks().entrySet()) { + double rank = entry.getValue(); + assertThat(rank, greaterThanOrEqualTo(selectedRank)); + } + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index dc460468db605..9e7ea2fed8aa2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.scriptfilter; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -37,6 +36,7 @@ import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -114,19 +114,20 @@ public void testCustomScriptBinaryField() throws Exception { flush(); refresh(); - SearchResponse response = prepareSearch().setQuery( - scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length > 15", emptyMap())) - ) - .addScriptField( - "sbinaryData", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) + assertResponse( + prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length > 15", emptyMap())) ) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); - + .addScriptField( + "sbinaryData", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); + } + ); } private
byte[] getRandomBytes(int len) { @@ -168,51 +169,64 @@ public void testCustomScriptBoost() throws Exception { refresh(); logger.info("running doc['num1'].value > 1"); - SearchResponse response = prepareSearch().setQuery( - scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap())) - ) - .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); - + assertResponse( + prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap())) + ) + .addSort("num1", SortOrder.ASC) + .addScriptField( + "sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); + } + ); Map<String, Object> params = new HashMap<>(); params.put("param1", 2); logger.info("running doc['num1'].value > param1"); - response = prepareSearch().setQuery( - scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) - ) - .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("3")); - assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); - + assertResponse( + prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) + ) + .addSort("num1", SortOrder.ASC) + .addScriptField( + "sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); + } + ); params = new HashMap<>(); params.put("param1", -1); logger.info("running doc['num1'].value > param1"); - response = prepareSearch().setQuery( - scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) - ) - .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); -
assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); + assertResponse( + prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) + ) + .addSort("num1", SortOrder.ASC) + .addScriptField( + "sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); + } + ); } public void testDisallowExpensiveQueries() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 61490cac43e45..4189482a73f33 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -47,7 +46,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -210,12 +210,12 @@ public void testSimpleDateRange() throws Exception { assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("1000")), 3L); // a numeric value of 1000 should be parsed as 1000 millis since epoch and return only docs after 1970 - SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(1000)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); - String[] expectedIds = new String[] { "1", "2" }; - 
assertThat(searchResponse.getHits().getHits()[0].getId(), is(oneOf(expectedIds))); - assertThat(searchResponse.getHits().getHits()[1].getId(), is(oneOf(expectedIds))); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(1000)), response -> { + assertHitCount(response, 2L); + String[] expectedIds = new String[] { "1", "2" }; + assertThat(response.getHits().getHits()[0].getId(), is(oneOf(expectedIds))); + assertThat(response.getHits().getHits()[1].getId(), is(oneOf(expectedIds))); + }); } public void testRangeQueryKeyword() throws Exception { @@ -255,17 +255,23 @@ public void testSimpleTerminateAfterCount() throws Exception { ensureGreen(); refresh(); - SearchResponse searchResponse; for (int i = 1; i < max; i++) { - searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(i).get(); - assertHitCount(searchResponse, i); - assertTrue(searchResponse.isTerminatedEarly()); + final int finalI = i; + assertResponse( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(i), + response -> { + assertHitCount(response, finalI); + assertTrue(response.isTerminatedEarly()); + } + ); } - - searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(2 * max).get(); - - assertHitCount(searchResponse, max); - assertFalse(searchResponse.isTerminatedEarly()); + assertResponse( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(2 * max), + response -> { + assertHitCount(response, max); + assertFalse(response.isTerminatedEarly()); + } + ); } public void testSimpleIndexSortEarlyTerminate() throws Exception { @@ -283,17 +289,17 @@ public void testSimpleIndexSortEarlyTerminate() throws Exception { ensureGreen(); refresh(); - SearchResponse searchResponse; for (int i = 1; i < max; i++) { - searchResponse = prepareSearch("test").addDocValueField("rank") - .setTrackTotalHits(false) - .addSort("rank", SortOrder.ASC) - .setSize(i) - .get(); - assertNull(searchResponse.getHits().getTotalHits()); - for (int j = 0; j < i; j++) { - assertThat(searchResponse.getHits().getAt(j).field("rank").getValue(), equalTo((long) j)); - } + final int finalI = i; + assertResponse( + prepareSearch("test").addDocValueField("rank").setTrackTotalHits(false).addSort("rank", SortOrder.ASC).setSize(i), + response -> { + assertNull(response.getHits().getTotalHits()); + for (int j = 0; j < finalI; j++) { + assertThat(response.getHits().getAt(j).field("rank").getValue(), equalTo((long) j)); + } + } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 2926d36becb4a..f5dd2182e0551 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -63,6 +63,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -120,30 +122,32 @@ public void testIssue8226() { } refresh(); // sort DESC - SearchResponse searchResponse = prepareSearch().addSort( - new FieldSortBuilder("entry").order(SortOrder.DESC).unmappedType(useMapping ? null : "long") - ).setSize(10).get(); - logClusterState(); - assertNoFailures(searchResponse); - - for (int j = 1; j < searchResponse.getHits().getHits().length; j++) { - Number current = (Number) searchResponse.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) searchResponse.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); - assertThat(searchResponse.toString(), current.intValue(), lessThan(previous.intValue())); - } + assertNoFailuresAndResponse( + prepareSearch().addSort(new FieldSortBuilder("entry").order(SortOrder.DESC).unmappedType(useMapping ? null : "long")) + .setSize(10), + response -> { + logClusterState(); + for (int j = 1; j < response.getHits().getHits().length; j++) { + Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); + Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); + assertThat(response.toString(), current.intValue(), lessThan(previous.intValue())); + } + } + ); // sort ASC - searchResponse = prepareSearch().addSort( - new FieldSortBuilder("entry").order(SortOrder.ASC).unmappedType(useMapping ? null : "long") - ).setSize(10).get(); - logClusterState(); - assertNoFailures(searchResponse); - - for (int j = 1; j < searchResponse.getHits().getHits().length; j++) { - Number current = (Number) searchResponse.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) searchResponse.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); - assertThat(searchResponse.toString(), current.intValue(), greaterThan(previous.intValue())); - } + assertNoFailuresAndResponse( + prepareSearch().addSort(new FieldSortBuilder("entry").order(SortOrder.ASC).unmappedType(useMapping ? 
null : "long")) + .setSize(10), + response -> { + logClusterState(); + for (int j = 1; j < response.getHits().getHits().length; j++) { + Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); + Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); + assertThat(response.toString(), current.intValue(), greaterThan(previous.intValue())); + } + } + ); } public void testIssue6614() throws ExecutionException, InterruptedException { @@ -172,33 +176,40 @@ public void testIssue6614() throws ExecutionException, InterruptedException { docs += builders.size(); builders.clear(); } - SearchResponse allDocsResponse = prepareSearch().setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("foo", "bar")) - .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/0" + randomIntBetween(1, 7) + "/01")) - ).addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")).setSize(docs).get(); - assertNoFailures(allDocsResponse); - - final int numiters = randomIntBetween(1, 20); - for (int i = 0; i < numiters; i++) { - SearchResponse searchResponse = prepareSearch().setQuery( + final int finalDocs = docs; + assertNoFailuresAndResponse( + prepareSearch().setQuery( QueryBuilders.boolQuery() .must(QueryBuilders.termQuery("foo", "bar")) - .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/" + Strings.format("%02d", randomIntBetween(1, 7)) + "/01")) - ) - .addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")) - .setSize(scaledRandomIntBetween(1, docs)) - .get(); - assertNoFailures(searchResponse); - for (int j = 0; j < searchResponse.getHits().getHits().length; j++) { - assertThat( - searchResponse.toString() + "\n vs. \n" + allDocsResponse.toString(), - searchResponse.getHits().getHits()[j].getId(), - equalTo(allDocsResponse.getHits().getHits()[j].getId()) - ); + .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/0" + randomIntBetween(1, 7) + "/01")) + ).addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")).setSize(docs), + allDocsResponse -> { + final int numiters = randomIntBetween(1, 20); + for (int i = 0; i < numiters; i++) { + assertNoFailuresAndResponse( + prepareSearch().setQuery( + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("foo", "bar")) + .must( + QueryBuilders.rangeQuery("timeUpdated") + .gte("2014/" + Strings.format("%02d", randomIntBetween(1, 7)) + "/01") + ) + ) + .addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")) + .setSize(scaledRandomIntBetween(1, finalDocs)), + response -> { + for (int j = 0; j < response.getHits().getHits().length; j++) { + assertThat( + response.toString() + "\n vs. 
\n" + allDocsResponse.toString(), + response.getHits().getHits()[j].getId(), + equalTo(allDocsResponse.getHits().getHits()[j].getId()) + ); + } + } + ); + } } - } - + ); } public void testTrackScores() throws Exception { @@ -214,20 +225,19 @@ public void testTrackScores() throws Exception { ); refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).get(); - - assertThat(searchResponse.getHits().getMaxScore(), equalTo(Float.NaN)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getScore(), equalTo(Float.NaN)); - } - + assertResponse(prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC), response -> { + assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getScore(), equalTo(Float.NaN)); + } + }); // now check with score tracking - searchResponse = prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).setTrackScores(true).get(); - - assertThat(searchResponse.getHits().getMaxScore(), not(equalTo(Float.NaN))); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getScore(), not(equalTo(Float.NaN))); - } + assertResponse(prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).setTrackScores(true), response -> { + assertThat(response.getHits().getMaxScore(), not(equalTo(Float.NaN))); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getScore(), not(equalTo(Float.NaN))); + } + }); } public void testRandomSorting() throws IOException, InterruptedException, ExecutionException { @@ -273,40 +283,42 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut indexRandom(true, builders); { int size = between(1, denseBytes.size()); - SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .setSize(size) - .addSort("dense_bytes", SortOrder.ASC) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - Set> entrySet = denseBytes.entrySet(); - Iterator> iterator = entrySet.iterator(); - for (int i = 0; i < size; i++) { - assertThat(iterator.hasNext(), equalTo(true)); - Entry next = iterator.next(); - assertThat("pos: " + i, searchResponse.getHits().getAt(i).getId(), equalTo(next.getValue())); - assertThat(searchResponse.getHits().getAt(i).getSortValues()[0].toString(), equalTo(next.getKey().utf8ToString())); - } + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getHits().length, equalTo(size)); + Set> entrySet = denseBytes.entrySet(); + Iterator> iterator = entrySet.iterator(); + for (int i = 0; i < size; i++) { + assertThat(iterator.hasNext(), equalTo(true)); + Entry next = iterator.next(); + assertThat("pos: " + i, response.getHits().getAt(i).getId(), equalTo(next.getValue())); + assertThat(response.getHits().getAt(i).getSortValues()[0].toString(), equalTo(next.getKey().utf8ToString())); + } + } + ); } if (sparseBytes.isEmpty() == false) { int size = between(1, sparseBytes.size()); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.existsQuery("sparse_bytes")) - .setSize(size) - .addSort("sparse_bytes", 
SortOrder.ASC) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - Set<Entry<BytesRef, String>> entrySet = sparseBytes.entrySet(); - Iterator<Entry<BytesRef, String>> iterator = entrySet.iterator(); - for (int i = 0; i < size; i++) { - assertThat(iterator.hasNext(), equalTo(true)); - Entry<BytesRef, String> next = iterator.next(); - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(next.getValue())); - assertThat(searchResponse.getHits().getAt(i).getSortValues()[0].toString(), equalTo(next.getKey().utf8ToString())); - } + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.existsQuery("sparse_bytes")) + .setSize(size) + .addSort("sparse_bytes", SortOrder.ASC), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getHits().length, equalTo(size)); + Set<Entry<BytesRef, String>> entrySet = sparseBytes.entrySet(); + Iterator<Entry<BytesRef, String>> iterator = entrySet.iterator(); + for (int i = 0; i < size; i++) { + assertThat(iterator.hasNext(), equalTo(true)); + Entry<BytesRef, String> next = iterator.next(); + assertThat(response.getHits().getAt(i).getId(), equalTo(next.getValue())); + assertThat(response.getHits().getAt(i).getSortValues()[0].toString(), equalTo(next.getKey().utf8ToString())); + } + } + ); } } @@ -318,53 +330,59 @@ public void test3078() { client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", Integer.toString(i)).get(); } refresh(); - SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) - .get(); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + } + ); // reindex and refresh client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); refresh(); - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) - .get(); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + } + ); // reindex - no refresh client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); -
searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) - .get(); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + } + ); // force merge forceMerge(); refresh(); client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) - .get(); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + } + ); refresh(); - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) - .get(); - assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + } + ); } public void testScoreSortDirection() throws Exception { @@ -377,30 +395,40 @@ public void testScoreSortDirection() throws Exception { refresh(); - SearchResponse searchResponse = prepareSearch("test").setQuery( - QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).get(); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - - searchResponse = prepareSearch("test").setQuery( - 
QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC).get(); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - - searchResponse = prepareSearch("test").setQuery( - QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC).get(); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ), + response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ).addSort("_score", SortOrder.DESC), + response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); + assertResponse( + prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ).addSort("_score", SortOrder.DESC), + response -> { + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testScoreSortDirectionWithFunctionScore() throws Exception { @@ -413,30 +441,33 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { refresh(); - SearchResponse searchResponse = prepareSearch("test").setQuery( - functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")) - ).get(); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - - searchResponse = 
prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))) - .addSort("_score", SortOrder.DESC) - .get(); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - - searchResponse = prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))) - .addSort("_score", SortOrder.DESC) - .get(); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertResponse(prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))), response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + }); + assertResponse( + prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))) + .addSort("_score", SortOrder.DESC), + response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); + assertResponse( + prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))) + .addSort("_score", SortOrder.DESC), + response -> { + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testIssue2986() { @@ -446,11 +477,11 @@ public void testIssue2986() { client().prepareIndex("test").setId("2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); client().prepareIndex("test").setId("3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get(); refresh(); - SearchResponse result = prepareSearch("test").setQuery(matchAllQuery()).setTrackScores(true).addSort("field1", SortOrder.ASC).get(); - - for (SearchHit hit : result.getHits()) { - assertFalse(Float.isNaN(hit.getScore())); - } + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setTrackScores(true).addSort("field1", SortOrder.ASC), response -> { + for (SearchHit hit : response.getHits()) { + assertFalse(Float.isNaN(hit.getScore())); + } + }); } public void testIssue2991() { @@ -474,23 +505,24 @@ public void testIssue2991() { client().prepareIndex("test").setId("2").setSource("tag", "beta").get(); refresh(); - SearchResponse resp = prepareSearch("test").setSize(2) - 
.setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("tag").order(SortOrder.ASC)) - .get(); - assertHitCount(resp, 4); - assertThat(resp.getHits().getHits().length, equalTo(2)); - assertFirstHit(resp, hasId("1")); - assertSecondHit(resp, hasId("2")); - - resp = prepareSearch("test").setSize(2) - .setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("tag").order(SortOrder.DESC)) - .get(); - assertHitCount(resp, 4); - assertThat(resp.getHits().getHits().length, equalTo(2)); - assertFirstHit(resp, hasId("3")); - assertSecondHit(resp, hasId("4")); + assertResponse( + prepareSearch("test").setSize(2).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("tag").order(SortOrder.ASC)), + response -> { + assertHitCount(response, 4); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertFirstHit(response, hasId("1")); + assertSecondHit(response, hasId("2")); + } + ); + assertResponse( + prepareSearch("test").setSize(2).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("tag").order(SortOrder.DESC)), + response -> { + assertHitCount(response, 4); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertFirstHit(response, hasId("3")); + assertSecondHit(response, hasId("4")); + } + ); } } @@ -565,172 +597,190 @@ public void testSimpleSorts() throws Exception { refresh(); // STRING - int size = 1 + random.nextInt(10); - - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.ASC).get(); - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat( - searchResponse.getHits().getAt(i).getSortValues()[0].toString(), - equalTo(new String(new char[] { (char) (97 + i), (char) (97 + i) })) - ); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.ASC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat( + response.getHits().getAt(i).getSortValues()[0].toString(), + equalTo(new String(new char[] { (char) (97 + i), (char) (97 + i) })) + ); + } + }); } - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat( - searchResponse.getHits().getAt(i).getSortValues()[0].toString(), - equalTo(new String(new char[] { (char) (97 + (9 - i)), (char) (97 + (9 - i)) })) - ); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat( + response.getHits().getAt(i).getSortValues()[0].toString(), + equalTo(new String(new char[] { (char) (97 + (9 - i)), (char) (97 + (9 - i)) })) + ); + 
} + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // BYTE - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).byteValue(), equalTo((byte) i)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).byteValue(), equalTo((byte) i)); + } + }); } - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).byteValue(), equalTo((byte) (9 - i))); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).byteValue(), equalTo((byte) (9 - i))); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // SHORT - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).shortValue(), equalTo((short) i)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).shortValue(), equalTo((short) i)); + } + }); } - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - 
assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).shortValue(), equalTo((short) (9 - i))); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).shortValue(), equalTo((short) (9 - i))); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // INTEGER - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).intValue(), equalTo(i)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).intValue(), equalTo(i)); + } + assertThat(response.toString(), not(containsString("error"))); + }); } + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).intValue(), equalTo((9 - i))); + } - assertThat(searchResponse.toString(), not(containsString("error"))); - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).intValue(), equalTo((9 - i))); + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // LONG - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - 
assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).longValue(), equalTo((long) i)); - } + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).longValue(), equalTo((long) i)); + } - assertThat(searchResponse.toString(), not(containsString("error"))); - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC).get(); - assertHitCount(searchResponse, 10L); - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).longValue(), equalTo((long) (9 - i))); + assertThat(response.toString(), not(containsString("error"))); + }); + } + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC), response -> { + assertHitCount(response, 10L); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).longValue(), equalTo((long) (9 - i))); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // FLOAT - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10L); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * i, 0.000001d)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC), response -> { + assertHitCount(response, 10L); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * i, 0.000001d)); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), 
equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * (9 - i), 0.000001d)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC), response -> { + assertHitCount(response, 10); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * (9 - i), 0.000001d)); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - // DOUBLE - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC).get(); - - assertHitCount(searchResponse, 10L); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * i, 0.000001d)); + { + int size = 1 + random.nextInt(10); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC), response -> { + assertHitCount(response, 10L); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + assertThat(((Number) response.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * i, 0.000001d)); + } + assertThat(response.toString(), not(containsString("error"))); + }); } - - assertThat(searchResponse.toString(), not(containsString("error"))); - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10L); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); - assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).doubleValue(), closeTo(0.1d * (9 - i), 0.000001d)); + { + int size = 1 + random.nextInt(10); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC), + response -> { + assertHitCount(response, 10L); + assertThat(response.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(9 - i))); + assertThat( + ((Number) response.getHits().getAt(i).getSortValues()[0]).doubleValue(), + closeTo(0.1d * (9 - i), 0.000001d) + ); + } + } + ); } - - assertNoFailures(searchResponse); } public void testSortMissingNumbers() throws Exception { @@ -768,37 +818,35 @@ public void testSortMissingNumbers() throws Exception { refresh(); logger.info("--> sort with no missing (same as missing _last)"); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)) - .get(); - assertNoFailures(searchResponse); - - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); - + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + } + ); logger.info("--> sort with missing _last"); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")) - .get(); - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); - + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + } + ); logger.info("--> sort with missing _first"); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")) - .get(); - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(1).getId(), equalTo("1")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); } public void testSortMissingStrings() throws IOException { @@ -840,48 +888,53 @@ public void testSortMissingStrings() throws IOException { } logger.info("--> sort with no missing (same as missing _last)"); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)) - .get(); - assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), + response 
-> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + } + ); logger.info("--> sort with missing _last"); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")) - .get(); - assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")), + response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + } + ); logger.info("--> sort with missing _first"); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")) - .get(); - assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")), + response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(1).getId(), equalTo("1")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); logger.info("--> sort with missing b"); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("b")) - .get(); - assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("b")), + response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + + assertThat(response.getHits().getTotalHits().value, 
equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); } public void testSortMissingDates() throws IOException { @@ -914,24 +967,27 @@ public void testSortMissingDates() throws IOException { format = type.equals("date") ? "strict_date_optional_time" : "strict_date_optional_time_nanos"; } - SearchResponse searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format) - ).get(); - assertHitsInOrder(searchResponse, new String[] { "1", "2", "3" }); + assertResponse( + prepareSearch(index).addSort(SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format)), + response -> assertHitsInOrder(response, new String[] { "1", "2", "3" }) + ); - searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format) - ).get(); - assertHitsInOrder(searchResponse, new String[] { "3", "1", "2" }); + assertResponse( + prepareSearch(index).addSort(SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format)), + response -> assertHitsInOrder(response, new String[] { "3", "1", "2" }) + ); - searchResponse = prepareSearch(index).addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format)) - .get(); - assertHitsInOrder(searchResponse, new String[] { "2", "1", "3" }); + assertResponse( + prepareSearch(index).addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format)), + response -> assertHitsInOrder(response, new String[] { "2", "1", "3" }) + ); - searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format) - ).get(); - assertHitsInOrder(searchResponse, new String[] { "3", "2", "1" }); + assertResponse( + prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format) + ), + response -> assertHitsInOrder(response, new String[] { "3", "2", "1" }) + ); } } } @@ -975,25 +1031,33 @@ public void testSortMissingDatesMixedTypes() throws IOException { } String index = "test*"; - SearchResponse searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format).setNumericType("date_nanos") - ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); - assertHitsInOrder(searchResponse, new String[] { "1", "2", "4", "5", "3", "6" }); - - searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format).setNumericType("date_nanos") - ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); - assertHitsInOrder(searchResponse, new String[] { "3", "6", "1", "2", "4", "5" }); - - searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format).setNumericType("date_nanos") - ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); - assertHitsInOrder(searchResponse, new String[] { "5", "4", "2", "1", "3", "6" }); - - searchResponse = prepareSearch(index).addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format).setNumericType("date_nanos") - ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); - 
assertHitsInOrder(searchResponse, new String[] { "3", "6", "5", "4", "2", "1" }); + assertResponse( + prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)), + response -> assertHitsInOrder(response, new String[] { "1", "2", "4", "5", "3", "6" }) + ); + + assertResponse( + prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)), + response -> assertHitsInOrder(response, new String[] { "3", "6", "1", "2", "4", "5" }) + ); + + assertResponse( + prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)), + response -> assertHitsInOrder(response, new String[] { "5", "4", "2", "1", "3", "6" }) + ); + + assertResponse( + prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)), + response -> assertHitsInOrder(response, new String[] { "3", "6", "5", "4", "2", "1" }) + ); } } @@ -1017,8 +1081,10 @@ public void testIgnoreUnmapped() throws Exception { logger.info("--> sort with an unmapped field, verify it fails"); try { - SearchResponse result = prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk")).get(); - assertThat("Expected exception but returned with", result, nullValue()); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk")), + response -> assertThat("Expected exception but returned with", response, nullValue()) + ); } catch (SearchPhaseExecutionException e) { // we check that it's a parse failure rather than a different shard failure for (ShardSearchFailure shardSearchFailure : e.shardFailures()) { @@ -1129,252 +1195,243 @@ public void testSortMVField() throws Exception { refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(3)); - - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(-4L)); - - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(1L)); - - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(7L)); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getHits().length, equalTo(3)); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC).get(); + assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); + assertThat(((Number) 
response.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(-4L));
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(20L));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(10L));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(3L));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .setSize(10)
-            .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM))
-            .get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(53L));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(24L));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .setSize(10)
-            .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG))
-            .get();
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(1L));
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(7L));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> {
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(13L));
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(6L));
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(20L));
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(1L));
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(10L));
-        searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .setSize(10)
-            .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN))
-            .get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(13L));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(7L));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).floatValue(), equalTo(-4f));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).floatValue(), equalTo(1f));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).floatValue(), equalTo(20f));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).floatValue(), equalTo(10f));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(-4d));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(1d));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(20d));
-
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(10d));
-
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d));
-
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC).get();
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(3L));
+        });
+        assertResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .setSize(10)
+                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+                assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+                assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+                assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(53L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("!4"));
+                assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+                assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(24L));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("01"));
+                assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+                assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L));
+            }
+        );
+        assertResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .setSize(10)
+                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+                assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
-        assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("07"));
+                assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+                assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(13L));
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC).get();
+                assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+                assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(6L));
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+                assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+                assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(1L));
+            }
+        );
+        assertResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .setSize(10)
+                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+                assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+                assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+                assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).longValue(), equalTo(13L));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+                assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+                assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).longValue(), equalTo(7L));
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+                assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+                assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L));
+            }
+        );
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).floatValue(), equalTo(-4f));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).floatValue(), equalTo(1f));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).floatValue(), equalTo(20f));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).floatValue(), equalTo(10f));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(-4d));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(1d));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(20d));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(10d));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("!4"));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("01"));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2)));
+            assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07"));
+        });
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits().length, equalTo(3));
+
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+        });
     }
 
     public void testSortOnRareField() throws IOException {
@@ -1399,13 +1456,12 @@ public void testSortOnRareField() throws IOException {
             .get();
         refresh();
-        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(1));
-
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("10"));
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> {
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("10"));
+        });
         client().prepareIndex("test")
             .setId(Integer.toString(2))
             .setSource(jsonBuilder().startObject().array("string_values", "11", "15", "20", "07").endObject())
@@ -1418,16 +1474,16 @@ public void testSortOnRareField() throws IOException {
         }
         refresh();
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort("string_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(2));
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort("string_values", SortOrder.DESC), response -> {
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+            assertThat(response.getHits().getHits().length, equalTo(2));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+        });
         client().prepareIndex("test")
             .setId(Integer.toString(3))
             .setSource(jsonBuilder().startObject().array("string_values", "02", "01", "03", "!4").endObject())
@@ -1440,19 +1496,19 @@ public void testSortOnRareField() throws IOException {
         }
         refresh();
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get();
-
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> {
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+            assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20"));
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+        });
         for (int i = 0; i < 15; i++) {
             client().prepareIndex("test")
                 .setId(Integer.toString(300 + i))
@@ -1461,18 +1517,19 @@ public void testSortOnRareField() throws IOException {
             refresh();
         }
-        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get();
+        assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> {
-        assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+            assertThat(response.getHits().getHits().length, equalTo(3));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
-        assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("20"));
+            assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2)));
+            assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20"));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
-        assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10"));
+            assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1)));
+            assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10"));
-        assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
-        assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+            assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3)));
+            assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03"));
+        });
     }
 
     public void testSortMetaField() throws Exception {
@@ -1488,20 +1545,20 @@ public void testSortMetaField() throws Exception {
             indexRandom(true, indexReqs);
 
             SortOrder order = randomFrom(SortOrder.values());
-            SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
-                .setSize(randomIntBetween(1, numDocs + 5))
-                .addSort("_id", order)
-                .get();
-            assertNoFailures(searchResponse);
-            SearchHit[] hits = searchResponse.getHits().getHits();
-            BytesRef previous = order == SortOrder.ASC ? new BytesRef() : UnicodeUtil.BIG_TERM;
-            for (int i = 0; i < hits.length; ++i) {
-                String idString = hits[i].getId();
-                final BytesRef id = new BytesRef(idString);
-                assertEquals(idString, hits[i].getSortValues()[0]);
-                assertThat(previous, order == SortOrder.ASC ? lessThan(id) : greaterThan(id));
-                previous = id;
-            }
+            assertNoFailuresAndResponse(
+                prepareSearch().setQuery(matchAllQuery()).setSize(randomIntBetween(1, numDocs + 5)).addSort("_id", order),
+                response -> {
+                    SearchHit[] hits = response.getHits().getHits();
+                    BytesRef previous = order == SortOrder.ASC ? new BytesRef() : UnicodeUtil.BIG_TERM;
+                    for (int i = 0; i < hits.length; ++i) {
+                        String idString = hits[i].getId();
+                        final BytesRef id = new BytesRef(idString);
+                        assertEquals(idString, hits[i].getSortValues()[0]);
+                        assertThat(previous, order == SortOrder.ASC ? lessThan(id) : greaterThan(id));
+                        previous = id;
+                    }
+                }
+            );
             // assertWarnings(ID_FIELD_DATA_DEPRECATION_MESSAGE);
         } finally {
             // unset cluster setting
@@ -1588,59 +1645,64 @@ public void testNestedSort() throws IOException, InterruptedException, Execution
 
         // We sort on nested field
-        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .addSort(SortBuilders.fieldSort("nested.foo").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC))
-            .get();
-        assertNoFailures(searchResponse);
-        SearchHit[] hits = searchResponse.getHits().getHits();
-        assertThat(hits.length, is(2));
-        assertThat(hits[0].getSortValues().length, is(1));
-        assertThat(hits[1].getSortValues().length, is(1));
-        assertThat(hits[0].getSortValues()[0], is("cba"));
-        assertThat(hits[1].getSortValues()[0], is("bar"));
-
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .addSort(SortBuilders.fieldSort("nested.foo").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)),
+            response -> {
+                SearchHit[] hits = response.getHits().getHits();
+                assertThat(hits.length, is(2));
+                assertThat(hits[0].getSortValues().length, is(1));
+                assertThat(hits[1].getSortValues().length, is(1));
+                assertThat(hits[0].getSortValues()[0], is("cba"));
+                assertThat(hits[1].getSortValues()[0], is("bar"));
+            }
+        );
         // We sort on nested fields with max_children limit
-        searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .addSort(
-                SortBuilders.fieldSort("nested.foo").setNestedSort(new NestedSortBuilder("nested").setMaxChildren(1)).order(SortOrder.DESC)
-            )
-            .get();
-        assertNoFailures(searchResponse);
-        hits = searchResponse.getHits().getHits();
-        assertThat(hits.length, is(2));
-        assertThat(hits[0].getSortValues().length, is(1));
-        assertThat(hits[1].getSortValues().length, is(1));
-        assertThat(hits[0].getSortValues()[0], is("bar"));
-        assertThat(hits[1].getSortValues()[0], is("abc"));
-
-        {
-            SearchPhaseExecutionException exc = expectThrows(
-                SearchPhaseExecutionException.class,
-                () -> prepareSearch().setQuery(matchAllQuery())
-                    .addSort(
-                        SortBuilders.fieldSort("nested.bar.foo")
-                            .setNestedSort(
-                                new NestedSortBuilder("nested").setNestedSort(new NestedSortBuilder("nested.bar").setMaxChildren(1))
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .addSort(
+                    SortBuilders.fieldSort("nested.foo")
+                        .setNestedSort(new NestedSortBuilder("nested").setMaxChildren(1))
+                        .order(SortOrder.DESC)
+                ),
+            response -> {
+                SearchHit[] hits = response.getHits().getHits();
+                assertThat(hits.length, is(2));
+                assertThat(hits[0].getSortValues().length, is(1));
+                assertThat(hits[1].getSortValues().length, is(1));
+                assertThat(hits[0].getSortValues()[0], is("bar"));
+                assertThat(hits[1].getSortValues()[0], is("abc"));
+
+                {
+                    SearchPhaseExecutionException exc = expectThrows(
+                        SearchPhaseExecutionException.class,
+                        () -> prepareSearch().setQuery(matchAllQuery())
+                            .addSort(
+                                SortBuilders.fieldSort("nested.bar.foo")
+                                    .setNestedSort(
+                                        new NestedSortBuilder("nested").setNestedSort(new NestedSortBuilder("nested.bar").setMaxChildren(1))
+                                    )
+                                    .order(SortOrder.DESC)
                             )
-                            .order(SortOrder.DESC)
-                    )
-                    .get()
-            );
-            assertThat(exc.toString(), containsString("max_children is only supported on top level of nested sort"));
-        }
-
+                            .get()
+                    );
+                    assertThat(exc.toString(), containsString("max_children is only supported on top level of nested sort"));
+                }
+            }
+        );
         // We sort on nested sub field
-        searchResponse = prepareSearch().setQuery(matchAllQuery())
-            .addSort(SortBuilders.fieldSort("nested.foo.sub").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC))
-            .get();
-        assertNoFailures(searchResponse);
-        hits = searchResponse.getHits().getHits();
-        assertThat(hits.length, is(2));
-        assertThat(hits[0].getSortValues().length, is(1));
-        assertThat(hits[1].getSortValues().length, is(1));
-        assertThat(hits[0].getSortValues()[0], is("cba bca"));
-        assertThat(hits[1].getSortValues()[0], is("bar bar"));
-
+        assertNoFailuresAndResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .addSort(SortBuilders.fieldSort("nested.foo.sub").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)),
+            response -> {
+                SearchHit[] hits = response.getHits().getHits();
+                assertThat(hits.length, is(2));
+                assertThat(hits[0].getSortValues().length, is(1));
+                assertThat(hits[1].getSortValues().length, is(1));
+                assertThat(hits[0].getSortValues()[0], is("cba bca"));
+                assertThat(hits[1].getSortValues()[0], is("bar bar"));
+            }
+        );
         // missing nested path
         SearchPhaseExecutionException exc = expectThrows(
             SearchPhaseExecutionException.class,
@@ -1673,20 +1735,26 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception
         SortOrder order = randomBoolean() ? SortOrder.ASC : SortOrder.DESC;
         int from = between(0, 256);
         int size = between(0, 256);
-        SearchResponse multiShardResponse = prepareSearch("test1").setFrom(from).setSize(size).addSort(sortField, order).get();
-        assertNoFailures(multiShardResponse);
-        SearchResponse singleShardResponse = prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order).get();
-        assertNoFailures(singleShardResponse);
-
-        assertThat(multiShardResponse.getHits().getTotalHits().value, equalTo(singleShardResponse.getHits().getTotalHits().value));
-        assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length));
-        for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) {
-            assertThat(
-                multiShardResponse.getHits().getAt(i).getSortValues()[0],
-                equalTo(singleShardResponse.getHits().getAt(i).getSortValues()[0])
-            );
-            assertThat(multiShardResponse.getHits().getAt(i).getId(), equalTo(singleShardResponse.getHits().getAt(i).getId()));
-        }
+        assertNoFailuresAndResponse(
+            prepareSearch("test1").setFrom(from).setSize(size).addSort(sortField, order),
+            multiShardResponse -> assertNoFailuresAndResponse(
+                prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order),
+                singleShardResponse -> {
+                    assertThat(
+                        multiShardResponse.getHits().getTotalHits().value,
+                        equalTo(singleShardResponse.getHits().getTotalHits().value)
+                    );
+                    assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length));
+                    for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) {
+                        assertThat(
+                            multiShardResponse.getHits().getAt(i).getSortValues()[0],
+                            equalTo(singleShardResponse.getHits().getAt(i).getSortValues()[0])
+                        );
+                        assertThat(multiShardResponse.getHits().getAt(i).getId(), equalTo(singleShardResponse.getHits().getAt(i).getId()));
+                    }
+                }
+            )
+        );
     }
 
     public void testCustomFormat() throws Exception {
@@ -1700,17 +1768,19 @@ public void testCustomFormat() throws Exception {
            client().prepareIndex("test").setId("2").setSource("ip", "2001:db8::ff00:42:8329")
        );
 
-        SearchResponse response = prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).get();
-        assertNoFailures(response);
-        assertEquals(2, response.getHits().getTotalHits().value);
-        assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues());
-        assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues());
-
-        response = prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }).get();
-        assertNoFailures(response);
-        assertEquals(2, response.getHits().getTotalHits().value);
-        assertEquals(1, response.getHits().getHits().length);
-        assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues());
+        assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> {
+            assertEquals(2, response.getHits().getTotalHits().value);
+            assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues());
+            assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues());
+        });
+        assertNoFailuresAndResponse(
+            prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }),
+            response -> {
+                assertEquals(2, response.getHits().getTotalHits().value);
+                assertEquals(1, response.getHits().getHits().length);
+                assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues());
+            }
+        );
     }
 
     public void testScriptFieldSort() throws Exception {
@@ -1728,34 +1798,38 @@ public void testScriptFieldSort() throws Exception {
        {
            Script script = new Script(ScriptType.INLINE, NAME, "doc['number'].value", Collections.emptyMap());
-            SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
-                .setSize(randomIntBetween(1, numDocs + 5))
-                .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.NUMBER))
-                .addSort(SortBuilders.scoreSort())
-                .get();
-
-            double expectedValue = 0;
-            for (SearchHit hit : searchResponse.getHits()) {
-                assertThat(hit.getSortValues().length, equalTo(2));
-                assertThat(hit.getSortValues()[0], equalTo(expectedValue++));
-                assertThat(hit.getSortValues()[1], equalTo(1f));
-            }
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery())
+                    .setSize(randomIntBetween(1, numDocs + 5))
+                    .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.NUMBER))
+                    .addSort(SortBuilders.scoreSort()),
+                response -> {
+                    double expectedValue = 0;
+                    for (SearchHit hit : response.getHits()) {
+                        assertThat(hit.getSortValues().length, equalTo(2));
+                        assertThat(hit.getSortValues()[0], equalTo(expectedValue++));
+                        assertThat(hit.getSortValues()[1], equalTo(1f));
+                    }
+                }
+            );
        }
        {
            Script script = new Script(ScriptType.INLINE, NAME, "doc['keyword'].value", Collections.emptyMap());
-            SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
-                .setSize(randomIntBetween(1, numDocs + 5))
-                .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.STRING))
-                .addSort(SortBuilders.scoreSort())
-                .get();
-
-            int expectedValue = 0;
-            for (SearchHit hit : searchResponse.getHits()) {
-                assertThat(hit.getSortValues().length, equalTo(2));
-                assertThat(hit.getSortValues()[0], equalTo(keywords.get(expectedValue++)));
-                assertThat(hit.getSortValues()[1], equalTo(1f));
-            }
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery())
+                    .setSize(randomIntBetween(1, numDocs + 5))
+                    .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.STRING))
+                    .addSort(SortBuilders.scoreSort()),
+                response -> {
+                    int expectedValue = 0;
+                    for (SearchHit hit : response.getHits()) {
+                        assertThat(hit.getSortValues().length, equalTo(2));
+                        assertThat(hit.getSortValues()[0], equalTo(keywords.get(expectedValue++)));
+                        assertThat(hit.getSortValues()[1], equalTo(1f));
+                    }
                }
            );
        }
    }
@@ -1772,16 +1846,17 @@ public void testFieldAlias() throws Exception {
        builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2));
        indexRandom(true, true, builders);
 
-        SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-            .setSize(builders.size())
-            .addSort(SortBuilders.fieldSort("route_length_miles"))
-            .get();
-        SearchHits hits = response.getHits();
+        assertResponse(
+            prepareSearch().setQuery(matchAllQuery()).setSize(builders.size()).addSort(SortBuilders.fieldSort("route_length_miles")),
+            response -> {
+                SearchHits hits = response.getHits();
 
-        assertEquals(3, hits.getHits().length);
-        assertEquals(42.0, hits.getAt(0).getSortValues()[0]);
-        assertEquals(50.5, hits.getAt(1).getSortValues()[0]);
-        assertEquals(100.2, hits.getAt(2).getSortValues()[0]);
+                assertEquals(3, hits.getHits().length);
+                assertEquals(42.0, hits.getAt(0).getSortValues()[0]);
+                assertEquals(50.5, hits.getAt(1).getSortValues()[0]);
+                assertEquals(100.2, hits.getAt(2).getSortValues()[0]);
+            }
+        );
    }
 
    public void testFieldAliasesWithMissingValues() throws Exception {
@@ -1797,16 +1872,19 @@ public void testFieldAliasesWithMissingValues() throws Exception {
        builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2));
        indexRandom(true, true, builders);
 
-        SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-            .setSize(builders.size())
-            .addSort(SortBuilders.fieldSort("route_length_miles").missing(120.3))
-            .get();
-        SearchHits hits = response.getHits();
-
-        assertEquals(3, hits.getHits().length);
-        assertEquals(42.0, hits.getAt(0).getSortValues()[0]);
-        assertEquals(100.2, hits.getAt(1).getSortValues()[0]);
-        assertEquals(120.3, hits.getAt(2).getSortValues()[0]);
+        assertResponse(
+            prepareSearch().setQuery(matchAllQuery())
+                .setSize(builders.size())
+                .addSort(SortBuilders.fieldSort("route_length_miles").missing(120.3)),
+            response -> {
+                SearchHits hits = response.getHits();
+
+                assertEquals(3, hits.getHits().length);
+                assertEquals(42.0, hits.getAt(0).getSortValues()[0]);
+                assertEquals(100.2, hits.getAt(1).getSortValues()[0]);
+                assertEquals(120.3, hits.getAt(2).getSortValues()[0]);
+            }
+        );
    }
 
    public void testCastNumericType() throws Exception {
@@ -1822,34 +1900,40 @@ public void testCastNumericType() throws Exception {
        indexRandom(true, true, builders);
 
        {
-            SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-                .setSize(builders.size())
-                .addSort(SortBuilders.fieldSort("field").setNumericType("long"))
-                .get();
-            SearchHits hits = response.getHits();
-
-            assertEquals(3, hits.getHits().length);
-            for (int i = 0; i < 3; i++) {
-                assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
-            }
-            assertEquals(12L, hits.getAt(0).getSortValues()[0]);
-            assertEquals(12L, hits.getAt(1).getSortValues()[0]);
-            assertEquals(12L, hits.getAt(2).getSortValues()[0]);
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery())
+                    .setSize(builders.size())
+                    .addSort(SortBuilders.fieldSort("field").setNumericType("long")),
+                response -> {
+                    SearchHits hits = response.getHits();
+
+                    assertEquals(3, hits.getHits().length);
+                    for (int i = 0; i < 3; i++) {
+                        assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
+                    }
+                    assertEquals(12L, hits.getAt(0).getSortValues()[0]);
+                    assertEquals(12L, hits.getAt(1).getSortValues()[0]);
+                    assertEquals(12L, hits.getAt(2).getSortValues()[0]);
+                }
+            );
        }
        {
-            SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-                .setSize(builders.size())
-                .addSort(SortBuilders.fieldSort("field").setNumericType("double"))
-                .get();
-            SearchHits hits = response.getHits();
-            assertEquals(3, hits.getHits().length);
-            for (int i = 0; i < 3; i++) {
-                assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Double.class));
-            }
-            assertEquals(12D, hits.getAt(0).getSortValues()[0]);
-            assertEquals(12.1D, (double) hits.getAt(1).getSortValues()[0], 0.001f);
-            assertEquals(12.6D, hits.getAt(2).getSortValues()[0]);
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery())
+                    .setSize(builders.size())
+                    .addSort(SortBuilders.fieldSort("field").setNumericType("double")),
+                response -> {
+                    SearchHits hits = response.getHits();
+                    assertEquals(3, hits.getHits().length);
+                    for (int i = 0; i < 3; i++) {
+                        assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Double.class));
+                    }
+                    assertEquals(12D, hits.getAt(0).getSortValues()[0]);
+                    assertEquals(12.1D, (double) hits.getAt(1).getSortValues()[0], 0.001f);
+                    assertEquals(12.6D, hits.getAt(2).getSortValues()[0]);
+                }
+            );
        }
    }
 
@@ -1864,100 +1948,114 @@ public void testCastDate() throws Exception {
        indexRandom(true, true, builders);
 
        {
-            SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-                .setSize(2)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date"))
-                .get();
-            SearchHits hits = response.getHits();
-
-            assertEquals(2, hits.getHits().length);
-            for (int i = 0; i < 2; i++) {
-                assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
-            }
-            assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]);
-            assertEquals(1712879237000L, hits.getAt(1).getSortValues()[0]);
-
-            response = prepareSearch().setMaxConcurrentShardRequests(1)
-                .setQuery(matchAllQuery())
-                .setSize(1)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date"))
-                .get();
-            hits = response.getHits();
-
-            assertEquals(1, hits.getHits().length);
-            assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
-            assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]);
-
-            response = prepareSearch().setMaxConcurrentShardRequests(1)
-                .setQuery(matchAllQuery())
-                .setSize(1)
-                .addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date"))
-                .get();
-            hits = response.getHits();
-
-            assertEquals(1, hits.getHits().length);
-            assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
-            assertEquals(1712879237000L, hits.getAt(0).getSortValues()[0]);
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort(SortBuilders.fieldSort("field").setNumericType("date")),
+                response -> {
+                    SearchHits hits = response.getHits();
+
+                    assertEquals(2, hits.getHits().length);
+                    for (int i = 0; i < 2; i++) {
+                        assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
+                    }
+                    assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]);
+                    assertEquals(1712879237000L, hits.getAt(1).getSortValues()[0]);
+                }
+            );
+            assertResponse(
+                prepareSearch().setMaxConcurrentShardRequests(1)
+                    .setQuery(matchAllQuery())
+                    .setSize(1)
+                    .addSort(SortBuilders.fieldSort("field").setNumericType("date")),
+                response -> {
+                    SearchHits hits = response.getHits();
+
+                    assertEquals(1, hits.getHits().length);
+                    assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
+                    assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]);
+                }
+            );
+            assertResponse(
+                prepareSearch().setMaxConcurrentShardRequests(1)
+                    .setQuery(matchAllQuery())
+                    .setSize(1)
+                    .addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date")),
+                response -> {
+                    SearchHits hits = response.getHits();
+
+                    assertEquals(1, hits.getHits().length);
+                    assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
+                    assertEquals(1712879237000L, hits.getAt(0).getSortValues()[0]);
+                }
+            );
        }
        {
-            SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-                .setSize(2)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos"))
-                .get();
-            SearchHits hits = response.getHits();
-            assertEquals(2, hits.getHits().length);
-            for (int i = 0; i < 2; i++) {
-                assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
-            }
-            assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]);
-            assertEquals(1712879237000000000L, hits.getAt(1).getSortValues()[0]);
-
-            response = prepareSearch().setMaxConcurrentShardRequests(1)
-                .setQuery(matchAllQuery())
-                .setSize(1)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos"))
-                .get();
-            hits = response.getHits();
-            assertEquals(1, hits.getHits().length);
-            assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
-            assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]);
-
-            response = prepareSearch().setMaxConcurrentShardRequests(1)
-                .setQuery(matchAllQuery())
-                .setSize(1)
-                .addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date_nanos"))
-                .get();
-            hits = response.getHits();
-            assertEquals(1, hits.getHits().length);
-            assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
-            assertEquals(1712879237000000000L, hits.getAt(0).getSortValues()[0]);
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")),
+                response -> {
+                    SearchHits hits = response.getHits();
+                    assertEquals(2, hits.getHits().length);
+                    for (int i = 0; i < 2; i++) {
+                        assertThat(hits.getAt(i).getSortValues()[0].getClass(), equalTo(Long.class));
+                    }
+                    assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]);
+                    assertEquals(1712879237000000000L, hits.getAt(1).getSortValues()[0]);
+                }
+            );
+            assertResponse(
+                prepareSearch().setMaxConcurrentShardRequests(1)
+                    .setQuery(matchAllQuery())
+                    .setSize(1)
+                    .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")),
+                response -> {
+                    SearchHits hits = response.getHits();
+                    assertEquals(1, hits.getHits().length);
+                    assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
+                    assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]);
+                }
+            );
+            assertResponse(
+                prepareSearch().setMaxConcurrentShardRequests(1)
+                    .setQuery(matchAllQuery())
+                    .setSize(1)
+                    .addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date_nanos")),
+                response -> {
+                    SearchHits hits = response.getHits();
+                    assertEquals(1, hits.getHits().length);
+                    assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class));
+                    assertEquals(1712879237000000000L, hits.getAt(0).getSortValues()[0]);
+                }
+            );
        }
        {
            builders.clear();
            builders.add(client().prepareIndex("index_date").setSource("field", "1905-04-11T23:47:17"));
            indexRandom(true, true, builders);
-            SearchResponse response = prepareSearch().setQuery(matchAllQuery())
-                .setSize(1)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos"))
-                .get();
-            assertNotNull(response.getShardFailures());
-            assertThat(response.getShardFailures().length, equalTo(1));
-            assertThat(response.getShardFailures()[0].toString(), containsString("are before the epoch in 1970"));
+            assertResponse(
+                prepareSearch().setQuery(matchAllQuery()).setSize(1).addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")),
+                response -> {
+                    assertNotNull(response.getShardFailures());
+                    assertThat(response.getShardFailures().length, equalTo(1));
+                    assertThat(response.getShardFailures()[0].toString(), containsString("are before the epoch in 1970"));
+                }
+            );
        }
        {
            builders.clear();
            builders.add(client().prepareIndex("index_date").setSource("field", "2346-04-11T23:47:17"));
            indexRandom(true, true, builders);
-            SearchResponse response = prepareSearch().setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01"))
-                .setSize(10)
-                .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos"))
-                .get();
-            assertNotNull(response.getShardFailures());
-            assertThat(response.getShardFailures().length, equalTo(1));
-            assertThat(response.getShardFailures()[0].toString(), containsString("are after 2262"));
+            assertResponse(
+                prepareSearch().setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01"))
+                    .setSize(10)
+                    .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")),
+                response -> {
+                    assertNotNull(response.getShardFailures());
+                    assertThat(response.getShardFailures().length, equalTo(1));
+                    assertThat(response.getShardFailures()[0].toString(), containsString("are after 2262"));
+                }
+            );
        }
    }
 
@@ -1996,28 +2094,34 @@ public void testLongSortOptimizationCorrectResults() {
        refresh();
 
        // *** 1. sort DESC on long_field
-        SearchResponse searchResponse = prepareSearch().addSort(new FieldSortBuilder("long_field").order(SortOrder.DESC)).setSize(10).get();
-        assertNoFailures(searchResponse);
-        long previousLong = Long.MAX_VALUE;
-        for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
-            // check the correct sort order
-            SearchHit hit = searchResponse.getHits().getHits()[i];
-            long currentLong = (long) hit.getSortValues()[0];
-            assertThat("sort order is incorrect", currentLong, lessThanOrEqualTo(previousLong));
-            previousLong = currentLong;
-        }
+        assertNoFailuresAndResponse(
+            prepareSearch().addSort(new FieldSortBuilder("long_field").order(SortOrder.DESC)).setSize(10),
+            response -> {
+                long previousLong = Long.MAX_VALUE;
+                for (int i = 0; i < response.getHits().getHits().length; i++) {
+                    // check the correct sort order
+                    SearchHit hit = response.getHits().getHits()[i];
+                    long currentLong = (long) hit.getSortValues()[0];
+                    assertThat("sort order is incorrect", currentLong, lessThanOrEqualTo(previousLong));
+                    previousLong = currentLong;
+                }
+            }
+        );
        // *** 2. sort ASC on long_field
-        searchResponse = prepareSearch().addSort(new FieldSortBuilder("long_field").order(SortOrder.ASC)).setSize(10).get();
-        assertNoFailures(searchResponse);
-        previousLong = Long.MIN_VALUE;
-        for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
-            // check the correct sort order
-            SearchHit hit = searchResponse.getHits().getHits()[i];
-            long currentLong = (long) hit.getSortValues()[0];
-            assertThat("sort order is incorrect", currentLong, greaterThanOrEqualTo(previousLong));
-            previousLong = currentLong;
-        }
+        assertNoFailuresAndResponse(
+            prepareSearch().addSort(new FieldSortBuilder("long_field").order(SortOrder.ASC)).setSize(10),
+            response -> {
+                long previousLong = Long.MIN_VALUE;
+                for (int i = 0; i < response.getHits().getHits().length; i++) {
+                    // check the correct sort order
+                    SearchHit hit = response.getHits().getHits()[i];
+                    long currentLong = (long) hit.getSortValues()[0];
+                    assertThat("sort order is incorrect", currentLong, greaterThanOrEqualTo(previousLong));
+                    previousLong = currentLong;
+                }
+            }
        );
    }
 
    public void testSortMixedFieldTypes() {
@@ -2033,10 +2137,7 @@ public void testSortMixedFieldTypes() {
        refresh();
 
        { // mixing long and integer types is ok, as we convert integer sort to long sort
-            SearchResponse searchResponse = prepareSearch("index_long", "index_integer").addSort(new FieldSortBuilder("foo"))
-                .setSize(10)
-                .get();
-            assertNoFailures(searchResponse);
+            assertNoFailures(prepareSearch("index_long", "index_integer").addSort(new FieldSortBuilder("foo")).setSize(10));
        }
 
        String errMsg = "Can't sort on field [foo]; the field has incompatible sort types";
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
index 777db15b596ec..e09eb3d73b848 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.sort; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.settings.Settings; @@ -29,6 +28,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -148,81 +148,87 @@ public void testDistanceSortingMVFields() throws Exception { indicesAdmin().prepareRefresh().get(); // Order: Asc - SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "1", "2", "3", "4", "5"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "1", "2", "3", "4", "5"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); + assertThat(((Number) response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); + } + ); // Order: Asc, Mode: max - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); - assertThat(((Number) 
searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "1", "2", "4", "3", "5"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); + assertThat(((Number) response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + } + ); // Order: Desc - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "5", "3", "4", "2", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); // Order: Desc, Mode: min - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "5", "4", "3", "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), 
closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2874d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(5301d, 10d)); - - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.DESC)) - .get(); - - assertHitCount(searchResponse, 5); - assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "5", "4", "3", "2", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "1", "2", "4", "3", "5"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2874d, 10d)); + assertThat(((Number) 
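+ // AVG sorts by the mean of all of a document's location distances; doc "5" averages out to ~5301 m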
response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(5301d, 10d)); + } + ); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.DESC)), + response -> { + assertHitCount(response, 5); + assertOrderedSearchHits(response, "5", "3", "4", "2", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d)); + assertThat(((Number) response.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); try { prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.SUM)); @@ -276,25 +282,28 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { refresh(); // Order: Asc - SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 2); - assertOrderedSearchHits(searchResponse, "1", "2"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 2); + assertOrderedSearchHits(response, "1", "2"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + } + ); // Order: Desc - searchResponse = prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)) - .get(); - - // Doc with missing geo point is first, is consistent with 0.20.x - assertHitCount(searchResponse, 2); - assertOrderedSearchHits(searchResponse, "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286d, 10d)); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)), + response -> { + // Doc with missing geo point is first, is consistent with 0.20.x + assertHitCount(response, 2); + assertOrderedSearchHits(response, "2", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286d, 10d)); + } + ); } public void testDistanceSortingNestedFields() throws Exception { @@ -416,119 +425,127 @@ public void 
testDistanceSortingNestedFields() throws Exception { ); // Order: Asc - SearchResponse searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .order(SortOrder.ASC) - .setNestedSort(new NestedSortBuilder("branches")) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "2", "3", "4"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); - + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .order(SortOrder.ASC) + .setNestedSort(new NestedSortBuilder("branches")) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "1", "2", "3", "4"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); + } + ); // Order: Asc, Mode: max - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .order(SortOrder.ASC) - .sortMode(SortMode.MAX) - .setNestedSort(new NestedSortBuilder("branches")) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); - + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .order(SortOrder.ASC) + .sortMode(SortMode.MAX) + .setNestedSort(new NestedSortBuilder("branches")) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "1", "3", "2", "4"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + } + ); // Order: Desc - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - 
.order(SortOrder.DESC) - .setNestedSort(new NestedSortBuilder("branches")) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .order(SortOrder.DESC) + .setNestedSort(new NestedSortBuilder("branches")) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "4", "2", "3", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); // Order: Desc, Mode: min - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .order(SortOrder.DESC) - .sortMode(SortMode.MIN) - .setNestedSort(new NestedSortBuilder("branches")) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "3", "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .sortMode(SortMode.AVG) - .order(SortOrder.ASC) - .setNestedSort(new NestedSortBuilder("branches")) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); - - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .setNestedSort(new NestedSortBuilder("branches")) - .sortMode(SortMode.AVG) - .order(SortOrder.DESC) - ) - .get(); - - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); - assertThat(((Number) 
searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); - - searchResponse = prepareSearch("companies").setQuery(matchAllQuery()) - .addSort( - SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .setNestedSort(new NestedSortBuilder("branches").setFilter(termQuery("branches.name", "brooklyn"))) - .sortMode(SortMode.AVG) - .order(SortOrder.ASC) - ) - .get(); - assertHitCount(searchResponse, 4); - assertFirstHit(searchResponse, hasId("4")); - assertSearchHits(searchResponse, "1", "2", "3", "4"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .order(SortOrder.DESC) + .sortMode(SortMode.MIN) + .setNestedSort(new NestedSortBuilder("branches")) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "4", "3", "2", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .sortMode(SortMode.AVG) + .order(SortOrder.ASC) + .setNestedSort(new NestedSortBuilder("branches")) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "1", "3", "2", "4"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); + } + ); + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .setNestedSort(new NestedSortBuilder("branches")) + .sortMode(SortMode.AVG) + .order(SortOrder.DESC) + ), + response -> { + assertHitCount(response, 4); + assertOrderedSearchHits(response, "4", "2", "3", "1"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); + assertThat(((Number) 
response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d)); + } + ); + assertResponse( + prepareSearch("companies").setQuery(matchAllQuery()) + .addSort( + SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) + .setNestedSort(new NestedSortBuilder("branches").setFilter(termQuery("branches.name", "brooklyn"))) + .sortMode(SortMode.AVG) + .order(SortOrder.ASC) + ), + response -> { + assertHitCount(response, 4); + assertFirstHit(response, hasId("4")); + assertSearchHits(response, "1", "2", "3", "4"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + assertThat(((Number) response.getHits().getAt(3).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + } + ); try { prepareSearch("companies").setQuery(matchAllQuery()) .addSort( @@ -614,32 +631,36 @@ public void testDistanceSortingWithUnmappedField() throws Exception { refresh(); // Order: Asc - SearchResponse searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.ASC)) - .get(); - - assertHitCount(searchResponse, 2); - assertOrderedSearchHits(searchResponse, "1", "2"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - + assertResponse( + prepareSearch("test1", "test2").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.ASC)), + response -> { + assertHitCount(response, 2); + assertOrderedSearchHits(response, "1", "2"); + assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + } + ); // Order: Desc - searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.DESC)) - .get(); - - // Doc with missing geo point is first, is consistent with 0.20.x - assertHitCount(searchResponse, 2); - assertOrderedSearchHits(searchResponse, "2", "1"); - assertThat(((Number) searchResponse.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); - assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286d, 10d)); - + assertResponse( + prepareSearch("test1", "test2").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.DESC)), + response -> { + // Doc with missing geo point is first, is consistent with 0.20.x + assertHitCount(response, 2); + assertOrderedSearchHits(response, "2", "1"); + assertThat(((Number) 
response.getHits().getAt(0).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); + assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286d, 10d)); + } + ); // Make sure that by default the unmapped fields continue to fail - searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)) - .get(); - assertThat(searchResponse.getFailedShards(), greaterThan(0)); - assertHitCount(searchResponse, 1); + assertResponse( + prepareSearch("test1", "test2").setQuery(matchAllQuery()) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)), + response -> { + assertThat(response.getFailedShards(), greaterThan(0)); + assertHitCount(response, 1); + } + ); } - } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 54d730cec2bc3..265cd77bdbbbc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -32,6 +32,7 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; @@ -84,56 +85,65 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce q[0] = new GeoPoint(2, 1); } - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d1", "d2"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d) - ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d) - ); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.DESC)) - .get(); - assertOrderedSearchHits(searchResponse, "d2", "d1"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d) - ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d) - ); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d1", "d2"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d) + assertResponse( + 
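+ // multi-point sort: each hit's value is the distance from its nearest point to the nearest query point (MIN mode)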
prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d1", "d2"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d) + ); + } ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.DESC)), + response -> { + assertOrderedSearchHits(response, "d2", "d1"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d) + ); + } ); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.DESC)) - .get(); - assertOrderedSearchHits(searchResponse, "d2", "d1"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d1", "d2"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d) + ); + } ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.DESC)), + response -> { + assertOrderedSearchHits(response, "d2", "d1"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d) + ); + } ); } @@ -164,30 +174,35 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc ); GeoPoint q = new GeoPoint(0, 0); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.AVG).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d2", "d1"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d) - ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d) - ); - - 
searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MEDIAN).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d1", "d2"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.AVG).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d2", "d1"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d) + ); + } ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d) + assertResponse( + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MEDIAN).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d1", "d2"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d) + ); + } ); } @@ -245,30 +260,33 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept } } - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d1", "d2"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(2.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1) - ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(4.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1) - ); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode(SortMode.MAX).order(SortOrder.ASC)) - .get(); - assertOrderedSearchHits(searchResponse, "d1", "d2"); - assertThat( - (Double) searchResponse.getHits().getAt(0).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(3.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1) + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d1", "d2"); + assertThat( + (Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(2.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(4.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1) + ); + } ); - assertThat( - (Double) searchResponse.getHits().getAt(1).getSortValues()[0], - closeTo(GeoDistance.ARC.calculate(5.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1) + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MAX).order(SortOrder.ASC)), + response -> { + assertOrderedSearchHits(response, "d1", "d2"); + assertThat( + 
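+ // expected distances are recomputed with GeoDistance.ARC rather than hard-coded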
(Double) response.getHits().getAt(0).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(3.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1) + ); + assertThat( + (Double) response.getHits().getAt(1).getSortValues()[0], + closeTo(GeoDistance.ARC.calculate(5.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1) + ); + } ); } @@ -289,42 +307,48 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)) - .get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), + response -> checkCorrectSortOrderForGeoSort(response) + ); geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)) - .get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), + response -> checkCorrectSortOrderForGeoSort(response) + ); geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2); - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)) - .get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), + response -> checkCorrectSortOrderForGeoSort(response) + ); - searchResponse = prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))) - .get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), + response -> checkCorrectSortOrderForGeoSort(response) + ); - searchResponse = prepareSearch().setSource( - new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0")) - ).get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0"))), + response -> checkCorrectSortOrderForGeoSort(response) + ); - searchResponse = prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))) - .get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), + response -> checkCorrectSortOrderForGeoSort(response) + ); - searchResponse = prepareSearch().setSource( - new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE)) - ).get(); - checkCorrectSortOrderForGeoSort(searchResponse); + assertResponse( + prepareSearch().setSource( + new SearchSourceBuilder().sort( + SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE) + ) + ), + response -> 
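+ // the geohash, GeoPoint, and raw lat/lon forms of the same point below must all produce the same sort order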
checkCorrectSortOrderForGeoSort(response) + ); } private static void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 1860082c833ad..179778240cc34 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; @@ -22,6 +21,7 @@ import java.util.Collections; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -34,14 +34,17 @@ public void testSimple() { client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - SearchResponse response = prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get(); - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); - - response = prepareSearch("test").storedFields("_none_").get(); - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); + }); + + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); } public void testInnerHits() { @@ -50,23 +53,26 @@ public void testInnerHits() { client().prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); - SearchResponse response = prepareSearch("test").storedFields("_none_") - .setFetchSource(false) - .setQuery( - new NestedQueryBuilder("nested", new TermQueryBuilder("nested.title", "foo"), ScoreMode.Total).innerHit( - new InnerHitBuilder().setStoredFieldNames(Collections.singletonList("_none_")) - .setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) - ) - ) - .get(); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - SearchHits hits = 
response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); - assertThat(hits.getAt(0).getId(), nullValue()); - assertThat(hits.getAt(0).getSourceAsString(), nullValue()); + assertResponse( + prepareSearch("test").storedFields("_none_") + .setFetchSource(false) + .setQuery( + new NestedQueryBuilder("nested", new TermQueryBuilder("nested.title", "foo"), ScoreMode.Total).innerHit( + new InnerHitBuilder().setStoredFieldNames(Collections.singletonList("_none_")) + .setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) + ) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); + assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getAt(0).getId(), nullValue()); + assertThat(hits.getAt(0).getSourceAsString(), nullValue()); + } + ); } public void testWithRouting() { @@ -76,14 +82,15 @@ public void testWithRouting() { client().prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); - SearchResponse response = prepareSearch("test").storedFields("_none_").setFetchSource(false).get(); - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); - response = prepareSearch("test").storedFields("_none_").get(); - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); } public void testInvalid() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 3fcbc5cf4add6..5e7847c744040 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.source; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.test.ESIntegTestCase; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsEqual.equalTo; @@ -23,14 +23,17 @@ public void testSourceDefaultBehavior() { indexDoc("test", "1", "field", "value"); refresh(); - SearchResponse response = prepareSearch("test").get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertResponse(prepareSearch("test"), response -> 
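+ // _source comes back by default when the request asks for nothing else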
assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue())); - response = prepareSearch("test").addStoredField("bla").get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + assertResponse( + prepareSearch("test").addStoredField("bla"), + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()) + ); - response = prepareSearch("test").addStoredField("_source").get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertResponse( + prepareSearch("test").addStoredField("_source"), + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) + ); } @@ -41,26 +44,30 @@ public void testSourceFiltering() { client().prepareIndex("test").setId("1").setSource("field1", "value", "field2", "value2").get(); refresh(); - SearchResponse response = prepareSearch("test").setFetchSource(false).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - - response = prepareSearch("test").setFetchSource(true).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - - response = prepareSearch("test").setFetchSource("field1", null).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - - response = prepareSearch("test").setFetchSource("hello", null).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); - - response = prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - + assertResponse( + prepareSearch("test").setFetchSource(false), + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()) + ); + + assertResponse( + prepareSearch("test").setFetchSource(true), + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) + ); + + assertResponse(prepareSearch("test").setFetchSource("field1", null), response -> { + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); + }); + assertResponse(prepareSearch("test").setFetchSource("hello", null), response -> { + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); + }); + assertResponse(prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }), response -> { + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); + }); } /** @@ -74,14 +81,15 @@ public void testSourceWithWildcardFiltering() { client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - 
SearchResponse response = prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - - response = prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null).get(); - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); + assertResponse(prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), response -> { + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); + }); + assertResponse(prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null), response -> { + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 32f5e14b944a2..e3b5301cb1999 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.stats.FieldUsageShardResponse; import org.elasticsearch.action.admin.indices.stats.FieldUsageStatsAction; import org.elasticsearch.action.admin.indices.stats.FieldUsageStatsRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; @@ -30,6 +29,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; public class FieldUsageStatsIT extends ESIntegTestCase { @@ -73,16 +73,18 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio assertFalse(stats.hasField("field2")); assertFalse(stats.hasField("date_field")); - SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DEFAULT) - .setQuery(QueryBuilders.termQuery("field", "value")) - .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword")) - .addAggregation(AggregationBuilders.filter("agg2", QueryBuilders.spanTermQuery("field2", "value2"))) - .setSize(between(5, 100)) - .setPreference("fixed") - .get(); - - assertHitCount(searchResponse, 30); - assertAllSuccessful(searchResponse); + assertResponse( + prepareSearch().setSearchType(SearchType.DEFAULT) + 
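+ // touches "field" via the query, "field.keyword" via the terms agg, and "field2" via the filter agg so each registers in the usage stats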
.setQuery(QueryBuilders.termQuery("field", "value")) + .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword")) + .addAggregation(AggregationBuilders.filter("agg2", QueryBuilders.spanTermQuery("field2", "value2"))) + .setSize(between(5, 100)) + .setPreference("fixed"), + response -> { + assertHitCount(response, 30); + assertAllSuccessful(response); + } + ); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after first query: {}", stats); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java index 07e8c516eda41..81c776d0893c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -39,7 +38,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -103,16 +103,22 @@ public void testSimpleStats() throws Exception { refresh(); int iters = scaledRandomIntBetween(100, 150); for (int i = 0; i < iters; i++) { - SearchResponse searchResponse = internalCluster().coordOnlyNodeClient() - .prepareSearch() - .setQuery(QueryBuilders.termQuery("field", "value")) - .setStats("group1", "group2") - .highlighter(new HighlightBuilder().field("field")) - .addScriptField("script1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.field", Collections.emptyMap())) - .setSize(100) - .get(); - assertHitCount(searchResponse, docsTest1 + docsTest2); - assertAllSuccessful(searchResponse); + assertResponse( + internalCluster().coordOnlyNodeClient() + .prepareSearch() + .setQuery(QueryBuilders.termQuery("field", "value")) + .setStats("group1", "group2") + .highlighter(new HighlightBuilder().field("field")) + .addScriptField( + "script1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.field", Collections.emptyMap()) + ) + .setSize(100), + response -> { + assertHitCount(response, docsTest1 + docsTest2); + assertAllSuccessful(response); + } + ); } IndicesStatsResponse indicesStats = indicesAdmin().prepareStats().get(); @@ -188,11 +194,15 @@ public void testOpenContexts() { assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L)); int size = scaledRandomIntBetween(1, docs); - SearchResponse searchResponse = 
prepareSearch().setQuery(matchAllQuery()) - .setSize(size) - .setScroll(TimeValue.timeValueMinutes(2)) - .get(); - assertNoFailures(searchResponse); + final String[] scroll = new String[1]; + final int[] total = new int[1]; + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(size).setScroll(TimeValue.timeValueMinutes(2)), + response -> { + scroll[0] = response.getScrollId(); + total[0] = response.getHits().getHits().length; + } + ); // refresh the stats now that scroll contexts are opened indicesStats = indicesAdmin().prepareStats(index).get(); @@ -202,11 +212,14 @@ public void testOpenContexts() { int hits = 0; while (true) { - if (searchResponse.getHits().getHits().length == 0) { + if (total[0] == 0) { break; } - hits += searchResponse.getHits().getHits().length; - searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + hits += total[0]; + assertResponse(client().prepareSearchScroll(scroll[0]).setScroll(TimeValue.timeValueMinutes(2)), response -> { + scroll[0] = response.getScrollId(); + total[0] = response.getHits().getHits().length; + }); } long expected = 0; @@ -220,7 +233,7 @@ public void testOpenContexts() { assertEquals(hits, docs * numAssignedShards(index)); assertThat(stats.getQueryCount(), greaterThanOrEqualTo(expected)); - clearScroll(searchResponse.getScrollId()); + clearScroll(scroll[0]); indicesStats = indicesAdmin().prepareStats().get(); stats = indicesStats.getTotal().getSearch().getTotal(); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index edc4805c4d8ce..5bdc74b8f2545 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,13 @@ static TransportVersion def(int id) { public static final TransportVersion DEPRECATED_COMPONENT_TEMPLATES_ADDED = def(8_532_00_0); public static final TransportVersion UPDATE_NON_DYNAMIC_SETTINGS_ADDED = def(8_533_00_0); public static final TransportVersion REPO_ANALYSIS_REGISTER_OP_COUNT_ADDED = def(8_534_00_0); + public static final TransportVersion ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED = def(8_535_00_0); + public static final TransportVersion COUNTED_KEYWORD_ADDED = def(8_536_00_0); + public static final TransportVersion SHAPE_VALUE_SERIALIZATION_ADDED = def(8_537_00_0); + public static final TransportVersion INFERENCE_MULTIPLE_INPUTS = def(8_538_00_0); + public static final TransportVersion ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS = def(8_539_00_0); + public static final TransportVersion ML_STATE_CHANGE_TIMESTAMPS = def(8_540_00_0); + /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index da0945a1cfcfa..5dd9a3a055043 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -115,6 +115,7 @@ public class Version implements VersionId<Version>, ToXContentFragment { public static final Version V_7_17_13 = new Version(7_17_13_99); public static final Version V_7_17_14 = new Version(7_17_14_99); public static final Version V_7_17_15 = new Version(7_17_15_99); + public static final Version V_7_17_16 = new Version(7_17_16_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); public static final Version V_8_1_0 = new Version(8_01_00_99); diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index 30ad4fdeaf04f..b0e18d5ef9b55 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -327,7 +327,12 @@ private void assertFirstRun() { @Override public void onResponse(Response response) { assertFirstRun(); - delegate.onResponse(response); + try { + delegate.onResponse(response); + } catch (Exception e) { + assert false : new AssertionError("listener [" + delegate + "] must handle its own exceptions", e); + throw e; + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 4ab0b6bd221e9..af40637db6703 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -53,6 +53,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV1Templates; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV2Templates; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findV2Template; @@ -69,6 +70,7 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea private final SystemIndices systemIndices; private final Set<IndexSettingProvider> indexSettingProviders; private final ClusterSettings clusterSettings; + private final boolean isDslOnlyMode; @Inject public TransportSimulateIndexTemplateAction( @@ -100,6 +102,7 @@ public TransportSimulateIndexTemplateAction( this.systemIndices = systemIndices; this.indexSettingProviders = indexSettingProviders.getIndexSettingProviders(); this.clusterSettings = clusterService.getClusterSettings(); + this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); } @Override @@ -146,6 +149,7 @@ protected void masterOperation( matchingTemplate, request.getIndexName(), stateWithTemplate, + isDslOnlyMode, xContentRegistry, indicesService, systemIndices, @@ -218,6 +222,7 @@ public static Template resolveTemplate( final String
matchingTemplate, final String indexName, final ClusterState simulatedState, + final boolean isDslOnlyMode, final NamedXContentRegistry xContentRegistry, final IndicesService indicesService, final SystemIndices systemIndices, @@ -304,6 +309,9 @@ public static Template resolveTemplate( Settings settings = Settings.builder().put(templateSettings).put(additionalSettings.build()).build(); DataStreamLifecycle lifecycle = resolveLifecycle(simulatedState.metadata(), matchingTemplate); + if (template.getDataStreamTemplate() != null && lifecycle == null && isDslOnlyMode) { + lifecycle = DataStreamLifecycle.DEFAULT; + } return new Template(settings, mergedMapping, aliasesByName, lifecycle); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java index b99f436dd86f9..1f35d0b8a1268 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV1Templates; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV2Templates; @@ -56,6 +57,7 @@ public class TransportSimulateTemplateAction extends TransportMasterNodeReadActi private final SystemIndices systemIndices; private final Set indexSettingProviders; private final ClusterSettings clusterSettings; + private final boolean isDslOnlyMode; @Inject public TransportSimulateTemplateAction( @@ -87,6 +89,7 @@ public TransportSimulateTemplateAction( this.systemIndices = systemIndices; this.indexSettingProviders = indexSettingProviders.getIndexSettingProviders(); this.clusterSettings = clusterService.getClusterSettings(); + this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); } @Override @@ -162,6 +165,7 @@ protected void masterOperation( matchingTemplate, temporaryIndexName, stateWithTemplate, + isDslOnlyMode, xContentRegistry, indicesService, systemIndices, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java index 3b6e69d16bae3..f1280587a0c55 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -19,6 +18,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.rest.action.document.RestBulkAction; @@ -430,32 +430,32 @@ public void parse( } } + @UpdateForV9 + // Warnings will need to be replaced with XContentEOFException from 9.x + private static void 
warnBulkActionNotProperlyClosed(String message) { + deprecationLogger.compatibleCritical(STRICT_ACTION_PARSING_WARNING_KEY, message); + } + private static void checkBulkActionIsProperlyClosed(XContentParser parser) throws IOException { XContentParser.Token token; try { token = parser.nextToken(); } catch (XContentEOFException ignore) { - assert Version.CURRENT.major == Version.V_7_17_0.major + 1; - deprecationLogger.compatibleCritical( - STRICT_ACTION_PARSING_WARNING_KEY, + warnBulkActionNotProperlyClosed( "A bulk action wasn't closed properly with the closing brace. Malformed objects are currently accepted but will be " + "rejected in a future version." ); return; } if (token != XContentParser.Token.END_OBJECT) { - assert Version.CURRENT.major == Version.V_7_17_0.major + 1; - deprecationLogger.compatibleCritical( - STRICT_ACTION_PARSING_WARNING_KEY, + warnBulkActionNotProperlyClosed( "A bulk action object contained multiple keys. Additional keys are currently ignored but will be rejected in a " + "future version." ); return; } if (parser.nextToken() != null) { - assert Version.CURRENT.major == Version.V_7_17_0.major + 1; - deprecationLogger.compatibleCritical( - STRICT_ACTION_PARSING_WARNING_KEY, + warnBulkActionNotProperlyClosed( "A bulk action contained trailing data after the closing brace. This is currently ignored but will be rejected in a " + "future version." ); diff --git a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java index ee956b5179902..73061298d8f7e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java @@ -39,7 +39,6 @@ import java.util.function.Consumer; import java.util.function.Supplier; -import static java.util.stream.Collectors.toCollection; import static org.elasticsearch.action.search.SearchPhaseController.getTopDocsSize; import static org.elasticsearch.action.search.SearchPhaseController.mergeTopDocs; import static org.elasticsearch.action.search.SearchPhaseController.setShardIndex; @@ -106,7 +105,7 @@ public QueryPhaseResultConsumer( @Override public void close() { - Releasables.close(pendingMerges); + pendingMerges.close(); } @Override @@ -269,12 +268,9 @@ public synchronized void close() { assert circuitBreakerBytes >= 0; } - List toRelease = buffer.stream().map(b -> b::releaseAggs).collect(toCollection(ArrayList::new)); - toRelease.add(() -> { - circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); - circuitBreakerBytes = 0; - }); - Releasables.close(toRelease); + releaseBuffer(); + circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); + circuitBreakerBytes = 0; if (hasPendingMerges()) { // This is a theoretically unreachable exception. 
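Aside: the QueryPhaseResultConsumer hunks around here replace the collected list of Releasables with a direct releaseBuffer() call plus explicit circuit-breaker accounting. A minimal standalone sketch of that release pattern (invented names, not the actual Elasticsearch classes):

```java
// Sketch only: drain the buffered per-shard results first, then hand all
// reserved bytes back to the circuit breaker in a single call.
import java.util.ArrayList;
import java.util.List;

class PendingMergesSketch {
    interface BufferedResult { void releaseAggs(); }
    interface CircuitBreaker { void addWithoutBreaking(long bytes); }

    private final List<BufferedResult> buffer = new ArrayList<>();
    private final CircuitBreaker circuitBreaker;
    private long circuitBreakerBytes;

    PendingMergesSketch(CircuitBreaker breaker) {
        this.circuitBreaker = breaker;
    }

    synchronized void close() {
        releaseBuffer();                                          // release anything still buffered
        circuitBreaker.addWithoutBreaking(-circuitBreakerBytes);  // return the reserved bytes
        circuitBreakerBytes = 0;
    }

    private void releaseBuffer() {
        buffer.forEach(BufferedResult::releaseAggs);
        buffer.clear();
    }
}
```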
@@ -350,8 +346,7 @@ public void consume(QuerySearchResult result, Runnable next) { addEstimateAndMaybeBreak(aggsSize); } catch (Exception exc) { result.releaseAggs(); - buffer.forEach(QuerySearchResult::releaseAggs); - buffer.clear(); + releaseBuffer(); onMergeFailure(exc); next.run(); return; @@ -379,6 +374,11 @@ public void consume(QuerySearchResult result, Runnable next) { } } + private void releaseBuffer() { + buffer.forEach(QuerySearchResult::releaseAggs); + buffer.clear(); + } + private synchronized void onMergeFailure(Exception exc) { if (hasFailure()) { assert circuitBreakerBytes == 0; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index 4c8ade4d78ead..47b98270dfbfc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -79,7 +79,6 @@ public TransportSearchShardsAction( @Override protected void doExecute(Task task, SearchShardsRequest searchShardsRequest, ActionListener listener) { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH_COORDINATION); final long relativeStartNanos = System.nanoTime(); SearchRequest original = new SearchRequest(searchShardsRequest.indices()).indicesOptions(searchShardsRequest.indicesOptions()) .routing(searchShardsRequest.routing()) diff --git a/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java b/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java index f4d580a44621f..ff5c3115e569b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java @@ -10,13 +10,13 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; import java.util.Objects; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; /** * A mechanism to complete a listener on the completion of some (dynamic) collection of other actions. Basic usage is as follows: @@ -176,7 +176,7 @@ public String toString() { * It is also invalid to complete the returned listener more than once. Doing so will trip an assertion if assertions are enabled, but * will be ignored otherwise. 
*/ - public ActionListener acquire(Consumer consumer) { + public ActionListener acquire(CheckedConsumer consumer) { final var ref = refs.acquire(); final var consumerRef = new AtomicReference<>(Objects.requireNonNull(consumer)); return new ActionListener<>() { @@ -187,10 +187,12 @@ public void onResponse(Response response) { if (acquiredConsumer == null) { assert false : "already closed"; } else { - acquiredConsumer.accept(response); + try { + acquiredConsumer.accept(response); + } catch (Exception e) { + addException(e); + } } - } catch (Exception e) { - addException(e); } } diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java index 0228dc7cc61ea..f75997d92b678 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java @@ -110,7 +110,7 @@ public Task exe transportAction(action), request, localConnection, - new SafelyWrappedActionListener<>(listener) + ActionListener.assertOnce(listener) ); } @@ -148,27 +148,4 @@ public NamedWriteableRegistry getNamedWriteableRegistry() { return namedWriteableRegistry; } - private record SafelyWrappedActionListener(ActionListener listener) implements ActionListener { - - @Override - public void onResponse(Response response) { - try { - listener.onResponse(response); - } catch (Exception e) { - assert false : new AssertionError("callback must handle its own exceptions", e); - throw e; - } - } - - @Override - public void onFailure(Exception e) { - try { - listener.onFailure(e); - } catch (Exception ex) { - ex.addSuppressed(e); - assert false : new AssertionError("callback must handle its own exceptions", ex); - throw ex; - } - } - } } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java index ae68bfafdd6c5..95cc53376af59 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java @@ -12,12 +12,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; @@ -50,24 +52,24 @@ public ClusterFeatures(Map> nodeFeatures) { .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> Set.copyOf(e.getValue()))); } - private Set calculateAllNodeFeatures() { + public static Set calculateAllNodeFeatures(Collection> nodeFeatures) { if (nodeFeatures.isEmpty()) { return Set.of(); } Set allNodeFeatures = null; - for (Set featureSet : nodeFeatures.values()) { + for (Set featureSet : nodeFeatures) { if (allNodeFeatures == null) { allNodeFeatures = new HashSet<>(featureSet); } else { allNodeFeatures.retainAll(featureSet); } } - return Set.copyOf(allNodeFeatures); + return allNodeFeatures; } /** - * Returns the features reported by each node in the cluster. + * The features reported by each node in the cluster. *

* NOTE: This should not be used directly. * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. @@ -76,17 +78,28 @@ public Map> nodeFeatures() { return nodeFeatures; } + /** + * The features in all nodes in the cluster. + *

+ * NOTE: This should not be used directly. + * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. + */ + public Set allNodeFeatures() { + if (allNodeFeatures == null) { + allNodeFeatures = Set.copyOf(calculateAllNodeFeatures(nodeFeatures.values())); + } + return allNodeFeatures; + } + /** * {@code true} if {@code feature} is present on all nodes in the cluster. *

* NOTE: This should not be used directly, as it does not read historical features. * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. */ + @SuppressForbidden(reason = "directly reading cluster features") public boolean clusterHasFeature(NodeFeature feature) { - if (allNodeFeatures == null) { - allNodeFeatures = calculateAllNodeFeatures(); - } - return allNodeFeatures.contains(feature.id()); + return allNodeFeatures().contains(feature.id()); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java index ee94008372dab..b109f67b7fa41 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -8,7 +8,6 @@ package org.elasticsearch.cluster; -import org.elasticsearch.Version; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.ComponentTemplateMetadata; @@ -65,6 +64,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.health.metadata.HealthMetadataService; import org.elasticsearch.health.node.selection.HealthNodeTaskExecutor; @@ -373,6 +373,7 @@ private static void addAllocationDecider(Map, AllocationDecider> decide } } + @UpdateForV9 // in v9 there is only one allocator private static ShardsAllocator createShardsAllocator( Settings settings, ClusterSettings clusterSettings, @@ -404,7 +405,6 @@ private static ShardsAllocator createShardsAllocator( }); } String allocatorName = SHARDS_ALLOCATOR_TYPE_SETTING.get(settings); - assert Version.CURRENT.major == Version.V_7_17_0.major + 1; // in v9 there is only one allocator Supplier allocatorSupplier = allocators.get(allocatorName); if (allocatorSupplier == null) { throw new IllegalArgumentException("Unknown ShardsAllocator [" + allocatorName + "]"); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 565e43455d8d7..884441aa41798 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -1047,7 +1047,7 @@ private static TransportVersion inferTransportVersion(DiscoveryNode node) { TransportVersion tv; if (node.getVersion().before(Version.V_8_8_0)) { // 1-to-1 mapping between Version and TransportVersion - tv = TransportVersion.fromId(node.getVersion().id); + tv = TransportVersion.fromId(node.getPre811VersionId().getAsInt()); } else { // use the lowest value it could be for now tv = INFERRED_TRANSPORT_VERSION; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java index 2ec3dc7998f4d..402e170f1ea53 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java @@ -219,10 +219,10 @@ public Join handleStartJoin(StartJoinRequest startJoinRequest) { public boolean handleJoin(Join join) { assert join.masterCandidateMatches(localNode) : "handling join " + 
join + " for the wrong node " + localNode; - if (join.getTerm() != getCurrentTerm()) { - logger.debug("handleJoin: ignored join due to term mismatch (expected: [{}], actual: [{}])", getCurrentTerm(), join.getTerm()); + if (join.term() != getCurrentTerm()) { + logger.debug("handleJoin: ignored join due to term mismatch (expected: [{}], actual: [{}])", getCurrentTerm(), join.term()); throw new CoordinationStateRejectedException( - "incoming term " + join.getTerm() + " does not match current term " + getCurrentTerm() + "incoming term " + join.term() + " does not match current term " + getCurrentTerm() ); } @@ -232,30 +232,30 @@ public boolean handleJoin(Join join) { } final long lastAcceptedTerm = getLastAcceptedTerm(); - if (join.getLastAcceptedTerm() > lastAcceptedTerm) { + if (join.lastAcceptedTerm() > lastAcceptedTerm) { logger.debug( "handleJoin: ignored join as joiner has a better last accepted term (expected: <=[{}], actual: [{}])", lastAcceptedTerm, - join.getLastAcceptedTerm() + join.lastAcceptedTerm() ); throw new CoordinationStateRejectedException( "incoming last accepted term " - + join.getLastAcceptedTerm() + + join.lastAcceptedTerm() + " of join higher than current last accepted term " + lastAcceptedTerm ); } - if (join.getLastAcceptedTerm() == lastAcceptedTerm && join.getLastAcceptedVersion() > getLastAcceptedVersion()) { + if (join.lastAcceptedTerm() == lastAcceptedTerm && join.lastAcceptedVersion() > getLastAcceptedVersion()) { logger.debug( "handleJoin: ignored join as joiner has a better last accepted version (expected: <=[{}], actual: [{}]) in term {}", getLastAcceptedVersion(), - join.getLastAcceptedVersion(), + join.lastAcceptedVersion(), lastAcceptedTerm ); throw new CoordinationStateRejectedException( "incoming last accepted version " - + join.getLastAcceptedVersion() + + join.lastAcceptedVersion() + " of join higher than current last accepted version " + getLastAcceptedVersion() + " in term " @@ -280,7 +280,7 @@ public boolean handleJoin(Join join) { logger.debug( "handleJoin: added join {} from [{}] for election, electionWon={} lastAcceptedTerm={} lastAcceptedVersion={}", join, - join.getVotingNode(), + join.votingNode(), electionWon, lastAcceptedTerm, getLastAcceptedVersion() @@ -598,7 +598,7 @@ public boolean addVote(DiscoveryNode sourceNode) { } public boolean addJoinVote(Join join) { - final boolean added = addVote(join.getVotingNode()); + final boolean added = addVote(join.votingNode()); if (added) { joins.add(join); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index d59ffc95ee0da..c3c757bb335e4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -63,6 +63,7 @@ import org.elasticsearch.discovery.SeedHostsProvider; import org.elasticsearch.discovery.SeedHostsResolver; import org.elasticsearch.discovery.TransportAddressConnector; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.NodeHealthService; import org.elasticsearch.monitor.StatusInfo; @@ -212,7 +213,7 @@ public Coordinator( LeaderHeartbeatService leaderHeartbeatService, PreVoteCollector.Factory preVoteCollectorFactory, CompatibilityVersions compatibilityVersions, - Set features + FeatureService featureService ) { this.settings = settings; this.transportService = 
transportService; @@ -238,7 +239,7 @@ public Coordinator( reconfigurator::maybeReconfigureAfterNewMasterIsElected, this::getLatestStoredStateAfterWinningAnElection, compatibilityVersions, - features + featureService ); this.joinValidationService = new JoinValidationService( settings, @@ -488,7 +489,7 @@ PublishWithJoinResponse handlePublishRequest(PublishRequest publishRequest) { } private static Optional joinWithDestination(Optional lastJoin, DiscoveryNode leader, long term) { - if (lastJoin.isPresent() && lastJoin.get().masterCandidateMatches(leader) && lastJoin.get().getTerm() == term) { + if (lastJoin.isPresent() && lastJoin.get().masterCandidateMatches(leader) && lastJoin.get().term() == term) { return lastJoin; } @@ -779,7 +780,7 @@ private void processJoinRequest(JoinRequest joinRequest, ActionListener jo final CoordinationState coordState = coordinationState.get(); final boolean prevElectionWon = coordState.electionWon() - && optionalJoin.stream().allMatch(j -> j.getTerm() <= getCurrentTerm()); + && optionalJoin.stream().allMatch(j -> j.term() <= getCurrentTerm()); optionalJoin.ifPresent(this::handleJoin); joinAccumulator.handleJoinRequest( @@ -1398,7 +1399,7 @@ boolean missingJoinVoteFrom(DiscoveryNode node) { private void handleJoin(Join join) { synchronized (mutex) { - ensureTermAtLeast(getLocalNode(), join.getTerm()).ifPresent(this::handleJoin); + ensureTermAtLeast(getLocalNode(), join.term()).ifPresent(this::handleJoin); if (coordinationState.get().electionWon()) { // If we have already won the election then the actual join does not matter for election purposes, so swallow any exception @@ -2074,7 +2075,7 @@ private void cancelTimeoutHandlers() { } private void handleAssociatedJoin(Join join) { - if (join.getTerm() == getCurrentTerm() && missingJoinVoteFrom(join.getVotingNode())) { + if (join.term() == getCurrentTerm() && missingJoinVoteFrom(join.votingNode())) { logger.trace("handling {}", join); handleJoin(join); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Join.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Join.java index a963095910701..d1fe472278f12 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Join.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Join.java @@ -24,36 +24,24 @@ * A voting node will only cast a single vote per term. The vote includes information about * the current state of the node casting the vote, so that the candidate for the vote can * determine whether it has a more up-to-date state than the voting node. + * + * @param votingNode The node casting a vote for a master candidate. + * @param masterCandidateNode The master candidate node receiving the vote for election. + * @param term + * @param lastAcceptedTerm + * @param lastAcceptedVersion */ -public class Join implements Writeable { - // The node casting a vote for a master candidate. - private final DiscoveryNode votingNode; - - // The master candidate node receiving the vote for election. 
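The Join change applied below is a class-to-record conversion. A simplified illustration of the shape it takes (invented names, not the ES classes): the compact constructor keeps the invariant asserts, the wire constructor delegates to the canonical one, and equals/hashCode/toString plus accessors such as term() are generated by the compiler.

```java
// Sketch under the assumption of a simple DataInput wire format; the real
// class reads DiscoveryNode instances from a StreamInput.
import java.io.DataInput;
import java.io.IOException;

record JoinSketch(String votingNodeId, String candidateNodeId, long term, long lastAcceptedTerm, long lastAcceptedVersion) {
    JoinSketch {
        assert term >= 0;
        assert lastAcceptedTerm >= 0;
        assert lastAcceptedVersion >= 0;
    }

    // analogue of Join(StreamInput in): read the components in declaration order
    static JoinSketch readFrom(DataInput in) throws IOException {
        return new JoinSketch(in.readUTF(), in.readUTF(), in.readLong(), in.readLong(), in.readLong());
    }
}
```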
- private final DiscoveryNode masterCandidateNode; - - private final long term; - private final long lastAcceptedTerm; - private final long lastAcceptedVersion; - - public Join(DiscoveryNode votingNode, DiscoveryNode masterCandidateNode, long term, long lastAcceptedTerm, long lastAcceptedVersion) { +public record Join(DiscoveryNode votingNode, DiscoveryNode masterCandidateNode, long term, long lastAcceptedTerm, long lastAcceptedVersion) + implements + Writeable { + public Join { assert term >= 0; assert lastAcceptedTerm >= 0; assert lastAcceptedVersion >= 0; - - this.votingNode = votingNode; - this.masterCandidateNode = masterCandidateNode; - this.term = term; - this.lastAcceptedTerm = lastAcceptedTerm; - this.lastAcceptedVersion = lastAcceptedVersion; } public Join(StreamInput in) throws IOException { - votingNode = new DiscoveryNode(in); - masterCandidateNode = new DiscoveryNode(in); - term = in.readLong(); - lastAcceptedTerm = in.readLong(); - lastAcceptedVersion = in.readLong(); + this(new DiscoveryNode(in), new DiscoveryNode(in), in.readLong(), in.readLong(), in.readLong()); } @Override @@ -65,74 +53,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(lastAcceptedVersion); } - public DiscoveryNode getVotingNode() { - return votingNode; - } - - public DiscoveryNode getMasterCandidateNode() { - return masterCandidateNode; - } - - /** - * Temporary compatibility with serverless code repository. - */ - public DiscoveryNode getSourceNode() { - return masterCandidateNode; - } - public boolean masterCandidateMatches(DiscoveryNode matchingNode) { return masterCandidateNode.getId().equals(matchingNode.getId()); } - - public long getLastAcceptedVersion() { - return lastAcceptedVersion; - } - - public long getTerm() { - return term; - } - - public long getLastAcceptedTerm() { - return lastAcceptedTerm; - } - - @Override - public String toString() { - return "Join{" - + "term=" - + term - + ", lastAcceptedTerm=" - + lastAcceptedTerm - + ", lastAcceptedVersion=" - + lastAcceptedVersion - + ", votingNode=" - + votingNode - + ", masterCandidateNode=" - + masterCandidateNode - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Join join = (Join) o; - - if (votingNode.equals(join.votingNode) == false) return false; - if (masterCandidateNode.equals(join.masterCandidateNode) == false) return false; - if (lastAcceptedVersion != join.lastAcceptedVersion) return false; - if (term != join.term) return false; - return lastAcceptedTerm == join.lastAcceptedTerm; - } - - @Override - public int hashCode() { - int result = (int) (lastAcceptedVersion ^ (lastAcceptedVersion >>> 32)); - result = 31 * result + votingNode.hashCode(); - result = 31 * result + masterCandidateNode.hashCode(); - result = 31 * result + (int) (term ^ (term >>> 32)); - result = 31 * result + (int) (lastAcceptedTerm ^ (lastAcceptedTerm >>> 32)); - return result; - } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 326ee5d33a32d..d11d8ade2a036 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -33,6 +33,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import 
org.elasticsearch.features.FeatureService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.NodeHealthService; import org.elasticsearch.monitor.StatusInfo; @@ -106,12 +107,12 @@ public class JoinHelper { Function maybeReconfigureAfterMasterElection, ObjLongConsumer> latestStoredStateSupplier, CompatibilityVersions compatibilityVersions, - Set features + FeatureService featureService ) { this.joinTaskQueue = masterService.createTaskQueue( "node-join", Priority.URGENT, - new NodeJoinExecutor(allocationService, rerouteService, maybeReconfigureAfterMasterElection) + new NodeJoinExecutor(allocationService, rerouteService, featureService, maybeReconfigureAfterMasterElection) ); this.clusterApplier = clusterApplier; this.transportService = transportService; @@ -121,7 +122,7 @@ public class JoinHelper { this.joinReasonService = joinReasonService; this.latestStoredStateSupplier = latestStoredStateSupplier; this.compatibilityVersions = compatibilityVersions; - this.features = features; + this.features = featureService.getNodeFeatures().keySet(); transportService.registerRequestHandler( JOIN_ACTION_NAME, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java index c7bba867c5555..a6a2f454694ae 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java @@ -60,7 +60,7 @@ public JoinRequest( long minimumTerm, Optional optionalJoin ) { - assert optionalJoin.isPresent() == false || optionalJoin.get().getVotingNode().equals(sourceNode); + assert optionalJoin.isPresent() == false || optionalJoin.get().votingNode().equals(sourceNode); this.sourceNode = sourceNode; this.compatibilityVersions = compatibilityVersions; this.features = features; @@ -76,7 +76,10 @@ public JoinRequest(StreamInput in) throws IOException { } else { // there's a 1-1 mapping from Version to TransportVersion before 8.8.0 // no known mapping versions here - compatibilityVersions = new CompatibilityVersions(TransportVersion.fromId(sourceNode.getVersion().id), Map.of()); + compatibilityVersions = new CompatibilityVersions( + TransportVersion.fromId(sourceNode.getPre811VersionId().getAsInt()), + Map.of() + ); } if (in.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { features = in.readCollectionAsSet(StreamInput::readString); @@ -121,7 +124,7 @@ public long getTerm() { // If the join is also present then its term will normally equal the corresponding term, but we do not require callers to // obtain the term and the join in a synchronized fashion so it's possible that they disagree. Also older nodes do not share the // minimum term, so for BWC we can take it from the join if present. 
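The term resolution in JoinRequest.getTerm below is small but easy to misread; a minimal sketch of the logic (hypothetical names): take the explicit minimum term if it is larger, otherwise fall back to the term carried by the optional join, since older nodes only sent the join.

```java
import java.util.Optional;

final class TermResolutionSketch {
    record Join(long term) {}

    static long getTerm(long minimumTerm, Optional<Join> optionalJoin) {
        return Math.max(minimumTerm, optionalJoin.map(Join::term).orElse(0L));
    }

    public static void main(String[] args) {
        System.out.println(getTerm(3L, Optional.of(new Join(5L)))); // 5
        System.out.println(getTerm(3L, Optional.empty()));          // 3
    }
}
```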
- return Math.max(minimumTerm, optionalJoin.map(Join::getTerm).orElse(0L)); + return Math.max(minimumTerm, optionalJoin.map(Join::term).orElse(0L)); } public Optional getOptionalJoin() { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 00086c42ed4ae..480f1d5503d61 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -12,6 +12,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterFeatures; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.NotMasterException; @@ -25,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -34,6 +36,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -55,19 +58,22 @@ public class NodeJoinExecutor implements ClusterStateTaskExecutor { private final AllocationService allocationService; private final RerouteService rerouteService; + private final FeatureService featureService; private final Function maybeReconfigureAfterMasterElection; - public NodeJoinExecutor(AllocationService allocationService, RerouteService rerouteService) { - this(allocationService, rerouteService, Function.identity()); + public NodeJoinExecutor(AllocationService allocationService, RerouteService rerouteService, FeatureService featureService) { + this(allocationService, rerouteService, featureService, Function.identity()); } public NodeJoinExecutor( AllocationService allocationService, RerouteService rerouteService, + FeatureService featureService, Function maybeReconfigureAfterMasterElection ) { this.allocationService = allocationService; this.rerouteService = rerouteService; + this.featureService = featureService; this.maybeReconfigureAfterMasterElection = maybeReconfigureAfterMasterElection; } @@ -123,6 +129,7 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); Map compatibilityVersionsMap = new HashMap<>(newState.compatibilityVersions()); Map> nodeFeatures = new HashMap<>(newState.nodeFeatures()); + Set allNodesFeatures = ClusterFeatures.calculateAllNodeFeatures(nodeFeatures.values()); assert nodesBuilder.isLocalNodeElectedMaster(); @@ -155,16 +162,17 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex if (enforceVersionBarrier) { ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); CompatibilityVersions.ensureVersionsCompatibility(compatibilityVersions, compatibilityVersionsMap.values()); - // TODO: enforce feature ratchet barrier } blockForbiddenVersions(compatibilityVersions.transportVersion()); ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); + 
enforceNodeFeatureBarrier(node.getId(), allNodesFeatures, features); // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices // we have to reject nodes that don't support all indices we have in this cluster ensureIndexCompatibility(node.getMinIndexVersion(), node.getMaxIndexVersion(), initialState.getMetadata()); nodesBuilder.add(node); compatibilityVersionsMap.put(node.getId(), compatibilityVersions); nodeFeatures.put(node.getId(), features); + allNodesFeatures.retainAll(features); nodesChanged = true; minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); @@ -444,6 +452,16 @@ public static void ensureVersionBarrier(Version joiningNodeVersion, Version minC } } + private void enforceNodeFeatureBarrier(String nodeId, Set existingNodesFeatures, Set newNodeFeatures) { + // prevent join if it does not have one or more features that all other nodes have + Set missingFeatures = new HashSet<>(existingNodesFeatures); + missingFeatures.removeAll(newNodeFeatures); + + if (missingFeatures.isEmpty() == false) { + throw new IllegalStateException("Node " + nodeId + " is missing required features " + missingFeatures); + } + } + public static Collection> addBuiltInJoinValidators( Collection> onJoinValidators ) { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Publication.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Publication.java index dc79d5079d072..6afb85bdf629e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Publication.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Publication.java @@ -366,8 +366,8 @@ public void onResponse(PublishWithJoinResponse response) { if (response.getJoin().isPresent()) { final Join join = response.getJoin().get(); - assert discoveryNode.equals(join.getVotingNode()); - assert join.getTerm() == response.getPublishResponse().getTerm() : response; + assert discoveryNode.equals(join.votingNode()); + assert join.term() == response.getPublishResponse().getTerm() : response; logger.trace("handling join within publish response: {}", join); onJoin(join); } else { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index faa3010adbf72..4a97d79380013 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -111,59 +111,7 @@ public static Builder builder() { return new Builder(); } - /** - * @deprecated use {@link Builder} instead - */ - @Deprecated(forRemoval = true) - public ComposableIndexTemplate( - List indexPatterns, - @Nullable Template template, - @Nullable List componentTemplates, - @Nullable Long priority, - @Nullable Long version, - @Nullable Map metadata - ) { - this(indexPatterns, template, componentTemplates, priority, version, metadata, null, null, null, null); - } - - /** - * @deprecated use {@link Builder} instead - */ - @Deprecated(forRemoval = true) - public ComposableIndexTemplate( - List indexPatterns, - @Nullable Template template, - @Nullable List componentTemplates, - @Nullable Long priority, - @Nullable Long version, - @Nullable Map metadata, - @Nullable DataStreamTemplate dataStreamTemplate - ) { - this(indexPatterns, template, 
componentTemplates, priority, version, metadata, dataStreamTemplate, null, null, null); - } - - /** - * @deprecated use {@link Builder} instead - */ - @Deprecated(forRemoval = true) - public ComposableIndexTemplate( - List indexPatterns, - @Nullable Template template, - @Nullable List componentTemplates, - @Nullable Long priority, - @Nullable Long version, - @Nullable Map metadata, - @Nullable DataStreamTemplate dataStreamTemplate, - @Nullable Boolean allowAutoCreate - ) { - this(indexPatterns, template, componentTemplates, priority, version, metadata, dataStreamTemplate, allowAutoCreate, null, null); - } - - /** - * @deprecated use {@link Builder} instead - */ - @Deprecated(forRemoval = true) - public ComposableIndexTemplate( + private ComposableIndexTemplate( List indexPatterns, @Nullable Template template, @Nullable List componentTemplates, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java index a3a6e34d445d2..8d7ce0525e943 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -49,6 +50,19 @@ public class DataStreamLifecycle implements SimpleDiffable, // Versions over the wire public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_500_057; + public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; + + /** + * Check if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set to {@code true}, indicating that + * we're running in a cluster configuration that is only expecting to use data streams lifecycles. 
+ * + * @param settings the node settings + * @return true if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set + */ + public static boolean isDataStreamsLifecycleOnlyMode(final Settings settings) { + return settings.getAsBoolean(DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME, false); + } + public static final Setting CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING = new Setting<>( "cluster.lifecycle.default.rollover", "max_age=auto,max_primary_shard_size=50gb,min_docs=1,max_primary_shard_docs=200000000", @@ -57,6 +71,8 @@ public class DataStreamLifecycle implements SimpleDiffable, Setting.Property.NodeScope ); + public static final DataStreamLifecycle DEFAULT = new DataStreamLifecycle(); + public static final String DATA_STREAM_LIFECYCLE_ORIGIN = "data_stream_lifecycle"; public static final ParseField ENABLED_FIELD = new ParseField("enabled"); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 52b522ec5ddaa..ca885632a08c4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -46,6 +46,8 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; + public class MetadataCreateDataStreamService { private static final Logger logger = LogManager.getLogger(MetadataCreateDataStreamService.class); @@ -53,6 +55,7 @@ public class MetadataCreateDataStreamService { private final ThreadPool threadPool; private final ClusterService clusterService; private final MetadataCreateIndexService metadataCreateIndexService; + private final boolean isDslOnlyMode; public MetadataCreateDataStreamService( ThreadPool threadPool, @@ -62,6 +65,7 @@ public MetadataCreateDataStreamService( this.threadPool = threadPool; this.clusterService = clusterService; this.metadataCreateIndexService = metadataCreateIndexService; + this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); } public void createDataStream(CreateDataStreamClusterStateUpdateRequest request, ActionListener finalListener) { @@ -87,7 +91,13 @@ public void createDataStream(CreateDataStreamClusterStateUpdateRequest request, new AckedClusterStateUpdateTask(Priority.HIGH, request, delegate.clusterStateUpdate()) { @Override public ClusterState execute(ClusterState currentState) throws Exception { - ClusterState clusterState = createDataStream(metadataCreateIndexService, currentState, request, delegate.reroute()); + ClusterState clusterState = createDataStream( + metadataCreateIndexService, + currentState, + isDslOnlyMode, + request, + delegate.reroute() + ); firstBackingIndexRef.set(clusterState.metadata().dataStreams().get(request.name).getIndices().get(0).getName()); return clusterState; } @@ -105,7 +115,7 @@ public ClusterState createDataStream( ClusterState current, ActionListener listener ) throws Exception { - return createDataStream(metadataCreateIndexService, current, request, listener); + return createDataStream(metadataCreateIndexService, current, isDslOnlyMode, request, listener); } public static final class CreateDataStreamClusterStateUpdateRequest extends ClusterStateUpdateRequest< @@ -162,10 +172,11 @@ public SystemDataStreamDescriptor getSystemDataStreamDescriptor() { static 
ClusterState createDataStream( MetadataCreateIndexService metadataCreateIndexService, ClusterState currentState, + boolean isDslOnlyMode, CreateDataStreamClusterStateUpdateRequest request, ActionListener listener ) throws Exception { - return createDataStream(metadataCreateIndexService, currentState, request, List.of(), null, listener); + return createDataStream(metadataCreateIndexService, currentState, isDslOnlyMode, request, List.of(), null, listener); } /** @@ -181,6 +192,7 @@ static ClusterState createDataStream( static ClusterState createDataStream( MetadataCreateIndexService metadataCreateIndexService, ClusterState currentState, + boolean isDslOnlyMode, CreateDataStreamClusterStateUpdateRequest request, List backingIndices, IndexMetadata writeIndex, @@ -277,7 +289,7 @@ static ClusterState createDataStream( isSystem, template.getDataStreamTemplate().isAllowCustomRouting(), indexMode, - lifecycle + lifecycle == null && isDslOnlyMode ? DataStreamLifecycle.DEFAULT : lifecycle ); Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(newDataStream); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java index b268be27e17ac..f7fa34d76498a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java @@ -41,6 +41,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService.createDataStream; public class MetadataMigrateToDataStreamService { @@ -63,6 +64,7 @@ public class MetadataMigrateToDataStreamService { private final IndicesService indexServices; private final ThreadContext threadContext; private final MetadataCreateIndexService metadataCreateIndexService; + private final boolean isDslOnlyMode; public MetadataMigrateToDataStreamService( ThreadPool threadPool, @@ -74,6 +76,7 @@ public MetadataMigrateToDataStreamService( this.indexServices = indexServices; this.threadContext = threadPool.getThreadContext(); this.metadataCreateIndexService = metadataCreateIndexService; + this.isDslOnlyMode = isDataStreamsLifecycleOnlyMode(clusterService.getSettings()); } public void migrateToDataStream( @@ -104,7 +107,7 @@ public void migrateToDataStream( @Override public ClusterState execute(ClusterState currentState) throws Exception { - ClusterState clusterState = migrateToDataStream(currentState, indexMetadata -> { + ClusterState clusterState = migrateToDataStream(currentState, isDslOnlyMode, indexMetadata -> { try { return indexServices.createIndexMapperServiceForValidation(indexMetadata); } catch (IOException e) { @@ -125,6 +128,7 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String static ClusterState migrateToDataStream( ClusterState currentState, + boolean isDslOnlyMode, Function mapperSupplier, MigrateToDataStreamClusterStateUpdateRequest request, MetadataCreateIndexService metadataCreateIndexService, @@ -155,6 +159,7 @@ static ClusterState migrateToDataStream( return createDataStream( metadataCreateIndexService, currentState, + isDslOnlyMode, req, backingIndices, currentState.metadata().index(writeIndex), diff --git 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java index 5f477a9ca66df..ea0ee630ef073 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java @@ -8,7 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -16,6 +15,7 @@ import org.elasticsearch.common.unit.RelativeByteSizeValue; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import java.util.Iterator; import java.util.List; @@ -152,7 +152,11 @@ public class DiskThresholdSettings { private volatile TimeValue rerouteInterval; static { - assert Version.CURRENT.major == Version.V_7_0_0.major + 1; // this check is unnecessary in v9 + checkAutoReleaseIndexEnabled(); + } + + @UpdateForV9 // this check is unnecessary in v9 + private static void checkAutoReleaseIndexEnabled() { final String AUTO_RELEASE_INDEX_ENABLED_KEY = "es.disk.auto_release_flood_stage_block"; final String property = System.getProperty(AUTO_RELEASE_INDEX_ENABLED_KEY); if (property != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java index 7d24872cf51dc..60a6ec2e49899 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java @@ -51,6 +51,7 @@ public class DesiredBalanceComputer { private final ThreadPool threadPool; private final ShardsAllocator delegateAllocator; + // stats protected final MeanMetric iterations = new MeanMetric(); public static final Setting PROGRESS_LOG_INTERVAL_SETTING = Setting.timeSetting( diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 6fac97e34d022..8df50196c5d4b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -34,6 +34,7 @@ import java.util.Comparator; import java.util.Iterator; import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -69,6 +70,20 @@ public class DesiredBalanceReconciler { private final NodeAllocationOrdering allocationOrdering = new NodeAllocationOrdering(); private final NodeAllocationOrdering moveOrdering = new NodeAllocationOrdering(); + // stats + /** + * Number of unassigned shards during last reconciliation + */ + protected final AtomicLong unassignedShards = new AtomicLong(); + /** + * Total number of assigned shards during last reconciliation + */ + protected final AtomicLong totalAllocations = new AtomicLong(); + /** + * Number of assigned 
shards during last reconciliation that are not allocated on desired node and need to be moved + */ + protected final AtomicLong undesiredAllocations = new AtomicLong(); + public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool threadPool) { this.undesiredAllocationLogInterval = new FrequencyCappedAction(threadPool); clusterSettings.initializeAndWatch(UNDESIRED_ALLOCATIONS_LOG_INTERVAL_SETTING, this.undesiredAllocationLogInterval::setMinInterval); @@ -445,8 +460,9 @@ private void balance() { return; } - long allAllocations = 0; - long undesiredAllocations = 0; + int unassignedShards = routingNodes.unassigned().size() + routingNodes.unassigned().ignored().size(); + int totalAllocations = 0; + int undesiredAllocations = 0; // Iterate over all started shards and try to move any which are on undesired nodes. In the presence of throttling shard // movements, the goal of this iteration order is to achieve a fairer movement of shards from the nodes that are offloading the @@ -454,7 +470,7 @@ private void balance() { for (final var iterator = OrderedShardsIterator.create(routingNodes, moveOrdering); iterator.hasNext();) { final var shardRouting = iterator.next(); - allAllocations++; + totalAllocations++; if (shardRouting.started() == false) { // can only rebalance started shards @@ -504,10 +520,14 @@ private void balance() { } } - maybeLogUndesiredAllocationsWarning(allAllocations, undesiredAllocations, routingNodes.size()); + DesiredBalanceReconciler.this.unassignedShards.set(unassignedShards); + DesiredBalanceReconciler.this.undesiredAllocations.set(undesiredAllocations); + DesiredBalanceReconciler.this.totalAllocations.set(totalAllocations); + + maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocations, routingNodes.size()); } - private void maybeLogUndesiredAllocationsWarning(long allAllocations, long undesiredAllocations, int nodeCount) { + private void maybeLogUndesiredAllocationsWarning(int allAllocations, int undesiredAllocations, int nodeCount) { // more shards than cluster can relocate with one reroute final boolean nonEmptyRelocationBacklog = undesiredAllocations > 2L * nodeCount; final boolean warningThresholdReached = undesiredAllocations > undesiredAllocationsLogThreshold * allAllocations; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 11d2317f5bcea..2319bcbef3383 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -272,7 +272,10 @@ public DesiredBalanceStats getStats() { desiredBalanceComputer.iterations.sum(), computedShardMovements.sum(), cumulativeComputationTime.count(), - cumulativeReconciliationTime.count() + cumulativeReconciliationTime.count(), + desiredBalanceReconciler.unassignedShards.get(), + desiredBalanceReconciler.totalAllocations.get(), + desiredBalanceReconciler.undesiredAllocations.get() ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java index b8a1d3e1b899d..6a08b896136d2 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java @@ -19,6 +19,8 @@ import java.io.IOException; +import static org.elasticsearch.TransportVersions.ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS; + public record DesiredBalanceStats( long lastConvergedIndex, boolean computationActive, @@ -28,7 +30,10 @@ public record DesiredBalanceStats( long computationIterations, long computedShardMovements, long cumulativeComputationTime, - long cumulativeReconciliationTime + long cumulativeReconciliationTime, + long unassignedShards, + long totalAllocations, + long undesiredAllocations ) implements Writeable, ToXContentObject { private static final TransportVersion COMPUTED_SHARD_MOVEMENTS_VERSION = TransportVersions.V_8_8_0; @@ -50,7 +55,10 @@ public static DesiredBalanceStats readFrom(StreamInput in) throws IOException { in.readVLong(), in.getTransportVersion().onOrAfter(COMPUTED_SHARD_MOVEMENTS_VERSION) ? in.readVLong() : -1, in.readVLong(), - in.readVLong() + in.readVLong(), + in.getTransportVersion().onOrAfter(ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS) ? in.readVLong() : -1, + in.getTransportVersion().onOrAfter(ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS) ? in.readVLong() : -1, + in.getTransportVersion().onOrAfter(ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS) ? in.readVLong() : -1 ); } @@ -67,6 +75,11 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeVLong(cumulativeComputationTime); out.writeVLong(cumulativeReconciliationTime); + if (out.getTransportVersion().onOrAfter(ADDITIONAL_DESIRED_BALANCE_RECONCILIATION_STATS)) { + out.writeVLong(unassignedShards); + out.writeVLong(totalAllocations); + out.writeVLong(undesiredAllocations); + } } @Override @@ -81,7 +94,21 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("computed_shard_movements", computedShardMovements); builder.humanReadableField("computation_time_in_millis", "computation_time", new TimeValue(cumulativeComputationTime)); builder.humanReadableField("reconciliation_time_in_millis", "reconciliation_time", new TimeValue(cumulativeReconciliationTime)); + builder.field("unassigned_shards", unassignedShards); + builder.field("total_allocations", totalAllocations); + builder.field("undesired_allocations", undesiredAllocations); + builder.field("undesired_allocations_fraction", undesiredAllocationsFraction()); builder.endObject(); return builder; } + + public double undesiredAllocationsFraction() { + if (unassignedShards == -1 || totalAllocations == -1 || undesiredAllocations == -1) { + return -1.0; + } else if (totalAllocations == 0) { + return 0.0; + } else { + return (double) undesiredAllocations / totalAllocations; + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index fe001480e5f46..0e0d15a02d042 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterInfo; import 
org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -27,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.UpdateForV9; import java.util.Map; @@ -69,6 +69,7 @@ public class DiskThresholdDecider extends AllocationDecider { public static final String NAME = "disk_threshold"; + @UpdateForV9 public static final Setting ENABLE_FOR_SINGLE_DATA_NODE = Setting.boolSetting( "cluster.routing.allocation.disk.watermark.enable_for_single_data_node", true, @@ -98,7 +99,6 @@ public void validate(Boolean value) { public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings) { this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings); - assert Version.CURRENT.major < 9 : "remove enable_for_single_data_node in 9"; // get deprecation warnings. boolean enabledForSingleDataNode = ENABLE_FOR_SINGLE_DATA_NODE.get(settings); assert enabledForSingleDataNode; diff --git a/server/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java b/server/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java index 6b702f41e7c5d..60e6fa5fff22a 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java @@ -22,6 +22,11 @@ public ModulesBuilder add(Module... newModules) { return this; } + public T bindToInstance(Class cls, T instance) { + modules.add(b -> b.bind(cls).toInstance(instance)); + return instance; + } + @Override public Iterator iterator() { return modules.iterator(); diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 69e61e7e70001..eb81fe01e57c2 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -80,6 +80,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexingPressure; +import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; @@ -577,6 +578,8 @@ public void apply(Settings value, Settings current, Settings previous) { IndicesClusterStateService.SHARD_LOCK_RETRY_TIMEOUT_SETTING, IngestSettings.GROK_WATCHDOG_INTERVAL, IngestSettings.GROK_WATCHDOG_MAX_EXECUTION_TIME, - TDigestExecutionHint.SETTING + TDigestExecutionHint.SETTING, + MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT_SETTING, + MergePolicyConfig.DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT_SETTING ).filter(Objects::nonNull).collect(Collectors.toSet()); } diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index fabc10e336368..08a07241a9ebb 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.coordination.Coordinator; @@ -35,6 +34,8 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayMetaState; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.NodeHealthService; @@ -112,7 +113,7 @@ public DiscoveryModule( NodeHealthService nodeHealthService, CircuitBreakerService circuitBreakerService, CompatibilityVersions compatibilityVersions, - Set features + FeatureService featureService ) { final Collection> joinValidators = new ArrayList<>(); final Map> hostProviders = new HashMap<>(); @@ -172,19 +173,7 @@ public DiscoveryModule( throw new IllegalArgumentException("Unknown election strategy " + ELECTION_STRATEGY_SETTING.get(settings)); } - if (LEGACY_MULTI_NODE_DISCOVERY_TYPE.equals(discoveryType)) { - assert Version.CURRENT.major == Version.V_7_0_0.major + 1; - DeprecationLogger.getLogger(DiscoveryModule.class) - .critical( - DeprecationCategory.SETTINGS, - "legacy-discovery-type", - "Support for setting [{}] to [{}] is deprecated and will be removed in a future version. Set this setting to [{}] " - + "instead.", - DISCOVERY_TYPE_SETTING.getKey(), - LEGACY_MULTI_NODE_DISCOVERY_TYPE, - MULTI_NODE_DISCOVERY_TYPE - ); - } + checkLegacyMultiNodeDiscoveryType(discoveryType); this.reconfigurator = getReconfigurator(settings, clusterSettings, clusterCoordinationPlugins); var preVoteCollectorFactory = getPreVoteCollectorFactory(clusterCoordinationPlugins); @@ -215,7 +204,7 @@ public DiscoveryModule( leaderHeartbeatService, preVoteCollectorFactory, compatibilityVersions, - features + featureService ); } else { throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); @@ -224,6 +213,22 @@ public DiscoveryModule( logger.info("using discovery type [{}] and seed hosts providers {}", discoveryType, seedProviderNames); } + @UpdateForV9 + private static void checkLegacyMultiNodeDiscoveryType(String discoveryType) { + if (LEGACY_MULTI_NODE_DISCOVERY_TYPE.equals(discoveryType)) { + DeprecationLogger.getLogger(DiscoveryModule.class) + .critical( + DeprecationCategory.SETTINGS, + "legacy-discovery-type", + "Support for setting [{}] to [{}] is deprecated and will be removed in a future version. 
Set this setting to [{}] " + + "instead.", + DISCOVERY_TYPE_SETTING.getKey(), + LEGACY_MULTI_NODE_DISCOVERY_TYPE, + MULTI_NODE_DISCOVERY_TYPE + ); + } + } + // visible for testing static Reconfigurator getReconfigurator( Settings settings, diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index 77415bbaea949..f810612bf4666 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -10,6 +10,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -162,15 +163,15 @@ public void setOldestIndexVersion(int oldestIndexVersion) { this.oldestIndexVersion = IndexVersion.fromId(oldestIndexVersion); } + private Version getVersionOrFallbackToEmpty() { + return Objects.requireNonNullElse(this.nodeVersion, Version.V_EMPTY); + } + public NodeMetadata build() { - final Version nodeVersion; + @UpdateForV9 // version is required in the node metadata from v9 onwards + final Version nodeVersion = getVersionOrFallbackToEmpty(); final IndexVersion oldestIndexVersion; - if (this.nodeVersion == null) { - assert Version.CURRENT.major <= Version.V_7_0_0.major + 1 : "version is required in the node metadata from v9 onwards"; - nodeVersion = Version.V_EMPTY; - } else { - nodeVersion = this.nodeVersion; - } + if (this.previousNodeVersion == null) { previousNodeVersion = nodeVersion; } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java index 5d7632a91b0b8..d88589ac1ede8 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureService.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -39,13 +39,13 @@ public class FeatureService { public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0; private final NavigableMap> historicalFeatures; - private final Set nodeFeatures; + private final Map nodeFeatures; public FeatureService(List specs) { Map allFeatures = new HashMap<>(); NavigableMap> historicalFeatures = new TreeMap<>(); - Set nodeFeatures = new HashSet<>(); + Map nodeFeatures = new HashMap<>(); for (FeatureSpecification spec : specs) { for (var hfe : spec.getHistoricalFeatures().entrySet()) { FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec); @@ -78,14 +78,14 @@ public FeatureService(List specs) { ); } - nodeFeatures.add(f.id()); + nodeFeatures.put(f.id(), f); } } this.historicalFeatures = consolidateHistoricalFeatures(historicalFeatures); - this.nodeFeatures = Set.copyOf(nodeFeatures); + this.nodeFeatures = Map.copyOf(nodeFeatures); - logger.info("Registered local node features {}", nodeFeatures.stream().sorted().toList()); + logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList()); } private static NavigableMap> consolidateHistoricalFeatures( @@ -104,7 +104,7 @@ private static NavigableMap> consolidateHistoricalFeatures( /** * The non-historical features supported by this node. 
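The getNodeFeatures() accessor documented here now returns a map keyed by feature id rather than a bare set of ids (see the signature change just below), so callers can reach the NodeFeature objects themselves. A hypothetical caller that only needs the old set view (featureService is assumed to be in scope; the feature id is illustrative):

    Set<String> featureIds = featureService.getNodeFeatures().keySet();
    boolean supported = featureIds.contains("example_feature"); // same membership check as before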
*/ - public Set getNodeFeatures() { + public Map getNodeFeatures() { return nodeFeatures; } diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index a7cf7299a8502..e7b8eadb3f771 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.node.Node; @@ -184,7 +185,7 @@ private PersistedState createOnDiskPersistedState( long currentTerm = onDiskState.currentTerm; if (onDiskState.empty()) { - assert Version.CURRENT.major <= Version.V_7_0_0.major + 1 : "legacy metadata loader is not needed anymore from v9 onwards"; + @UpdateForV9 // legacy metadata loader is not needed anymore from v9 onwards final Tuple legacyState = metaStateService.loadFullState(); if (legacyState.v1().isEmpty() == false) { metadata = legacyState.v2(); diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 4ba7c91d411f3..1db0ec7346a32 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -54,6 +55,7 @@ public MetaStateService(NodeEnvironment nodeEnv, NamedXContentRegistry namedXCon * meta state with globalGeneration -1 and empty meta data is returned. * @throws IOException if some IOException when loading files occurs or there is no metadata referenced by manifest file. 
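Throughout this change, inline guards of the form assert Version.CURRENT.major < 9 are replaced by the searchable @UpdateForV9 marker annotation, as in the surrounding GatewayMetaState and MetaStateService hunks. A minimal sketch of the pattern (the class and method here are hypothetical, not part of this change):

    import org.elasticsearch.core.UpdateForV9;

    final class LegacyStateLoader {
        @UpdateForV9 // flagged for deletion once v9 no longer needs to read the pre-8.0 on-disk format
        static boolean legacyFormatSupported() {
            return true; // placeholder for the legacy loading path
        }
    }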
*/ + @UpdateForV9 public Tuple loadFullState() throws IOException { final Manifest manifest = Manifest.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.nodeDataPaths()); if (manifest == null) { diff --git a/server/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/server/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 5e8fb556b2089..9991d42e013e3 100644 --- a/server/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/server/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -9,13 +9,11 @@ package org.elasticsearch.index; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; public abstract class AbstractIndexComponent { protected final Logger logger; - protected final DeprecationLogger deprecationLogger; protected final IndexSettings indexSettings; /** @@ -23,7 +21,6 @@ public abstract class AbstractIndexComponent { */ protected AbstractIndexComponent(IndexSettings indexSettings) { this.logger = Loggers.getLogger(getClass(), indexSettings.getIndex()); - this.deprecationLogger = DeprecationLogger.getLogger(getClass()); this.indexSettings = indexSettings; } diff --git a/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index bd228db91c0e1..e6b2a861458d0 100644 --- a/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -111,17 +111,33 @@ public final class MergePolicyConfig { private final Logger logger; private final boolean mergesEnabled; private volatile Type mergePolicyType; + private final ByteSizeValue defaultMaxMergedSegment; + private final ByteSizeValue defaultMaxTimeBasedMergedSegment; public static final double DEFAULT_EXPUNGE_DELETES_ALLOWED = 10d; public static final ByteSizeValue DEFAULT_FLOOR_SEGMENT = new ByteSizeValue(2, ByteSizeUnit.MB); public static final int DEFAULT_MAX_MERGE_AT_ONCE = 10; public static final ByteSizeValue DEFAULT_MAX_MERGED_SEGMENT = new ByteSizeValue(5, ByteSizeUnit.GB); + public static final Setting DEFAULT_MAX_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting( + "indices.merge.policy.max_merged_segment", + DEFAULT_MAX_MERGED_SEGMENT, + ByteSizeValue.ofBytes(1L), + ByteSizeValue.ofBytes(Long.MAX_VALUE), + Setting.Property.NodeScope + ); /** * Time-based data generally gets rolled over, so there is not much value in enforcing a maximum segment size, which has the side effect * of merging fewer segments together than the merge factor, which in-turn increases write amplification. So we set an arbitrarily high * roof that serves as a protection that we expect to never hit. */ public static final ByteSizeValue DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT = new ByteSizeValue(100, ByteSizeUnit.GB); + public static final Setting DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting( + "indices.merge.policy.max_time_based_merged_segment", + DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT, + ByteSizeValue.ofBytes(1L), + ByteSizeValue.ofBytes(Long.MAX_VALUE), + Setting.Property.NodeScope + ); public static final double DEFAULT_SEGMENTS_PER_TIER = 10.0d; /** * A default value for {@link LogByteSizeMergePolicy}'s merge factor: 32. 
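The two node-scope settings registered above give operators cluster-wide defaults for the maximum merged segment size; a per-index max_merged_segment of 0 bytes still means "unset" and now falls back to them instead of the hard-coded constants. A sketch of how the defaults could be supplied (the values are illustrative, not recommendations):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.ByteSizeValue;
    import org.elasticsearch.index.MergePolicyConfig;

    Settings nodeSettings = Settings.builder()
        .put("indices.merge.policy.max_merged_segment", "4gb")
        .put("indices.merge.policy.max_time_based_merged_segment", "50gb")
        .build();
    ByteSizeValue defaultMax = MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT_SETTING.get(nodeSettings);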
This default value differs from the Lucene default of 10 in @@ -262,8 +278,8 @@ MergePolicy getMergePolicy(MergePolicyConfig config, boolean isTimeBasedIndex) { double forceMergeDeletesPctAllowed = indexSettings.getValue(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING); // percentage ByteSizeValue floorSegment = indexSettings.getValue(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING); int maxMergeAtOnce = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING); - // TODO is this really a good default number for max_merge_segment, what happens for large indices, - // won't they end up with many segments? + this.defaultMaxMergedSegment = DEFAULT_MAX_MERGED_SEGMENT_SETTING.get(indexSettings.getNodeSettings()); + this.defaultMaxTimeBasedMergedSegment = DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT_SETTING.get(indexSettings.getNodeSettings()); ByteSizeValue maxMergedSegment = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING); double segmentsPerTier = indexSettings.getValue(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING); int mergeFactor = indexSettings.getValue(INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING); @@ -315,8 +331,8 @@ void setMergeFactor(int mergeFactor) { void setMaxMergedSegment(ByteSizeValue maxMergedSegment) { // We use 0 as a placeholder for "unset". if (maxMergedSegment.getBytes() == 0) { - tieredMergePolicy.setMaxMergedSegmentMB(DEFAULT_MAX_MERGED_SEGMENT.getMbFrac()); - timeBasedMergePolicy.setMaxMergeMB(DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT.getMbFrac()); + tieredMergePolicy.setMaxMergedSegmentMB(defaultMaxMergedSegment.getMbFrac()); + timeBasedMergePolicy.setMaxMergeMB(defaultMaxTimeBasedMergedSegment.getMbFrac()); } else { tieredMergePolicy.setMaxMergedSegmentMB(maxMergedSegment.getMbFrac()); timeBasedMergePolicy.setMaxMergeMB(maxMergedSegment.getMbFrac()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 1949249b9be2d..5a0d9c7c0cf79 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -954,7 +954,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { throw new IllegalArgumentException( "fetching values from a text field [" + name() - + "] is supported because synthetic _source is enabled and we don't have a way to load the fields" + + "] is not supported because synthetic _source is enabled and we don't have a way to load the fields" ); } return BlockSourceReader.bytesRefs(SourceValueFetcher.toString(blContext.sourcePaths(name()))); @@ -1019,7 +1019,7 @@ protected BytesRef storedToBytesRef(Object stored) { throw new IllegalArgumentException( "fetching values from a text field [" + name() - + "] is supported because synthetic _source is enabled and we don't have a way to load the fields" + + "] is not supported because synthetic _source is enabled and we don't have a way to load the fields" ); } return new SourceValueFetcherSortedBinaryIndexFieldData.Builder( diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 2e39b13b34c78..0b3b15670ef78 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.common.util.Maps; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.WriteStateException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersions; @@ -466,13 +467,13 @@ public RetentionLeases loadRetentionLeases(final Path path) throws IOException { synchronized (retentionLeasePersistenceLock) { retentionLeases = RetentionLeases.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, path); } + return emptyIfNull(retentionLeases); + } - // TODO after backporting we expect this never to happen in 8.x, so adjust this to throw an exception instead. - assert Version.CURRENT.major <= 8 : "throw an exception instead of returning EMPTY on null"; - if (retentionLeases == null) { - return RetentionLeases.EMPTY; - } - return retentionLeases; + @UpdateForV9 + private static RetentionLeases emptyIfNull(RetentionLeases retentionLeases) { + // we expect never to see a null in 8.x, so adjust this to throw an exception from v9 onwards. + return retentionLeases == null ? RetentionLeases.EMPTY : retentionLeases; } private final Object retentionLeasePersistenceLock = new Object(); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8d10e5a79d065..8b6f6afb72042 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -3823,16 +3823,18 @@ && isSearchIdle() // lets skip this refresh since we are search idle and // don't necessarily need to refresh. the next searcher access will register a refreshListener and that will // cause the next schedule to refresh. 
+ logger.trace("scheduledRefresh: search-idle, skipping refresh"); engine.maybePruneDeletes(); // try to prune the deletes in the engine if we accumulated some setRefreshPending(engine); l.onResponse(false); return; } else { - logger.trace("refresh with source [schedule]"); + logger.trace("scheduledRefresh: refresh with source [schedule]"); engine.maybeRefresh("schedule", l.map(Engine.RefreshResult::refreshed)); return; } } + logger.trace("scheduledRefresh: no refresh needed"); engine.maybePruneDeletes(); // try to prune the deletes in the engine if we accumulated some l.onResponse(false); }); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 56b0a07fcbc71..1da3e2dac261a 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; @@ -49,7 +50,6 @@ import java.util.Map.Entry; import java.util.Optional; import java.util.Set; -import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -929,7 +929,7 @@ public static void cleanUpFeature( Metadata metadata = clusterService.state().getMetadata(); final List exceptions = new ArrayList<>(); - final Consumer handleResponse = resetFeatureStateStatus -> { + final CheckedConsumer handleResponse = resetFeatureStateStatus -> { if (resetFeatureStateStatus.getStatus() == ResetFeatureStateStatus.Status.FAILURE) { synchronized (exceptions) { exceptions.add(resetFeatureStateStatus.getException()); diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 2d7ee9f210e64..634b8af38a317 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import java.io.Closeable; +import java.util.List; import java.util.Map; import java.util.Set; @@ -61,7 +62,12 @@ public interface InferenceService extends Closeable { * @param taskSettings Settings in the request to override the model's defaults * @param listener Inference result listener */ - void infer(Model model, String input, Map taskSettings, ActionListener listener); + void infer( + Model model, + List input, + Map taskSettings, + ActionListener> listener + ); /** * Start or prepare the model for use. 
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index a982395705251..8f557b4e9db5c 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -245,11 +245,16 @@ static NodeConstruction prepareConstruction( List closeables = new ArrayList<>(); try { NodeConstruction constructor = new NodeConstruction(closeables); + Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); + ThreadPool threadPool = constructor.createThreadPool(settings); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); - constructor.construct(threadPool, settingsModule, serviceProvider, forbidPrivateIndexSettings); + SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); + constructor.createClientAndRegistries(settingsModule.getSettings(), threadPool, searchModule); + + constructor.construct(threadPool, settingsModule, searchModule, serviceProvider, forbidPrivateIndexSettings); return constructor; } catch (IOException e) { @@ -268,6 +273,7 @@ static NodeConstruction prepareConstruction( private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(Node.class); private final List resourcesToClose; + private final ModulesBuilder modules = new ModulesBuilder(); /* * References for storing in a Node */ @@ -335,7 +341,7 @@ private Optional getSinglePlugin(Class pluginClass) { return getSinglePlugin(pluginsService.filterPlugins(pluginClass), pluginClass); } - private Optional getSinglePlugin(Stream plugins, Class pluginClass) { + private static Optional getSinglePlugin(Stream plugins, Class pluginClass) { var it = plugins.iterator(); if (it.hasNext() == false) { return Optional.empty(); @@ -345,7 +351,7 @@ private Optional getSinglePlugin(Stream plugins, Class pluginClass) List allPlugins = new ArrayList<>(); allPlugins.add(plugin); it.forEachRemaining(allPlugins::add); - throw new IllegalStateException("A single " + pluginClass.getName() + " was expected but got :" + allPlugins); + throw new IllegalStateException("A single " + pluginClass.getName() + " was expected but got " + allPlugins); } return Optional.of(plugin); } @@ -423,6 +429,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr ); pluginsService = serviceProvider.newPluginService(initialEnvironment, envSettings); + modules.bindToInstance(PluginsService.class, pluginsService); Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), envSettings); /* @@ -431,6 +438,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr */ environment = new Environment(settings, initialEnvironment.configFile()); Environment.assertEquivalent(initialEnvironment, environment); + modules.bindToInstance(Environment.class, environment); return settings; } @@ -441,6 +449,7 @@ private ThreadPool createThreadPool(Settings settings) throws IOException { pluginsService.flatMap(p -> p.getExecutorBuilders(settings)).toArray(ExecutorBuilder[]::new) ); resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS)); + modules.bindToInstance(ThreadPool.class, threadPool); // adds the context to the DeprecationLogger so that it does not need to be injected everywhere HeaderWarning.setThreadContext(threadPool.getThreadContext()); @@ -457,7 +466,7 @@ private 
SettingsModule validateSettings(Settings envSettings, Settings settings, } SettingsExtension.load().forEach(e -> additionalSettings.addAll(e.getSettings())); - // this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool + // this is as early as we can validate settings at this point. we already pass them to ThreadPool // so we might be late here already SettingsModule settingsModule = new SettingsModule( settings, @@ -479,13 +488,67 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, .collect(Collectors.toCollection(LinkedHashSet::new)) ); resourcesToClose.add(nodeEnvironment); + modules.bindToInstance(NodeEnvironment.class, nodeEnvironment); return settingsModule; } + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool) { + IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); + return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); + } + + /** + * Create various objects that are stored as member variables. This is so they are accessible as soon as possible. + */ + private void createClientAndRegistries(Settings settings, ThreadPool threadPool, SearchModule searchModule) { + client = new NodeClient(settings, threadPool); + modules.add(b -> { + b.bind(Client.class).toInstance(client); + b.bind(NodeClient.class).toInstance(client); + }); + + localNodeFactory = new Node.LocalNodeFactory(settings, nodeEnvironment.nodeId()); + + InferenceServiceRegistry inferenceServiceRegistry = new InferenceServiceRegistry( + pluginsService.filterPlugins(InferenceServicePlugin.class).toList(), + new InferenceServicePlugin.InferenceServiceFactoryContext(client) + ); + resourcesToClose.add(inferenceServiceRegistry); + modules.bindToInstance(InferenceServiceRegistry.class, inferenceServiceRegistry); + + namedWriteableRegistry = new NamedWriteableRegistry( + Stream.of( + NetworkModule.getNamedWriteables().stream(), + IndicesModule.getNamedWriteables().stream(), + searchModule.getNamedWriteables().stream(), + pluginsService.flatMap(Plugin::getNamedWriteables), + ClusterModule.getNamedWriteables().stream(), + SystemIndexMigrationExecutor.getNamedWriteables().stream(), + inferenceServiceRegistry.getNamedWriteables().stream() + ).flatMap(Function.identity()).toList() + ); + xContentRegistry = new NamedXContentRegistry( + Stream.of( + NetworkModule.getNamedXContents().stream(), + IndicesModule.getNamedXContents().stream(), + searchModule.getNamedXContents().stream(), + pluginsService.flatMap(Plugin::getNamedXContent), + ClusterModule.getNamedXWriteables().stream(), + SystemIndexMigrationExecutor.getNamedXContentParsers().stream(), + HealthNodeTaskExecutor.getNamedXContentParsers().stream() + ).flatMap(Function.identity()).toList() + ); + modules.add(b -> { + b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); + b.bind(NamedXContentRegistry.class).toInstance(xContentRegistry); + }); + } + private void construct( ThreadPool threadPool, SettingsModule settingsModule, + SearchModule searchModule, NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings ) throws IOException { @@ -507,8 +570,6 @@ private void construct( final TaskManager taskManager = new TaskManager(settings, threadPool, taskHeaders, tracer); - client = new NodeClient(settings, threadPool); - final ScriptModule scriptModule = new ScriptModule(settings, pluginsService.filterPlugins(ScriptPlugin.class).toList()); final ScriptService 
scriptService = serviceProvider.newScriptService( pluginsService, @@ -522,7 +583,6 @@ private void construct( pluginsService.filterPlugins(AnalysisPlugin.class).toList(), pluginsService.getStablePluginRegistry() ); - localNodeFactory = new Node.LocalNodeFactory(settings, nodeEnvironment.nodeId()); ScriptModule.registerClusterSettingsListeners(scriptService, settingsModule.getClusterSettings()); final NetworkService networkService = new NetworkService( @@ -546,12 +606,6 @@ private void construct( Supplier documentParsingObserverSupplier = getDocumentParsingObserverSupplier(); - var factoryContext = new InferenceServicePlugin.InferenceServiceFactoryContext(client); - final InferenceServiceRegistry inferenceServiceRegistry = new InferenceServiceRegistry( - pluginsService.filterPlugins(InferenceServicePlugin.class).toList(), - factoryContext - ); - final IngestService ingestService = new IngestService( clusterService, threadPool, @@ -573,30 +627,6 @@ private void construct( ); final UsageService usageService = new UsageService(); - SearchModule searchModule = new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); - IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - namedWriteableRegistry = new NamedWriteableRegistry( - Stream.of( - NetworkModule.getNamedWriteables().stream(), - IndicesModule.getNamedWriteables().stream(), - searchModule.getNamedWriteables().stream(), - pluginsService.flatMap(Plugin::getNamedWriteables), - ClusterModule.getNamedWriteables().stream(), - SystemIndexMigrationExecutor.getNamedWriteables().stream(), - inferenceServiceRegistry.getNamedWriteables().stream() - ).flatMap(Function.identity()).toList() - ); - xContentRegistry = new NamedXContentRegistry( - Stream.of( - NetworkModule.getNamedXContents().stream(), - IndicesModule.getNamedXContents().stream(), - searchModule.getNamedXContents().stream(), - pluginsService.flatMap(Plugin::getNamedXContent), - ClusterModule.getNamedXWriteables().stream(), - SystemIndexMigrationExecutor.getNamedXContentParsers().stream(), - HealthNodeTaskExecutor.getNamedXContentParsers().stream() - ).flatMap(Function.identity()).toList() - ); final List features = pluginsService.filterPlugins(SystemIndexPlugin.class).map(plugin -> { SystemIndices.validateFeatureName(plugin.getFeatureName(), plugin.getClass().getCanonicalName()); return SystemIndices.Feature.fromSystemIndexPlugin(plugin, settings); @@ -604,7 +634,6 @@ private void construct( final SystemIndices systemIndices = new SystemIndices(features); final ExecutorSelector executorSelector = systemIndices.getExecutorSelector(); - ModulesBuilder modules = new ModulesBuilder(); final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool); final FsHealthService fsHealthService = new FsHealthService( settings, @@ -985,9 +1014,9 @@ record PluginServiceInstances( fsHealthService, circuitBreakerService, compatibilityVersions, - featureService.getNodeFeatures() + featureService ); - this.nodeService = new NodeService( + nodeService = new NodeService( settings, threadPool, monitorService, @@ -1073,8 +1102,8 @@ record PluginServiceInstances( loadDiagnosticServices(settings, discoveryModule.getCoordinator(), clusterService, transportService, featureService, threadPool) ); + RecoveryPlannerService recoveryPlannerService = getRecoveryPlannerService(threadPool, clusterService, repositoryService); modules.add(b -> { - RecoveryPlannerService recoveryPlannerService = getRecoveryPlannerService(threadPool, 
clusterService, repositoryService); serviceProvider.processRecoverySettings(pluginsService, settingsModule.getClusterSettings(), recoverySettings); SnapshotFilesProvider snapshotFilesProvider = new SnapshotFilesProvider(repositoryService); var peerRecovery = new PeerRecoverySourceService( @@ -1103,13 +1132,6 @@ record PluginServiceInstances( modules.add(b -> { b.bind(NodeService.class).toInstance(nodeService); - b.bind(NamedXContentRegistry.class).toInstance(xContentRegistry); - b.bind(PluginsService.class).toInstance(pluginsService); - b.bind(Client.class).toInstance(client); - b.bind(NodeClient.class).toInstance(client); - b.bind(Environment.class).toInstance(environment); - b.bind(ThreadPool.class).toInstance(threadPool); - b.bind(NodeEnvironment.class).toInstance(nodeEnvironment); b.bind(ResourceWatcherService.class).toInstance(resourceWatcherService); b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService); b.bind(BigArrays.class).toInstance(bigArrays); @@ -1120,7 +1142,6 @@ record PluginServiceInstances( b.bind(IndexingPressure.class).toInstance(indexingLimits); b.bind(UsageService.class).toInstance(usageService); b.bind(AggregationUsageService.class).toInstance(searchModule.getValuesSourceRegistry().getUsageService()); - b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); b.bind(MetadataUpgrader.class).toInstance(metadataUpgrader); b.bind(MetaStateService.class).toInstance(metaStateService); b.bind(PersistedClusterStateService.class).toInstance(persistedClusterStateService); @@ -1164,13 +1185,12 @@ record PluginServiceInstances( b.bind(FileSettingsService.class).toInstance(fileSettingsService); b.bind(WriteLoadForecaster.class).toInstance(writeLoadForecaster); b.bind(CompatibilityVersions.class).toInstance(compatibilityVersions); - b.bind(InferenceServiceRegistry.class).toInstance(inferenceServiceRegistry); }); if (ReadinessService.enabled(environment)) { - modules.add( - b -> b.bind(ReadinessService.class) - .toInstance(serviceProvider.newReadinessService(pluginsService, clusterService, environment)) + modules.bindToInstance( + ReadinessService.class, + serviceProvider.newReadinessService(pluginsService, clusterService, environment) ); } diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index 6074cc0e4ea35..6c6573852700c 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -66,14 +66,26 @@ public void setTaskId(String taskId) { this.taskId = taskId; } + public String getTaskId() { + return taskId; + } + public void setAllocationId(long allocationId) { this.allocationId = allocationId; } + public long getAllocationId() { + return allocationId; + } + public void setState(PersistentTaskState state) { this.state = state; } + public PersistentTaskState getState() { + return state; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 4167717e09006..a53674882c84d 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ 
b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -70,14 +70,15 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThrottledIterator; import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; @@ -455,6 +456,7 @@ protected void doStop() {} @Override protected void doClose() { + activityRefs.decRef(); BlobStore store; // to close blobStore if blobStore initialization is started during close synchronized (lock) { @@ -469,28 +471,14 @@ protected void doClose() { } } - // listeners to invoke when a restore completes and there are no more restores running - @Nullable - private List> emptyListeners; + private final SubscribableListener closedAndIdleListeners = new SubscribableListener<>(); - // Set of shard ids that this repository is currently restoring - private final Set ongoingRestores = new HashSet<>(); + private final RefCounted activityRefs = AbstractRefCounted.of(() -> closedAndIdleListeners.onResponse(null)); @Override public void awaitIdle() { - assert lifecycle.stoppedOrClosed(); - final PlainActionFuture future; - synchronized (ongoingRestores) { - if (ongoingRestores.isEmpty()) { - return; - } - future = new PlainActionFuture<>(); - if (emptyListeners == null) { - emptyListeners = new ArrayList<>(); - } - emptyListeners.add(future); - } - FutureUtils.get(future); + assert lifecycle.closed(); + PlainActionFuture.get(closedAndIdleListeners::addListener); } @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here @@ -3305,30 +3293,19 @@ public void restoreShard( ); final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); final BlobContainer container = shardContainer(indexId, snapshotShardId); - synchronized (ongoingRestores) { - if (store.isClosing()) { - restoreListener.onFailure(new AlreadyClosedException("store is closing")); - return; - } - if (lifecycle.started() == false) { - restoreListener.onFailure(new AlreadyClosedException("repository [" + metadata.name() + "] closed")); - return; - } - final boolean added = ongoingRestores.add(shardId); - assert added : "add restore for [" + shardId + "] that already has an existing restore"; + if (store.isClosing()) { + restoreListener.onFailure(new AlreadyClosedException("store is closing")); + return; } - executor.execute(ActionRunnable.wrap(ActionListener.runBefore(restoreListener, () -> { - final List> onEmptyListeners; - synchronized (ongoingRestores) { - if (ongoingRestores.remove(shardId) && ongoingRestores.isEmpty() && emptyListeners != null) { - onEmptyListeners = emptyListeners; - emptyListeners = null; - } else { - return; - } - } - ActionListener.onResponse(onEmptyListeners, null); - }), l -> { + if (lifecycle.started() == false) { + restoreListener.onFailure(new 
AlreadyClosedException("repository [" + metadata.name() + "] closed")); + return; + } + if (activityRefs.tryIncRef() == false) { + restoreListener.onFailure(new AlreadyClosedException("repository [" + metadata.name() + "] closing")); + return; + } + executor.execute(ActionRunnable.wrap(ActionListener.runBefore(restoreListener, activityRefs::decRef), l -> { final BlobStoreIndexShardSnapshot snapshot = loadShardSnapshot(container, snapshotId); final SnapshotFiles snapshotFiles = new SnapshotFiles(snapshot.snapshot(), snapshot.indexFiles(), null); new FileRestoreContext(metadata.name(), shardId, snapshotId, recoveryState) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 6ee02fa9425c0..73350d60b256c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -772,7 +772,8 @@ public void executeQueryPhase(QuerySearchRequest request, SearchShardTask task, ) { searchContext.searcher().setAggregatedDfs(request.dfs()); QueryPhase.execute(searchContext); - if (searchContext.queryResult().hasSearchContext() == false && readerContext.singleSession()) { + final QuerySearchResult queryResult = searchContext.queryResult(); + if (queryResult.hasSearchContext() == false && readerContext.singleSession()) { // no hits, we can release the context since there will be no fetch phase freeReaderContext(readerContext.id()); } @@ -781,10 +782,10 @@ public void executeQueryPhase(QuerySearchRequest request, SearchShardTask task, // and receive them back in the fetch phase. // We also pass the rescoreDocIds to the LegacyReaderContext in case the search state needs to stay in the data node. 
final RescoreDocIds rescoreDocIds = searchContext.rescoreDocIds(); - searchContext.queryResult().setRescoreDocIds(rescoreDocIds); + queryResult.setRescoreDocIds(rescoreDocIds); readerContext.setRescoreDocIds(rescoreDocIds); - searchContext.queryResult().incRef(); - return searchContext.queryResult(); + queryResult.incRef(); + return queryResult; } catch (Exception e) { assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); logger.trace("Query phase failed", e); @@ -1321,11 +1322,9 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.addQuerySearchResultReleasable(aggContext); try { final AggregatorFactories factories = source.aggregations().build(aggContext, null); - final Supplier supplier = () -> aggReduceContextBuilder( - context::isCancelled, - source.aggregations() + context.aggregations( + new SearchContextAggregations(factories, () -> aggReduceContextBuilder(context::isCancelled, source.aggregations())) ); - context.aggregations(new SearchContextAggregations(factories, supplier)); } catch (IOException e) { throw new AggregationInitializationException("Failed to create aggregators", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index c9f937b489a73..ff1ca58d351e3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -26,6 +26,18 @@ public abstract class InternalMultiBucketAggregation< A extends InternalMultiBucketAggregation, B extends InternalMultiBucketAggregation.InternalBucket> extends InternalAggregation implements MultiBucketsAggregation { + /** + * When we pre-count the empty buckets we report them periodically + * because you can configure the date_histogram to create an astounding + * number of buckets. It'd take a while to count that high only to abort. + * So we report every couple thousand buckets. It'd be simpler to report + * every single bucket we plan to allocate one at a time but that'd cause + * needless overhead on the circuit breakers. Counting a couple thousand + * buckets is plenty fast to fail this quickly in pathological cases and + * plenty large to keep the overhead minimal. + */ + protected static final int REPORT_EMPTY_EVERY = 10_000; + public InternalMultiBucketAggregation(String name, Map metadata) { super(name, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index ed883a4b04d6b..4ffc9abdc2202 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -373,18 +373,6 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con return createBucket(buckets.get(0).key, docCount, aggs); } - /** - * When we pre-count the empty buckets we report them periodically - * because you can configure the date_histogram to create an astounding - * number of buckets. It'd take a while to count that high only to abort. - * So we report every couple thousand buckets. 
It's be simpler to report - * every single bucket we plan to allocate one at a time but that'd cause - * needless overhead on the circuit breakers. Counting a couple thousand - * buckets is plenty fast to fail this quickly in pathological cases and - * plenty large to keep the overhead minimal. - */ - private static final int REPORT_EMPTY_EVERY = 10_000; - private void addEmptyBuckets(List list, AggregationReduceContext reduceContext) { /* * Make sure we have space for the empty buckets we're going to add by diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index caef13221b0f3..6ce723d12db26 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -291,10 +291,11 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent(histogram.buckets.iterator())); + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } + int consumeBucketCount = 0; List reducedBuckets = new ArrayList<>(); if (pq.size() > 0) { // list of buckets coming from different shards that have the same key @@ -310,6 +311,10 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent= minDocCount || reduceContext.isFinalReduce() == false) { reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } } currentBuckets.clear(); key = top.current().key; @@ -330,10 +335,15 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent= minDocCount || reduceContext.isFinalReduce() == false) { reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } } } } + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); return reducedBuckets; } @@ -358,18 +368,6 @@ private double round(double key) { return Math.floor((key - emptyBucketInfo.offset) / emptyBucketInfo.interval) * emptyBucketInfo.interval + emptyBucketInfo.offset; } - /** - * When we pre-count the empty buckets we report them periodically - * because you can configure the histogram to create more buckets than - * there are atoms in the universe. It'd take a while to count that high - * only to abort. So we report every couple thousand buckets. It's be - * simpler to report every single bucket we plan to allocate one at a time - * but that'd cause needless overhead on the circuit breakers. Counting a - * couple thousand buckets is plenty fast to fail this quickly in - * pathological cases and plenty large to keep the overhead minimal. - */ - private static final int REPORT_EMPTY_EVERY = 10_000; - private void addEmptyBuckets(List list, AggregationReduceContext reduceContext) { /* * Make sure we have space for the empty buckets we're going to add by @@ -377,7 +375,7 @@ private void addEmptyBuckets(List list, AggregationReduceContext reduceC * consumeBucketsAndMaybeBreak. 
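As the javadoc hoisted into InternalMultiBucketAggregation explains, bucket reduction now reports to the circuit breaker in batches of REPORT_EMPTY_EVERY rather than one bucket at a time or all at once at the end. The counting pattern that InternalHistogram#reduceBuckets adopts above, shown in isolation (names simplified; reduceContext, buckets, and reducedBuckets are assumed to be in scope):

    int pending = 0;
    for (InternalHistogram.Bucket reduced : buckets) {
        reducedBuckets.add(reduced);
        if (pending++ >= REPORT_EMPTY_EVERY) {              // batch full: tell the breaker
            reduceContext.consumeBucketsAndMaybeBreak(pending);
            pending = 0;                                    // start a new batch
        }
    }
    reduceContext.consumeBucketsAndMaybeBreak(pending);     // account for the final partial batch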
*/ class Counter implements DoubleConsumer { - private int size = list.size(); + private int size = 0; @Override public void accept(double key) { @@ -456,11 +454,9 @@ private void iterateEmptyBuckets(List list, ListIterator iter, D @Override public InternalAggregation reduce(List aggregations, AggregationReduceContext reduceContext) { List reducedBuckets = reduceBuckets(aggregations, reduceContext); - boolean alreadyAccountedForBuckets = false; if (reduceContext.isFinalReduce()) { if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); - alreadyAccountedForBuckets = true; } if (InternalOrder.isKeyDesc(order)) { // we just need to reverse here... @@ -474,9 +470,6 @@ public InternalAggregation reduce(List aggregations, Aggreg CollectionUtil.introSort(reducedBuckets, order.comparator()); } } - if (false == alreadyAccountedForBuckets) { - reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); - } return new InternalHistogram(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 85307a903a3eb..1d32251ffc33a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -93,6 +93,18 @@ public long getDocCount() { return docCount; } + public void setDocCount(long docCount) { + this.docCount = docCount; + } + + public long getBucketOrd() { + return bucketOrd; + } + + public void setBucketOrd(long bucketOrd) { + this.bucketOrd = bucketOrd; + } + @Override public long getDocCountError() { if (showDocCountError == false) { @@ -102,7 +114,7 @@ public long getDocCountError() { } @Override - protected void setDocCountError(long docCountError) { + public void setDocCountError(long docCountError) { this.docCountError = docCountError; } @@ -121,6 +133,10 @@ public Aggregations getAggregations() { return aggregations; } + public void setAggregations(InternalAggregations aggregations) { + this.aggregations = aggregations; + } + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index fe27738fe7589..fa05ffbd58295 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -59,6 +59,14 @@ public Object getKey() { return getKeyAsString(); } + public BytesRef getTermBytes() { + return termBytes; + } + + public void setTermBytes(BytesRef termBytes) { + this.termBytes = termBytes; + } + // this method is needed for scripted numeric aggs @Override public Number getKeyAsNumber() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java index 78d6882472ebd..193f8c04664bf 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java @@ -16,6 +16,7 @@ import 
org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.transport.LeakTracker; import java.io.IOException; @@ -26,14 +27,8 @@ public final class QueryFetchSearchResult extends SearchPhaseResult { private final RefCounted refCounted; public QueryFetchSearchResult(StreamInput in) throws IOException { - super(in); // These get a ref count of 1 when we create them, so we don't need to incRef here - queryResult = new QuerySearchResult(in); - fetchResult = new FetchSearchResult(in); - refCounted = AbstractRefCounted.of(() -> { - queryResult.decRef(); - fetchResult.decRef(); - }); + this(new QuerySearchResult(in), new FetchSearchResult(in)); } public QueryFetchSearchResult(QuerySearchResult queryResult, FetchSearchResult fetchResult) { @@ -42,10 +37,10 @@ public QueryFetchSearchResult(QuerySearchResult queryResult, FetchSearchResult f // We're acquiring a copy, we should incRef it this.queryResult.incRef(); this.fetchResult.incRef(); - refCounted = AbstractRefCounted.of(() -> { + refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> { queryResult.decRef(); fetchResult.decRef(); - }); + })); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java index 9ce93a825f849..86f6db0b681d7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java @@ -37,7 +37,8 @@ public class ShardFetchRequest extends TransportRequest { private final int[] docIds; - private ScoreDoc lastEmittedDoc; + @Nullable + private final ScoreDoc lastEmittedDoc; public ShardFetchRequest(ShardSearchContextId contextId, List docIds, ScoreDoc lastEmittedDoc) { this.contextId = contextId; @@ -60,6 +61,8 @@ public ShardFetchRequest(StreamInput in) throws IOException { lastEmittedDoc = Lucene.readScoreDoc(in); } else if (flag != 0) { throw new IOException("Unknown flag: " + flag); + } else { + lastEmittedDoc = null; } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 3044d15ab8552..01015ec8cc78e 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -94,13 +94,14 @@ static void executeRank(SearchContext searchContext) throws QueryPhaseExecutionE if (searchTimedOut) { break; } - RankSearchContext rankSearchContext = new RankSearchContext(searchContext, rankQuery, rankShardContext.windowSize()); - QueryPhase.addCollectorsAndSearch(rankSearchContext); - QuerySearchResult rrfQuerySearchResult = rankSearchContext.queryResult(); - rrfRankResults.add(rrfQuerySearchResult.topDocs().topDocs); - serviceTimeEWMA += rrfQuerySearchResult.serviceTimeEWMA(); - nodeQueueSize = Math.max(nodeQueueSize, rrfQuerySearchResult.nodeQueueSize()); - searchTimedOut = rrfQuerySearchResult.searchTimedOut(); + try (RankSearchContext rankSearchContext = new RankSearchContext(searchContext, rankQuery, rankShardContext.windowSize())) { + QueryPhase.addCollectorsAndSearch(rankSearchContext); + QuerySearchResult rrfQuerySearchResult = rankSearchContext.queryResult(); + rrfRankResults.add(rrfQuerySearchResult.topDocs().topDocs); + serviceTimeEWMA += rrfQuerySearchResult.serviceTimeEWMA(); + 
nodeQueueSize = Math.max(nodeQueueSize, rrfQuerySearchResult.nodeQueueSize()); + searchTimedOut = rrfQuerySearchResult.searchTimedOut(); + } } querySearchResult.setRankShardResult(rankShardContext.combine(rrfRankResults)); diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index edebf602af188..301d7fb219ca7 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.rank.RankShardResult; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.transport.LeakTracker; import java.io.IOException; import java.util.ArrayList; @@ -104,8 +105,8 @@ public QuerySearchResult(ShardSearchContextId contextId, SearchShardTarget shard setSearchShardTarget(shardTarget); isNull = false; setShardSearchRequest(shardSearchRequest); - this.refCounted = AbstractRefCounted.of(this::close); this.toRelease = new ArrayList<>(); + this.refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> Releasables.close(toRelease))); } private QuerySearchResult(boolean isNull) { @@ -245,10 +246,6 @@ public void releaseAggs() { } } - private void close() { - Releasables.close(toRelease); - } - public void addReleasable(Releasable releasable) { toRelease.add(releasable); } diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java index 84f04283d64e8..ed6fcd16fb5e2 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java @@ -64,6 +64,7 @@ public RankSearchContext(SearchContext parent, Query rankQuery, int windowSize) this.rankQuery = parent.buildFilteredQuery(rankQuery); this.windowSize = windowSize; this.querySearchResult = new QuerySearchResult(parent.readerContext().id(), parent.shardTarget(), parent.request()); + this.addReleasable(querySearchResult::decRef); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/transport/LeakTracker.java b/server/src/main/java/org/elasticsearch/transport/LeakTracker.java similarity index 89% rename from test/framework/src/main/java/org/elasticsearch/transport/LeakTracker.java rename to server/src/main/java/org/elasticsearch/transport/LeakTracker.java index ce82e62df698a..4eefd4cd2080a 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/LeakTracker.java +++ b/server/src/main/java/org/elasticsearch/transport/LeakTracker.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Assertions; +import org.elasticsearch.core.RefCounted; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; @@ -69,6 +71,41 @@ public void reportLeak() { } } + public static RefCounted wrap(RefCounted refCounted) { + if (Assertions.ENABLED == false) { + return refCounted; + } + var leak = INSTANCE.track(refCounted); + return new RefCounted() { + @Override + public void incRef() { + leak.record(); + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + leak.record(); + return refCounted.tryIncRef(); + } + + @Override + public 
boolean decRef() { + if (refCounted.decRef()) { + leak.close(refCounted); + return true; + } + leak.record(); + return false; + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + }; + } + public static final class Leak extends WeakReference { @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java index 09a24f6b76a8e..2d5d7b7b522d1 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java @@ -29,6 +29,7 @@ import java.util.function.Consumer; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -579,6 +580,9 @@ public String toString() { l.onResponse(null); } catch (Exception e) { // ok + } catch (AssertionError e) { + // ensure this was only thrown by ActionListener#assertOnce + assertThat(e.getMessage(), endsWith("must handle its own exceptions")); } } else { l.onFailure(new RuntimeException("supplied")); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java index 3378ff0063bb0..7b452beac0938 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.allocator.ClusterBalanceStats; -import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceStats; +import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceStatsTests; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; @@ -30,6 +30,7 @@ import static java.util.function.Function.identity; import static java.util.stream.Collectors.toMap; +import static org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceStatsTests.randomDesiredBalanceStats; import static org.hamcrest.Matchers.containsInAnyOrder; public class DesiredBalanceResponseTests extends AbstractWireSerializingTestCase { @@ -49,20 +50,6 @@ protected DesiredBalanceResponse createTestInstance() { ); } - private DesiredBalanceStats randomDesiredBalanceStats() { - return new DesiredBalanceStats( - randomNonNegativeLong(), - randomBoolean(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong() - ); - } - private ClusterBalanceStats randomClusterBalanceStats() { return new ClusterBalanceStats( randomNonNegativeInt(), @@ -156,7 +143,7 @@ private Map> randomRo protected DesiredBalanceResponse mutateInstance(DesiredBalanceResponse instance) { return switch (randomInt(4)) { case 0 -> new DesiredBalanceResponse( - randomValueOtherThan(instance.getStats(), this::randomDesiredBalanceStats), + randomValueOtherThan(instance.getStats(), 
DesiredBalanceStatsTests::randomDesiredBalanceStats), instance.getClusterBalanceStats(), instance.getRoutingTable(), instance.getClusterInfo() diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java index 1d80454fcea12..a98d7662b8983 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java @@ -59,6 +59,7 @@ import static org.elasticsearch.cluster.ClusterModule.BALANCED_ALLOCATOR; import static org.elasticsearch.cluster.ClusterModule.DESIRED_BALANCE_ALLOCATOR; import static org.elasticsearch.cluster.ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING; +import static org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceStatsTests.randomDesiredBalanceStats; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; @@ -219,17 +220,7 @@ public void testGetDesiredBalance() throws Exception { } when(desiredBalanceShardsAllocator.getDesiredBalance()).thenReturn(new DesiredBalance(randomInt(1024), shardAssignments)); - DesiredBalanceStats desiredBalanceStats = new DesiredBalanceStats( - randomInt(Integer.MAX_VALUE), - randomBoolean(), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE), - randomInt(Integer.MAX_VALUE) - ); + DesiredBalanceStats desiredBalanceStats = randomDesiredBalanceStats(); when(desiredBalanceShardsAllocator.getStats()).thenReturn(desiredBalanceStats); ClusterInfo clusterInfo = ClusterInfo.EMPTY; when(clusterInfoService.getClusterInfo()).thenReturn(clusterInfo); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/reservedstate/ReservedComposableIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/reservedstate/ReservedComposableIndexTemplateActionTests.java index 28476a0d8b839..173cb4c66d18f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/reservedstate/ReservedComposableIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/reservedstate/ReservedComposableIndexTemplateActionTests.java @@ -875,7 +875,13 @@ public void testTemplatesWithReservedPrefix() throws Exception { .indexTemplates( Map.of( reservedComposableIndexName(conflictingTemplateName), - new ComposableIndexTemplate(singletonList("foo*"), null, Collections.emptyList(), 1L, 1L, Collections.emptyMap()) + ComposableIndexTemplate.builder() + .indexPatterns(singletonList("foo*")) + .componentTemplates(Collections.emptyList()) + .priority(1L) + .version(1L) + .metadata(Collections.emptyMap()) + .build() ) ) .build(); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index e097b83fb9d35..1276f6c2db58b 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -21,7 +20,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -34,7 +32,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -101,10 +98,6 @@ private void indicesThatCannotBeCreatedTestCase( when(clusterService.state()).thenReturn(state); when(clusterService.getSettings()).thenReturn(Settings.EMPTY); - DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); - when(state.getNodes()).thenReturn(discoveryNodes); - when(discoveryNodes.getMinNodeVersion()).thenReturn(VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)); - DiscoveryNode localNode = mock(DiscoveryNode.class); when(clusterService.localNode()).thenReturn(localNode); when(localNode.isIngestNode()).thenReturn(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 0168eb0488a5b..95039f6fb0de1 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -46,7 +45,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponseHandler; @@ -194,7 +192,6 @@ public void setupAction() { nodes = mock(DiscoveryNodes.class); Map<String, DiscoveryNode> ingestNodes = Map.of("node1", remoteNode1, "node2", remoteNode2); when(nodes.getIngestNodes()).thenReturn(ingestNodes); - when(nodes.getMinNodeVersion()).thenReturn(VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)); ClusterState state = mock(ClusterState.class); when(state.getNodes()).thenReturn(nodes); Metadata metadata = Metadata.builder() @@ -679,16 +676,10 @@ public void testFindDefaultPipelineFromTemplateMatch() { public void testFindDefaultPipelineFromV2TemplateMatch() { Exception exception = new Exception("fake exception"); - ComposableIndexTemplate t1 = new ComposableIndexTemplate( - Collections.singletonList("missing_*"), - new Template(Settings.builder().put(IndexSettings.DEFAULT_PIPELINE.getKey(), "pipeline2").build(), null, null), - null, - 
null, - null, - null, - null, - null - ); + ComposableIndexTemplate t1 = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("missing_*")) + .template(new Template(Settings.builder().put(IndexSettings.DEFAULT_PIPELINE.getKey(), "pipeline2").build(), null, null)) + .build(); ClusterState state = clusterService.state(); Metadata metadata = Metadata.builder().put("my-template", t1).build(); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 65a0950d05b4d..b896ae3d3f025 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -81,30 +81,38 @@ public void sendExecuteQuery( new SearchShardTarget("node1", new ShardId("test", "na", 0), null), null ); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(2); // the size of the result set - listener.onResponse(queryResult); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(2); // the size of the result set + listener.onResponse(queryResult); + } finally { + queryResult.decRef(); + } } else if (request.contextId().getId() == 2) { QuerySearchResult queryResult = new QuerySearchResult( new ShardSearchContextId("", 123), new SearchShardTarget("node2", new ShardId("test", "na", 0), null), null ); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(2); // the size of the result set - listener.onResponse(queryResult); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(2); // the size of the result set + listener.onResponse(queryResult); + } finally { + queryResult.decRef(); + } } else { fail("no such request ID: " + request.contextId()); } @@ -172,15 +180,19 @@ public void sendExecuteQuery( new SearchShardTarget("node1", new ShardId("test", "na", 0), null), null ); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(2); // the size of the result set - listener.onResponse(queryResult); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(2); // the size of the result set + listener.onResponse(queryResult); + } finally { + queryResult.decRef(); + } } else if (request.contextId().getId() == 2) { listener.onFailure(new MockDirectoryWrapper.FakeIOException()); } else { @@ -252,15 +264,19 @@ public void sendExecuteQuery( new SearchShardTarget("node1", new ShardId("test", "na", 0), null), null ); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { 
new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(2); // the size of the result set - listener.onResponse(queryResult); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(2); // the size of the result set + listener.onResponse(queryResult); + } finally { + queryResult.decRef(); + } } else if (request.contextId().getId() == 2) { throw new UncheckedIOException(new MockDirectoryWrapper.FakeIOException()); } else { diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 82e579ce7eb36..3fa5c6fc4283a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -76,8 +76,12 @@ public void testShortcutQueryAndFetchOptimization() { SearchHits hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); fetchResult.shardResult(hits, fetchProfile(profiled)); QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult); - fetchSearchResult.setShardIndex(0); - results.consumeResult(fetchSearchResult, () -> {}); + try { + fetchSearchResult.setShardIndex(0); + results.consumeResult(fetchSearchResult, () -> {}); + } finally { + fetchSearchResult.decRef(); + } numHits = 1; } else { numHits = 0; @@ -135,33 +139,42 @@ public void testFetchTwoDocument() { ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); // the size of the result set - queryResult.setShardIndex(0); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); // the size of the result set + queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + + } finally { + queryResult.decRef(); + } final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); - queryResult = new QuerySearchResult(ctx2, shard2Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); - queryResult.setShardIndex(1); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult = new QuerySearchResult(ctx2, shard2Target, null); + 
queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); + queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override @@ -228,31 +241,39 @@ public void testFailFetchOneDoc() { final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); // the size of the result set - queryResult.setShardIndex(0); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); // the size of the result set + queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); - queryResult.setShardIndex(1); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); + queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override @@ -345,10 +366,14 @@ public void testFetchDocsConcurrently() throws InterruptedException { ), new DocValueFormat[0] ); - queryResult.size(resultSetSize); // the size of the result set - queryResult.setShardIndex(i); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult.size(resultSetSize); // the size of the result set + queryResult.setShardIndex(i); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } } mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override @@ -437,32 +462,39 @@ public void testExceptionFailsPhase() { boolean profiled = randomBoolean(); SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); - QuerySearchResult queryResult = new QuerySearchResult(new 
ShardSearchContextId("", 123), shard1Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); // the size of the result set - queryResult.setShardIndex(0); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); - SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); - queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); - queryResult.setShardIndex(1); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); // the size of the result set + queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } + try { + queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); + queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } AtomicInteger numFetches = new AtomicInteger(0); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @@ -527,32 +559,39 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); // the size of the result set - queryResult.setShardIndex(0); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); - + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); // the size of the result set + queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new 
QuerySearchResult(ctx2, shard2Target, null); - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), - 2.0F - ), - new DocValueFormat[0] - ); - queryResult.size(resultSetSize); - queryResult.setShardIndex(1); - addProfiling(profiled, queryResult); - results.consumeResult(queryResult, () -> {}); + try { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }), + 2.0F + ), + new DocValueFormat[0] + ); + queryResult.size(resultSetSize); + queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); + results.consumeResult(queryResult, () -> {}); + } finally { + queryResult.decRef(); + } mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 93436ed9b0768..424ccdafb87e4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -72,6 +72,7 @@ import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportMessage; import org.junit.After; import org.junit.Before; @@ -155,27 +156,31 @@ public void testSortDocs() { int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2); AtomicArray<SearchPhaseResult> results = generateQueryResults(nShards, suggestions, queryResultSize, false, false, false); - Optional<SearchPhaseResult> first = results.asList().stream().findFirst(); - int from = 0, size = 0; - if (first.isPresent()) { - from = first.get().queryResult().from(); - size = first.get().queryResult().size(); - } - int accumulatedLength = Math.min(queryResultSize, getTotalQueryHits(results)); - List<CompletionSuggestion> reducedCompletionSuggestions = reducedSuggest(results); - for (Suggest.Suggestion suggestion : reducedCompletionSuggestions) { - int suggestionSize = suggestion.getEntries().get(0).getOptions().size(); - accumulatedLength += suggestionSize; - } - List<TopDocs> topDocsList = new ArrayList<>(); - for (SearchPhaseResult result : results.asList()) { - QuerySearchResult queryResult = result.queryResult(); - TopDocs topDocs = queryResult.consumeTopDocs().topDocs; - SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); - topDocsList.add(topDocs); + try { + Optional<SearchPhaseResult> first = results.asList().stream().findFirst(); + int from = 0, size = 0; + if (first.isPresent()) { + from = first.get().queryResult().from(); + size = first.get().queryResult().size(); + } + int accumulatedLength = Math.min(queryResultSize, getTotalQueryHits(results)); + List<CompletionSuggestion> reducedCompletionSuggestions = reducedSuggest(results); + for (Suggest.Suggestion suggestion : reducedCompletionSuggestions) { + int suggestionSize = suggestion.getEntries().get(0).getOptions().size(); + accumulatedLength += suggestionSize; + } + List<TopDocs> topDocsList = new ArrayList<>(); + for (SearchPhaseResult result : results.asList()) { + QuerySearchResult queryResult = result.queryResult(); + TopDocs topDocs = queryResult.consumeTopDocs().topDocs; + SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); + topDocsList.add(topDocs); + } + 
ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, topDocsList, from, size, reducedCompletionSuggestions).scoreDocs(); + assertThat(sortedDocs.length, equalTo(accumulatedLength)); + } finally { + results.asList().forEach(TransportMessage::decRef); } - ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, topDocsList, from, size, reducedCompletionSuggestions).scoreDocs(); - assertThat(sortedDocs.length, equalTo(accumulatedLength)); } public void testSortDocsIsIdempotent() throws Exception { @@ -190,36 +195,45 @@ public void testSortDocsIsIdempotent() throws Exception { queryResultSize, useConstantScore ); + List<TopDocs> topDocsList = new ArrayList<>(); boolean ignoreFrom = randomBoolean(); - Optional<SearchPhaseResult> first = results.asList().stream().findFirst(); int from = 0, size = 0; - if (first.isPresent()) { - from = first.get().queryResult().from(); - size = first.get().queryResult().size(); - } - List<TopDocs> topDocsList = new ArrayList<>(); - for (SearchPhaseResult result : results.asList()) { - QuerySearchResult queryResult = result.queryResult(); - TopDocs topDocs = queryResult.consumeTopDocs().topDocs; - topDocsList.add(topDocs); - SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); + ScoreDoc[] sortedDocs; + try { + Optional<SearchPhaseResult> first = results.asList().stream().findFirst(); + if (first.isPresent()) { + from = first.get().queryResult().from(); + size = first.get().queryResult().size(); + } + for (SearchPhaseResult result : results.asList()) { + QuerySearchResult queryResult = result.queryResult(); + TopDocs topDocs = queryResult.consumeTopDocs().topDocs; + topDocsList.add(topDocs); + SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); + } + sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, topDocsList, from, size, Collections.emptyList()).scoreDocs(); + } finally { + results.asList().forEach(TransportMessage::decRef); } - ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, topDocsList, from, size, Collections.emptyList()).scoreDocs(); - results = generateSeededQueryResults(randomSeed, nShards, Collections.emptyList(), queryResultSize, useConstantScore); - topDocsList = new ArrayList<>(); - for (SearchPhaseResult result : results.asList()) { - QuerySearchResult queryResult = result.queryResult(); - TopDocs topDocs = queryResult.consumeTopDocs().topDocs; - topDocsList.add(topDocs); - SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); - } - ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, topDocsList, from, size, Collections.emptyList()).scoreDocs(); - assertEquals(sortedDocs.length, sortedDocs2.length); - for (int i = 0; i < sortedDocs.length; i++) { - assertEquals(sortedDocs[i].doc, sortedDocs2[i].doc); - assertEquals(sortedDocs[i].shardIndex, sortedDocs2[i].shardIndex); - assertEquals(sortedDocs[i].score, sortedDocs2[i].score, 0.0f); + try { + topDocsList = new ArrayList<>(); + for (SearchPhaseResult result : results.asList()) { + QuerySearchResult queryResult = result.queryResult(); + TopDocs topDocs = queryResult.consumeTopDocs().topDocs; + topDocsList.add(topDocs); + SearchPhaseController.setShardIndex(topDocs, result.getShardIndex()); + } + ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, topDocsList, from, size, Collections.emptyList()) + .scoreDocs(); + assertEquals(sortedDocs.length, sortedDocs2.length); + for (int i = 0; i < sortedDocs.length; i++) { + assertEquals(sortedDocs[i].doc, sortedDocs2[i].doc); + assertEquals(sortedDocs[i].shardIndex, sortedDocs2[i].shardIndex); + 
assertEquals(sortedDocs[i].score, sortedDocs2[i].score, 0.0f); + } + } finally { + results.asList().forEach(TransportMessage::decRef); } } @@ -257,77 +271,87 @@ public void testMerge() { profile, false ); - SearchPhaseController.ReducedQueryPhase reducedQueryPhase = SearchPhaseController.reducedQueryPhase( - queryResults.asList(), - new ArrayList<>(), - new ArrayList<>(), - new TopDocsStats(trackTotalHits), - 0, - true, - InternalAggregationTestCase.emptyReduceContextBuilder(), - null, - true - ); - List<SearchShardTarget> shards = queryResults.asList().stream().map(SearchPhaseResult::getSearchShardTarget).collect(toList()); - AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults( - shards, - reducedQueryPhase.sortedTopDocs().scoreDocs(), - reducedQueryPhase.suggest(), - profile - ); - InternalSearchResponse mergedResponse = SearchPhaseController.merge( - false, - reducedQueryPhase, - fetchResults.asList(), - fetchResults::get - ); - if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { - assertNull(mergedResponse.hits.getTotalHits()); - } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); - } - for (SearchHit hit : mergedResponse.hits().getHits()) { - SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); - assertSame(searchPhaseResult.getSearchShardTarget(), hit.getShard()); - } - int suggestSize = 0; - for (Suggest.Suggestion s : reducedQueryPhase.suggest()) { - suggestSize += s.getEntries().stream().mapToInt(e -> e.getOptions().size()).sum(); - } - assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize)); - assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs().scoreDocs().length - suggestSize)); - Suggest suggestResult = mergedResponse.suggest(); - for (Suggest.Suggestion suggestion : reducedQueryPhase.suggest()) { - assertThat(suggestion, instanceOf(CompletionSuggestion.class)); - if (suggestion.getEntries().get(0).getOptions().size() > 0) { - CompletionSuggestion suggestionResult = suggestResult.getSuggestion(suggestion.getName()); - assertNotNull(suggestionResult); - List<CompletionSuggestion.Entry.Option> options = suggestionResult.getEntries().get(0).getOptions(); - assertThat(options.size(), equalTo(suggestion.getEntries().get(0).getOptions().size())); - for (CompletionSuggestion.Entry.Option option : options) { - assertNotNull(option.getHit()); - SearchPhaseResult searchPhaseResult = fetchResults.get(option.getHit().getShard().getShardId().id()); - assertSame(searchPhaseResult.getSearchShardTarget(), option.getHit().getShard()); - } - } - } - if (profile) { - assertThat(mergedResponse.profile().entrySet(), hasSize(nShards)); - assertThat( - // All shards should have a query profile - mergedResponse.profile().toString(), - mergedResponse.profile().values().stream().filter(r -> r.getQueryProfileResults() != null).count(), - equalTo((long) nShards) + try { + SearchPhaseController.ReducedQueryPhase reducedQueryPhase = SearchPhaseController.reducedQueryPhase( + queryResults.asList(), + new ArrayList<>(), + new ArrayList<>(), + new TopDocsStats(trackTotalHits), + 0, + true, + InternalAggregationTestCase.emptyReduceContextBuilder(), + null, + true + ); + List<SearchShardTarget> shards = queryResults.asList() + .stream() + .map(SearchPhaseResult::getSearchShardTarget) + .collect(toList()); + AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults( + shards, + reducedQueryPhase.sortedTopDocs().scoreDocs(), + reducedQueryPhase.suggest(), + profile + ); + InternalSearchResponse 
mergedResponse = SearchPhaseController.merge( + false, + reducedQueryPhase, + fetchResults.asList(), + fetchResults::get ); + if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + assertNull(mergedResponse.hits.getTotalHits()); + } else { + assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + } + for (SearchHit hit : mergedResponse.hits().getHits()) { + SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); + assertSame(searchPhaseResult.getSearchShardTarget(), hit.getShard()); + } + int suggestSize = 0; + for (Suggest.Suggestion s : reducedQueryPhase.suggest()) { + suggestSize += s.getEntries().stream().mapToInt(e -> e.getOptions().size()).sum(); + } + assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize)); assertThat( - // Some or all shards should have a fetch profile - mergedResponse.profile().toString(), - mergedResponse.profile().values().stream().filter(r -> r.getFetchPhase() != null).count(), - both(greaterThan(0L)).and(lessThanOrEqualTo((long) nShards)) + mergedResponse.hits().getHits().length, + equalTo(reducedQueryPhase.sortedTopDocs().scoreDocs().length - suggestSize) ); - } else { - assertThat(mergedResponse.profile(), is(anEmptyMap())); + Suggest suggestResult = mergedResponse.suggest(); + for (Suggest.Suggestion suggestion : reducedQueryPhase.suggest()) { + assertThat(suggestion, instanceOf(CompletionSuggestion.class)); + if (suggestion.getEntries().get(0).getOptions().size() > 0) { + CompletionSuggestion suggestionResult = suggestResult.getSuggestion(suggestion.getName()); + assertNotNull(suggestionResult); + List<CompletionSuggestion.Entry.Option> options = suggestionResult.getEntries().get(0).getOptions(); + assertThat(options.size(), equalTo(suggestion.getEntries().get(0).getOptions().size())); + for (CompletionSuggestion.Entry.Option option : options) { + assertNotNull(option.getHit()); + SearchPhaseResult searchPhaseResult = fetchResults.get(option.getHit().getShard().getShardId().id()); + assertSame(searchPhaseResult.getSearchShardTarget(), option.getHit().getShard()); + } + } + } + if (profile) { + assertThat(mergedResponse.profile().entrySet(), hasSize(nShards)); + assertThat( + // All shards should have a query profile + mergedResponse.profile().toString(), + mergedResponse.profile().values().stream().filter(r -> r.getQueryProfileResults() != null).count(), + equalTo((long) nShards) + ); + assertThat( + // Some or all shards should have a fetch profile + mergedResponse.profile().toString(), + mergedResponse.profile().values().stream().filter(r -> r.getFetchPhase() != null).count(), + both(greaterThan(0L)).and(lessThanOrEqualTo((long) nShards)) + ); + } else { + assertThat(mergedResponse.profile(), is(anEmptyMap())); + } + } finally { + queryResults.asList().forEach(TransportMessage::decRef); } } } @@ -337,70 +361,80 @@ public void testMergeWithRank() { int queryResultSize = randomBoolean() ? 
0 : randomIntBetween(1, nShards * 2); for (int trackTotalHits : new int[] { SearchContext.TRACK_TOTAL_HITS_DISABLED, SearchContext.TRACK_TOTAL_HITS_ACCURATE }) { AtomicArray<SearchPhaseResult> queryResults = generateQueryResults(nShards, List.of(), queryResultSize, false, false, true); - SearchPhaseController.ReducedQueryPhase reducedQueryPhase = SearchPhaseController.reducedQueryPhase( - queryResults.asList(), - new ArrayList<>(), - new ArrayList<>(), - new TopDocsStats(trackTotalHits), - 0, - true, - InternalAggregationTestCase.emptyReduceContextBuilder(), - new RankCoordinatorContext(randomIntBetween(1, 10), 0, randomIntBetween(11, 100)) { - @Override - public SearchPhaseController.SortedTopDocs rank(List<QuerySearchResult> querySearchResults, TopDocsStats topDocStats) { - PriorityQueue<RankDoc> queue = new PriorityQueue<RankDoc>(windowSize) { - @Override - protected boolean lessThan(RankDoc a, RankDoc b) { - return a.score < b.score; - } - }; - for (QuerySearchResult qsr : querySearchResults) { - RankShardResult rsr = qsr.getRankShardResult(); - if (rsr != null) { - for (RankDoc rd : ((TestRankShardResult) rsr).testRankDocs) { - queue.insertWithOverflow(rd); + try { + SearchPhaseController.ReducedQueryPhase reducedQueryPhase = SearchPhaseController.reducedQueryPhase( + queryResults.asList(), + new ArrayList<>(), + new ArrayList<>(), + new TopDocsStats(trackTotalHits), + 0, + true, + InternalAggregationTestCase.emptyReduceContextBuilder(), + new RankCoordinatorContext(randomIntBetween(1, 10), 0, randomIntBetween(11, 100)) { + @Override + public SearchPhaseController.SortedTopDocs rank( + List<QuerySearchResult> querySearchResults, + TopDocsStats topDocStats + ) { + PriorityQueue<RankDoc> queue = new PriorityQueue<RankDoc>(windowSize) { + @Override + protected boolean lessThan(RankDoc a, RankDoc b) { + return a.score < b.score; + } + }; + for (QuerySearchResult qsr : querySearchResults) { + RankShardResult rsr = qsr.getRankShardResult(); + if (rsr != null) { + for (RankDoc rd : ((TestRankShardResult) rsr).testRankDocs) { + queue.insertWithOverflow(rd); + } } } + int size = Math.min(this.size, queue.size()); + RankDoc[] topResults = new RankDoc[size]; + for (int rdi = 0; rdi < size; ++rdi) { + topResults[rdi] = queue.pop(); + topResults[rdi].rank = rdi + 1; + } + topDocStats.fetchHits = topResults.length; + return new SearchPhaseController.SortedTopDocs(topResults, false, null, null, null, 0); } - int size = Math.min(this.size, queue.size()); - RankDoc[] topResults = new RankDoc[size]; - for (int rdi = 0; rdi < size; ++rdi) { - topResults[rdi] = queue.pop(); - topResults[rdi].rank = rdi + 1; - } - topDocStats.fetchHits = topResults.length; - return new SearchPhaseController.SortedTopDocs(topResults, false, null, null, null, 0); - } - }, - true - ); - List<SearchShardTarget> shards = queryResults.asList().stream().map(SearchPhaseResult::getSearchShardTarget).collect(toList()); - AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults( - shards, - reducedQueryPhase.sortedTopDocs().scoreDocs(), - reducedQueryPhase.suggest(), - false - ); - InternalSearchResponse mergedResponse = SearchPhaseController.merge( - false, - reducedQueryPhase, - fetchResults.asList(), - fetchResults::get - ); - if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { - assertNull(mergedResponse.hits.getTotalHits()); - } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); - } - int rank = 1; - for (SearchHit hit : mergedResponse.hits().getHits()) { - SearchPhaseResult searchPhaseResult = 
fetchResults.get(hit.getShard().getShardId().id()); - assertSame(searchPhaseResult.getSearchShardTarget(), hit.getShard()); - assertEquals(rank++, hit.getRank()); + }, + true + ); + List<SearchShardTarget> shards = queryResults.asList() + .stream() + .map(SearchPhaseResult::getSearchShardTarget) + .collect(toList()); + AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults( + shards, + reducedQueryPhase.sortedTopDocs().scoreDocs(), + reducedQueryPhase.suggest(), + false + ); + InternalSearchResponse mergedResponse = SearchPhaseController.merge( + false, + reducedQueryPhase, + fetchResults.asList(), + fetchResults::get + ); + if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + assertNull(mergedResponse.hits.getTotalHits()); + } else { + assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + } + int rank = 1; + for (SearchHit hit : mergedResponse.hits().getHits()) { + SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); + assertSame(searchPhaseResult.getSearchShardTarget(), hit.getShard()); + assertEquals(rank++, hit.getRank()); + } + assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs().scoreDocs().length)); + assertThat(mergedResponse.profile(), is(anEmptyMap())); + } finally { + queryResults.asList().forEach(TransportMessage::decRef); } - assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs().scoreDocs().length)); - assertThat(mergedResponse.profile(), is(anEmptyMap())); } } @@ -602,51 +636,63 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { new SearchShardTarget("node", new ShardId("a", "b", 0), null), null ); - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), - new DocValueFormat[0] - ); - InternalAggregations aggs = InternalAggregations.from(singletonList(new Max("test", 1.0D, DocValueFormat.RAW, emptyMap()))); - result.aggregations(aggs); - result.setShardIndex(0); - consumer.consumeResult(result, latch::countDown); - + try { + result.topDocs( + new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0] + ); + InternalAggregations aggs = InternalAggregations.from(singletonList(new Max("test", 1.0D, DocValueFormat.RAW, emptyMap()))); + result.aggregations(aggs); + result.setShardIndex(0); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } result = new QuerySearchResult( new ShardSearchContextId("", 1), new SearchShardTarget("node", new ShardId("a", "b", 0), null), null ); - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), - new DocValueFormat[0] - ); - aggs = InternalAggregations.from(singletonList(new Max("test", 3.0D, DocValueFormat.RAW, emptyMap()))); - result.aggregations(aggs); - result.setShardIndex(2); - consumer.consumeResult(result, latch::countDown); - + try { + result.topDocs( + new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0] + ); + InternalAggregations aggs = InternalAggregations.from(singletonList(new Max("test", 3.0D, DocValueFormat.RAW, emptyMap()))); + result.aggregations(aggs); + result.setShardIndex(2); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } 
result = new QuerySearchResult( new ShardSearchContextId("", 1), new SearchShardTarget("node", new ShardId("a", "b", 0), null), null ); - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), - new DocValueFormat[0] - ); - aggs = InternalAggregations.from(singletonList(new Max("test", 2.0D, DocValueFormat.RAW, emptyMap()))); - result.aggregations(aggs); - result.setShardIndex(1); - consumer.consumeResult(result, latch::countDown); - + try { + result.topDocs( + new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0] + ); + InternalAggregations aggs = InternalAggregations.from(singletonList(new Max("test", 2.0D, DocValueFormat.RAW, emptyMap()))); + result.aggregations(aggs); + result.setShardIndex(1); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } while (numEmptyResponses > 0) { result = QuerySearchResult.nullInstance(); - int shardId = 2 + numEmptyResponses; - result.setShardIndex(shardId); - result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); - consumer.consumeResult(result, latch::countDown); + try { + int shardId = 2 + numEmptyResponses; + result.setShardIndex(shardId); + result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } numEmptyResponses--; - } latch.await(); final int numTotalReducePhases; @@ -707,20 +753,24 @@ public void testConsumerConcurrently() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", id), null), null ); - result.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), - number - ), - new DocValueFormat[0] - ); - InternalAggregations aggs = InternalAggregations.from( - Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) - ); - result.aggregations(aggs); - result.setShardIndex(id); - result.size(1); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), + number + ), + new DocValueFormat[0] + ); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) + ); + result.aggregations(aggs); + result.setShardIndex(id); + result.size(1); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } }); threads[i].start(); @@ -769,17 +819,21 @@ public void testConsumerOnlyAggs() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), - new DocValueFormat[0] - ); - InternalAggregations aggs = InternalAggregations.from( - Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) - ); - result.aggregations(aggs); - result.setShardIndex(i); - result.size(1); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs( + new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), + new DocValueFormat[0] + 
); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) + ); + result.aggregations(aggs); + result.setShardIndex(i); + result.size(1); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } } latch.await(); @@ -823,16 +877,20 @@ public void testConsumerOnlyHits() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - result.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), - number - ), - new DocValueFormat[0] - ); - result.setShardIndex(i); - result.size(1); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), + number + ), + new DocValueFormat[0] + ); + result.setShardIndex(i); + result.size(1); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } } latch.await(); SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); @@ -880,18 +938,22 @@ public void testReduceTopNWithFromOffset() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - ScoreDoc[] docs = new ScoreDoc[3]; - for (int j = 0; j < docs.length; j++) { - docs[j] = new ScoreDoc(0, score--); + try { + ScoreDoc[] docs = new ScoreDoc[3]; + for (int j = 0; j < docs.length; j++) { + docs[j] = new ScoreDoc(0, score--); + } + result.topDocs( + new TopDocsAndMaxScore(new TopDocs(new TotalHits(3, TotalHits.Relation.EQUAL_TO), docs), docs[0].score), + new DocValueFormat[0] + ); + result.setShardIndex(i); + result.size(5); + result.from(5); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); } - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(3, TotalHits.Relation.EQUAL_TO), docs), docs[0].score), - new DocValueFormat[0] - ); - result.setShardIndex(i); - result.size(5); - result.from(5); - consumer.consumeResult(result, latch::countDown); } latch.await(); // 4*3 results = 12 we get result 5 to 10 here with from=5 and size=5 @@ -936,10 +998,14 @@ public void testConsumerSortByField() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); - result.setShardIndex(i); - result.size(size); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); + result.setShardIndex(i); + result.size(size); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } } latch.await(); SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); @@ -986,10 +1052,14 @@ public void testConsumerFieldCollapsing() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); - result.setShardIndex(i); - result.size(size); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); + result.setShardIndex(i); + result.size(size); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } } latch.await(); SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); @@ 
-1031,55 +1101,59 @@ public void testConsumerSuggestions() throws Exception { new SearchShardTarget("node", new ShardId("a", "b", i), null), null ); - List<Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>> suggestions = - new ArrayList<>(); - { - TermSuggestion termSuggestion = new TermSuggestion("term", 1, SortBy.SCORE); - TermSuggestion.Entry entry = new TermSuggestion.Entry(new Text("entry"), 0, 10); - int numOptions = randomIntBetween(1, 10); - for (int j = 0; j < numOptions; j++) { - int score = numOptions - j; - maxScoreTerm = Math.max(maxScoreTerm, score); - entry.addOption(new TermSuggestion.Entry.Option(new Text("option"), randomInt(), score)); + try { + List<Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>> suggestions = + new ArrayList<>(); + { + TermSuggestion termSuggestion = new TermSuggestion("term", 1, SortBy.SCORE); + TermSuggestion.Entry entry = new TermSuggestion.Entry(new Text("entry"), 0, 10); + int numOptions = randomIntBetween(1, 10); + for (int j = 0; j < numOptions; j++) { + int score = numOptions - j; + maxScoreTerm = Math.max(maxScoreTerm, score); + entry.addOption(new TermSuggestion.Entry.Option(new Text("option"), randomInt(), score)); + } + termSuggestion.addTerm(entry); + suggestions.add(termSuggestion); } - termSuggestion.addTerm(entry); - suggestions.add(termSuggestion); - } - { - PhraseSuggestion phraseSuggestion = new PhraseSuggestion("phrase", 1); - PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entry"), 0, 10); - int numOptions = randomIntBetween(1, 10); - for (int j = 0; j < numOptions; j++) { - int score = numOptions - j; - maxScorePhrase = Math.max(maxScorePhrase, score); - entry.addOption(new PhraseSuggestion.Entry.Option(new Text("option"), new Text("option"), score)); + { + PhraseSuggestion phraseSuggestion = new PhraseSuggestion("phrase", 1); + PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entry"), 0, 10); + int numOptions = randomIntBetween(1, 10); + for (int j = 0; j < numOptions; j++) { + int score = numOptions - j; + maxScorePhrase = Math.max(maxScorePhrase, score); + entry.addOption(new PhraseSuggestion.Entry.Option(new Text("option"), new Text("option"), score)); + } + phraseSuggestion.addTerm(entry); + suggestions.add(phraseSuggestion); } - phraseSuggestion.addTerm(entry); - suggestions.add(phraseSuggestion); - } - { - CompletionSuggestion completionSuggestion = new CompletionSuggestion("completion", 1, false); - CompletionSuggestion.Entry entry = new CompletionSuggestion.Entry(new Text("entry"), 0, 10); - int numOptions = randomIntBetween(1, 10); - for (int j = 0; j < numOptions; j++) { - int score = numOptions - j; - maxScoreCompletion = Math.max(maxScoreCompletion, score); - CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( - j, - new Text("option"), - score, - Collections.emptyMap() - ); - entry.addOption(option); + { + CompletionSuggestion completionSuggestion = new CompletionSuggestion("completion", 1, false); + CompletionSuggestion.Entry entry = new CompletionSuggestion.Entry(new Text("entry"), 0, 10); + int numOptions = randomIntBetween(1, 10); + for (int j = 0; j < numOptions; j++) { + int score = numOptions - j; + maxScoreCompletion = Math.max(maxScoreCompletion, score); + CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( + j, + new Text("option"), + score, + Collections.emptyMap() + ); + entry.addOption(option); + } + completionSuggestion.addTerm(entry); + suggestions.add(completionSuggestion); } - completionSuggestion.addTerm(entry); - suggestions.add(completionSuggestion); + result.suggest(new Suggest(suggestions)); 
+ result.topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]); + result.setShardIndex(i); + result.size(0); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); } - result.suggest(new Suggest(suggestions)); - result.topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]); - result.setShardIndex(i); - result.size(0); - consumer.consumeResult(result, latch::countDown); } latch.await(); SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); @@ -1173,20 +1247,24 @@ public void onFinalReduce(List<SearchShard> shards, TotalHits totalHits, Interna new SearchShardTarget("node", new ShardId("a", "b", id), null), null ); - result.topDocs( - new TopDocsAndMaxScore( - new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), - number - ), - new DocValueFormat[0] - ); - InternalAggregations aggs = InternalAggregations.from( - Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) - ); - result.aggregations(aggs); - result.setShardIndex(id); - result.size(1); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(0, number) }), + number + ), + new DocValueFormat[0] + ); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(new Max("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())) + ); + result.aggregations(aggs); + result.setShardIndex(id); + result.size(1); + consumer.consumeResult(result, latch::countDown); + } finally { + result.decRef(); + } }); threads[i].start(); } @@ -1253,17 +1331,24 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t new SearchShardTarget("node", new ShardId("a", "b", index), null), null ); - result.topDocs( - new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), - new DocValueFormat[0] - ); - InternalAggregations aggs = InternalAggregations.from( - Collections.singletonList(new Max("test", 0d, DocValueFormat.RAW, Collections.emptyMap())) - ); - result.aggregations(aggs); - result.setShardIndex(index); - result.size(1); - consumer.consumeResult(result, latch::countDown); + try { + result.topDocs( + new TopDocsAndMaxScore( + new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), 
Lucene.EMPTY_SCORE_DOCS), + Float.NaN + ), + new DocValueFormat[0] + ); + result.aggregations(null); + result.setShardIndex(index); + result.size(1); + expectThrows(Exception.class, () -> consumer.consumeResult(result, () -> {})); + } finally { + result.decRef(); + } } assertNull(consumer.reduce().aggregations()); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 6d5380273c8c8..7270326933dea 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -118,38 +118,55 @@ public void sendExecuteQuery( new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null ); - SortField sortField = new SortField("timestamp", SortField.Type.LONG); - if (withCollapse) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldGroups( - "collapse_field", - new TotalHits(1, withScroll ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { request.shardId().id() }) }, - new SortField[] { sortField }, - new Object[] { 0L } + try { + SortField sortField = new SortField("timestamp", SortField.Type.LONG); + if (withCollapse) { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldGroups( + "collapse_field", + new TotalHits( + 1, + withScroll ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { request.shardId().id() }) }, + new SortField[] { sortField }, + new Object[] { 0L } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } else { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, withScroll ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { request.shardId().id() }) }, - new SortField[] { sortField } + new DocValueFormat[] { DocValueFormat.RAW } + ); + } else { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits( + 1, + withScroll ? 
TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { request.shardId().id() }) }, + new SortField[] { sortField } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); + new DocValueFormat[] { DocValueFormat.RAW } + ); + } + queryResult.from(0); + queryResult.size(1); + successfulOps.incrementAndGet(); + queryResult.incRef(); + new Thread(() -> { + try { + listener.onResponse(queryResult); + } finally { + queryResult.decRef(); + } + }).start(); + } finally { + queryResult.decRef(); } - queryResult.from(0); - queryResult.size(1); - successfulOps.incrementAndGet(); - new Thread(() -> listener.onResponse(queryResult)).start(); } }; CountDownLatch latch = new CountDownLatch(1); diff --git a/server/src/test/java/org/elasticsearch/action/support/RefCountingListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/RefCountingListenerTests.java index b1f49cb6efd6d..fe1d45e6a5002 100644 --- a/server/src/test/java/org/elasticsearch/action/support/RefCountingListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/RefCountingListenerTests.java @@ -10,14 +10,16 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ReachabilityChecker; +import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; import static org.elasticsearch.common.util.concurrent.EsExecutors.DIRECT_EXECUTOR_SERVICE; import static org.hamcrest.Matchers.containsString; @@ -88,7 +90,7 @@ public String toString() { var reachChecker = new ReachabilityChecker(); var consumed = new AtomicBoolean(); - var consumingListener = refs.acquire(reachChecker.register(new Consumer() { + var consumingListener = refs.acquire(reachChecker.register(new CheckedConsumer() { @Override public void accept(String s) { assertEquals("test response", s); @@ -185,6 +187,7 @@ public void testValidation() { public void testConsumerFailure() { final var executed = new AtomicBoolean(); + final Runnable completeAcquiredRunOnce; try (var refs = new RefCountingListener(new ActionListener() { @Override public void onResponse(Void unused) { @@ -197,8 +200,19 @@ public void onFailure(Exception e) { executed.set(true); } })) { - refs.acquire(ignored -> { throw new ElasticsearchException("simulated"); }).onResponse(null); + final var listener = refs.acquire(ignored -> { + if (randomBoolean()) { + throw new ElasticsearchException("simulated"); + } else { + throw new IOException("simulated"); + } + }); + completeAcquiredRunOnce = new RunOnce(() -> listener.onResponse(null)); + if (randomBoolean()) { + completeAcquiredRunOnce.run(); + } } + completeAcquiredRunOnce.run(); assertTrue(executed.get()); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java index 6b5531c3ed43e..3851d13dc2c15 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java 
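Note: the try/finally hunks above all apply one discipline: a pooled, ref-counted QuerySearchResult must be released on every exit path of the code that created it, and an extra reference must be acquired before the result is handed to another thread. A minimal sketch of that pattern follows, using a hypothetical PooledResult type rather than the real QuerySearchResult/RefCounted API:

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical stand-in for a pooled, ref-counted search result.
final class PooledResult {
    private final AtomicInteger refs = new AtomicInteger(1); // creator owns the initial ref

    void incRef() { refs.incrementAndGet(); }

    void decRef() {
        if (refs.decrementAndGet() == 0) {
            release();
        }
    }

    private void release() { /* return buffers to the pool, exactly once */ }
}

final class ResultHandoff {
    static void produceAndRespond(PooledResult result, Runnable respond) {
        try {
            result.incRef();              // take a ref *before* the async hand-off
            new Thread(() -> {
                try {
                    respond.run();        // deliver the result to the listener
                } finally {
                    result.decRef();      // release the hand-off ref
                }
            }).start();
        } finally {
            result.decRef();              // creator's ref, released on every path
        }
    }
}

The SearchQueryThenFetchAsyncActionTests hunk above shows both halves at once: incRef() before starting the responder thread, decRef() in that thread's finally, and a second decRef() in the creating scope's finally.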
@@ -106,11 +106,11 @@ public void testStartJoinBeforeBootstrap() { assertThat(cs1.getCurrentTerm(), equalTo(0L)); StartJoinRequest startJoinRequest1 = new StartJoinRequest(randomFrom(node1, node2), randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); - assertThat(v1.getMasterCandidateNode(), equalTo(startJoinRequest1.getMasterCandidateNode())); - assertThat(v1.getVotingNode(), equalTo(node1)); - assertThat(v1.getTerm(), equalTo(startJoinRequest1.getTerm())); - assertThat(v1.getLastAcceptedTerm(), equalTo(initialStateNode1.term())); - assertThat(v1.getLastAcceptedVersion(), equalTo(initialStateNode1.version())); + assertThat(v1.masterCandidateNode(), equalTo(startJoinRequest1.getMasterCandidateNode())); + assertThat(v1.votingNode(), equalTo(node1)); + assertThat(v1.term(), equalTo(startJoinRequest1.getTerm())); + assertThat(v1.lastAcceptedTerm(), equalTo(initialStateNode1.term())); + assertThat(v1.lastAcceptedVersion(), equalTo(initialStateNode1.version())); assertThat(cs1.getCurrentTerm(), equalTo(startJoinRequest1.getTerm())); StartJoinRequest startJoinRequest2 = new StartJoinRequest( @@ -129,11 +129,11 @@ public void testStartJoinAfterBootstrap() { StartJoinRequest startJoinRequest1 = new StartJoinRequest(randomFrom(node1, node2), randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); - assertThat(v1.getMasterCandidateNode(), equalTo(startJoinRequest1.getMasterCandidateNode())); - assertThat(v1.getVotingNode(), equalTo(node1)); - assertThat(v1.getTerm(), equalTo(startJoinRequest1.getTerm())); - assertThat(v1.getLastAcceptedTerm(), equalTo(state1.term())); - assertThat(v1.getLastAcceptedVersion(), equalTo(state1.version())); + assertThat(v1.masterCandidateNode(), equalTo(startJoinRequest1.getMasterCandidateNode())); + assertThat(v1.votingNode(), equalTo(node1)); + assertThat(v1.term(), equalTo(startJoinRequest1.getTerm())); + assertThat(v1.lastAcceptedTerm(), equalTo(state1.term())); + assertThat(v1.lastAcceptedVersion(), equalTo(state1.version())); assertThat(cs1.getCurrentTerm(), equalTo(startJoinRequest1.getTerm())); StartJoinRequest startJoinRequest2 = new StartJoinRequest( @@ -212,7 +212,7 @@ public void testJoinWithHigherAcceptedTerm() { Join badJoin = new Join( randomFrom(node1, node2), node1, - v1.getTerm(), + v1.term(), randomLongBetween(state2.term() + 1, 30), randomNonNegativeLong() ); @@ -234,7 +234,7 @@ public void testJoinWithSameAcceptedTermButHigherVersion() { StartJoinRequest startJoinRequest2 = new StartJoinRequest(node2, randomLongBetween(startJoinRequest1.getTerm() + 1, 10)); Join v1 = cs1.handleStartJoin(startJoinRequest2); - Join badJoin = new Join(randomFrom(node1, node2), node1, v1.getTerm(), state2.term(), randomLongBetween(state2.version() + 1, 30)); + Join badJoin = new Join(randomFrom(node1, node2), node1, v1.term(), state2.term(), randomLongBetween(state2.version() + 1, 30)); assertThat( expectThrows(CoordinationStateRejectedException.class, () -> cs1.handleJoin(badJoin)).getMessage(), containsString("higher than current last accepted version") @@ -253,7 +253,7 @@ public void testJoinWithLowerLastAcceptedTermWinsElection() { StartJoinRequest startJoinRequest2 = new StartJoinRequest(node2, randomLongBetween(startJoinRequest1.getTerm() + 1, 10)); Join v1 = cs1.handleStartJoin(startJoinRequest2); - Join join = new Join(node1, node1, v1.getTerm(), randomLongBetween(0, state2.term() - 1), randomLongBetween(0, 20)); + Join join = new Join(node1, node1, v1.term(), randomLongBetween(0, state2.term() - 1), 
randomLongBetween(0, 20)); assertTrue(cs1.handleJoin(join)); assertTrue(cs1.electionWon()); assertTrue(cs1.containsJoinVoteFor(node1)); @@ -275,7 +275,7 @@ public void testJoinWithSameLastAcceptedTermButLowerOrSameVersionWinsElection() StartJoinRequest startJoinRequest2 = new StartJoinRequest(node2, randomLongBetween(startJoinRequest1.getTerm() + 1, 10)); Join v1 = cs1.handleStartJoin(startJoinRequest2); - Join join = new Join(node1, node1, v1.getTerm(), state2.term(), randomLongBetween(0, state2.version())); + Join join = new Join(node1, node1, v1.term(), state2.term(), randomLongBetween(0, state2.version())); assertTrue(cs1.handleJoin(join)); assertTrue(cs1.electionWon()); assertTrue(cs1.containsJoinVoteFor(node1)); @@ -296,7 +296,7 @@ public void testJoinDoesNotWinElection() { StartJoinRequest startJoinRequest2 = new StartJoinRequest(node2, randomLongBetween(startJoinRequest1.getTerm() + 1, 10)); Join v1 = cs1.handleStartJoin(startJoinRequest2); - Join join = new Join(node2, node1, v1.getTerm(), randomLongBetween(0, state2.term()), randomLongBetween(0, state2.version())); + Join join = new Join(node2, node1, v1.term(), randomLongBetween(0, state2.term()), randomLongBetween(0, state2.version())); assertTrue(cs1.handleJoin(join)); assertFalse(cs1.electionWon()); assertEquals(cs1.getLastPublishedVersion(), 0L); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java index 50bbbad05a778..82a172d1dccb8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.tasks.TaskManager; @@ -92,7 +93,7 @@ public void testJoinDeduplication() { Function.identity(), (listener, term) -> listener.onResponse(null), CompatibilityVersionsUtils.staticCurrent(), - Set.of() + new FeatureService(List.of()) ); transportService.start(); @@ -113,7 +114,7 @@ public void testJoinDeduplication() { assertEquals(node1, capturedRequest1.node()); assertTrue(joinHelper.isJoinPending()); - final var join1Term = optionalJoin1.stream().mapToLong(Join::getTerm).findFirst().orElse(0L); + final var join1Term = optionalJoin1.stream().mapToLong(Join::term).findFirst().orElse(0L); final var join1Status = new JoinStatus(node1, join1Term, PENDING_JOIN_WAITING_RESPONSE, TimeValue.ZERO); assertThat(joinHelper.getInFlightJoinStatuses(), equalTo(List.of(join1Status))); @@ -127,7 +128,7 @@ public void testJoinDeduplication() { CapturedRequest capturedRequest2 = capturedRequests2[0]; assertEquals(node2, capturedRequest2.node()); - final var join2Term = optionalJoin2.stream().mapToLong(Join::getTerm).findFirst().orElse(0L); + final var join2Term = optionalJoin2.stream().mapToLong(Join::term).findFirst().orElse(0L); final var join2Status = new JoinStatus(node2, join2Term, PENDING_JOIN_WAITING_RESPONSE, TimeValue.ZERO); assertThat( new HashSet<>(joinHelper.getInFlightJoinStatuses()), @@ -260,7 +261,7 @@ public void testJoinFailureOnUnhealthyNodes() { Function.identity(), (listener, term) -> listener.onResponse(null), 
CompatibilityVersionsUtils.staticCurrent(), - Set.of() + new FeatureService(List.of()) ); transportService.start(); @@ -337,7 +338,7 @@ public void testLatestStoredStateFailure() { Function.identity(), (listener, term) -> listener.onFailure(new ElasticsearchException("simulated")), CompatibilityVersionsUtils.staticCurrent(), - Set.of() + new FeatureService(List.of()) ); final var joinAccumulator = joinHelper.new CandidateJoinAccumulator(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java index a6d0424c639a2..91d893121c2ab 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java @@ -43,46 +43,46 @@ public void testJoinEqualsHashCodeSerialization() { // change sourceNode new Join( createNode(randomAlphaOfLength(20)), - join.getMasterCandidateNode(), - join.getTerm(), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() + join.masterCandidateNode(), + join.term(), + join.lastAcceptedTerm(), + join.lastAcceptedVersion() ); case 1 -> // change targetNode new Join( - join.getVotingNode(), + join.votingNode(), createNode(randomAlphaOfLength(20)), - join.getTerm(), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() + join.term(), + join.lastAcceptedTerm(), + join.lastAcceptedVersion() ); case 2 -> // change term new Join( - join.getVotingNode(), - join.getMasterCandidateNode(), - randomValueOtherThan(join.getTerm(), ESTestCase::randomNonNegativeLong), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() + join.votingNode(), + join.masterCandidateNode(), + randomValueOtherThan(join.term(), ESTestCase::randomNonNegativeLong), + join.lastAcceptedTerm(), + join.lastAcceptedVersion() ); case 3 -> // change last accepted term new Join( - join.getVotingNode(), - join.getMasterCandidateNode(), - join.getTerm(), - randomValueOtherThan(join.getLastAcceptedTerm(), ESTestCase::randomNonNegativeLong), - join.getLastAcceptedVersion() + join.votingNode(), + join.masterCandidateNode(), + join.term(), + randomValueOtherThan(join.lastAcceptedTerm(), ESTestCase::randomNonNegativeLong), + join.lastAcceptedVersion() ); case 4 -> // change version new Join( - join.getVotingNode(), - join.getMasterCandidateNode(), - join.getTerm(), - join.getLastAcceptedTerm(), - randomValueOtherThan(join.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong) + join.votingNode(), + join.masterCandidateNode(), + join.term(), + join.lastAcceptedTerm(), + randomValueOtherThan(join.lastAcceptedVersion(), ESTestCase::randomNonNegativeLong) ); default -> throw new AssertionError(); } @@ -224,7 +224,7 @@ public void testJoinRequestEqualsHashCodeSerialization() { randomNonNegativeLong() ); JoinRequest initialJoinRequest = new JoinRequest( - initialJoin.getVotingNode(), + initialJoin.votingNode(), CompatibilityVersionsUtils.fakeSystemIndicesRandom(), Set.of(generateRandomStringArray(10, 10, false)), randomNonNegativeLong(), diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java index 46f03aef76b90..3d8f7caaa55bc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java @@ -33,6 +33,8 @@ import 
org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -72,7 +74,7 @@ public class NodeJoinExecutorTests extends ESTestCase { - private static final ActionListener NOT_COMPLETED_LISTENER = ActionTestUtils.assertNoFailureListener(t -> {}); + private static final ActionListener NO_FAILURE_LISTENER = ActionTestUtils.assertNoFailureListener(t -> {}); public void testPreventJoinClusterWithNewerIndices() { Settings.builder().build(); @@ -157,6 +159,74 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { } } + public void testPreventJoinClusterWithMissingFeatures() throws Exception { + AllocationService allocationService = createAllocationService(); + RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); + FeatureService featureService = new FeatureService(List.of(new FeatureSpecification() { + @Override + public Set getFeatures() { + return Set.of(new NodeFeature("f1"), new NodeFeature("f2")); + } + })); + + NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, featureService); + + DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + DiscoveryNode otherNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(masterNode).localNodeId(masterNode.getId()).masterNodeId(masterNode.getId()).add(otherNode)) + .nodeFeatures(Map.of(masterNode.getId(), Set.of("f1", "f2"), otherNode.getId(), Set.of("f1", "f2"))) + .build(); + + DiscoveryNode newNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( + clusterState, + executor, + List.of( + JoinTask.singleNode( + newNode, + CompatibilityVersionsUtils.staticCurrent(), + Set.of("f1"), + TEST_REASON, + ActionListener.wrap( + o -> fail("Should have failed"), + t -> assertThat(t.getMessage(), containsString("is missing required features [f2]")) + ), + 0L + ) + ) + ); + } + + public void testCanJoinClusterWithMissingIncompleteFeatures() throws Exception { + AllocationService allocationService = createAllocationService(); + RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); + FeatureService featureService = new FeatureService(List.of(new FeatureSpecification() { + @Override + public Set getFeatures() { + return Set.of(new NodeFeature("f1"), new NodeFeature("f2")); + } + })); + + NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, featureService); + + DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + DiscoveryNode otherNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(masterNode).localNodeId(masterNode.getId()).masterNodeId(masterNode.getId()).add(otherNode)) + .nodeFeatures(Map.of(masterNode.getId(), Set.of("f1", "f2"), otherNode.getId(), Set.of("f1"))) + .build(); + + DiscoveryNode newNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( + clusterState, + executor, + List.of( + JoinTask.singleNode(newNode, 
CompatibilityVersionsUtils.staticCurrent(), Set.of("f1"), TEST_REASON, NO_FAILURE_LISTENER, 0L) + ) + ); + } + public void testSuccess() { Settings.builder().build(); Metadata.Builder metaBuilder = Metadata.builder(); @@ -205,7 +275,7 @@ public void testUpdatesNodeWithNewRoles() throws Exception { when(allocationService.adaptAutoExpandReplicas(any())).then(invocationOnMock -> invocationOnMock.getArguments()[0]); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService); + final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); @@ -226,14 +296,7 @@ public void testUpdatesNodeWithNewRoles() throws Exception { clusterState, executor, List.of( - JoinTask.singleNode( - actualNode, - CompatibilityVersionsUtils.staticCurrent(), - Set.of(), - TEST_REASON, - NOT_COMPLETED_LISTENER, - 0L - ) + JoinTask.singleNode(actualNode, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, NO_FAILURE_LISTENER, 0L) ) ); @@ -245,7 +308,7 @@ public void testRejectsStatesWithStaleTerm() { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long executorTerm = randomLongBetween(0L, Long.MAX_VALUE - 1); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var masterNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); final var clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -270,7 +333,7 @@ public void testRejectsStatesWithStaleTerm() { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, executorTerm ) ) @@ -282,7 +345,7 @@ public void testRejectsStatesWithStaleTerm() { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), executorTerm @@ -301,7 +364,7 @@ public void testRejectsStatesWithOtherMaster() { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long executorTerm = randomNonNegativeLong(); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var masterNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); final var localNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); @@ -334,7 +397,7 @@ public void testRejectsStatesWithOtherMaster() { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, executorTerm ) ) @@ -346,7 +409,7 @@ public void testRejectsStatesWithOtherMaster() { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), executorTerm @@ -365,7 +428,7 @@ public void testRejectsStatesWithNoMasterIfNotBecomingMaster() { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long executorTerm = randomNonNegativeLong(); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final 
var masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); final var clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -389,7 +452,7 @@ public void testRejectsStatesWithNoMasterIfNotBecomingMaster() { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, executorTerm ) ), @@ -406,7 +469,7 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long executorTerm = randomLongBetween(1, Long.MAX_VALUE); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var masterNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); final var otherNodeOld = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); @@ -438,14 +501,14 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ), new JoinTask.NodeJoinTask( otherNodeNew, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), executorTerm @@ -472,7 +535,7 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, executorTerm ), JoinTask.singleNode( @@ -497,7 +560,7 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long executorTerm = randomLongBetween(1, Long.MAX_VALUE); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var masterNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); final var otherNode = DiscoveryNodeUtils.builder(UUIDs.randomBase64UUID(random())) @@ -540,14 +603,14 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ), new JoinTask.NodeJoinTask( otherNode, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), executorTerm @@ -566,7 +629,7 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), executorTerm @@ -582,7 +645,7 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, executorTerm ) ) @@ -602,7 +665,7 @@ public void testIgnoresOlderTerms() throws Exception { final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); final long currentTerm = randomLongBetween(100, 1000); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var masterNode = DiscoveryNodeUtils.create(UUIDs.randomBase64UUID(random())); 
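Note: the constructor change threaded through this file (NodeJoinExecutor now takes a FeatureService rather than a bare feature set) backs the two new tests added above. Conceptually, a join is rejected only when the candidate lacks a feature that every current cluster member already advertises; a feature still missing on some existing node is incomplete and must not block joins. A sketch of that rule under assumed, illustrative names (this is not the real NodeJoinExecutor API):

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Illustrative join gate; names and types are simplified assumptions.
final class JoinFeatureGate {

    // Features required by the cluster are those advertised by *every*
    // current member; anything absent on any node is still rolling out.
    static Set<String> requiredFeatures(Map<String, Set<String>> nodeFeatures) {
        Set<String> required = null;
        for (Set<String> features : nodeFeatures.values()) {
            if (required == null) {
                required = new HashSet<>(features);
            } else {
                required.retainAll(features); // intersection across members
            }
        }
        return required == null ? Set.of() : required;
    }

    static void ensureCanJoin(Map<String, Set<String>> nodeFeatures, Set<String> joiningNodeFeatures) {
        Set<String> missing = new HashSet<>(requiredFeatures(nodeFeatures));
        missing.removeAll(joiningNodeFeatures);
        if (missing.isEmpty() == false) {
            throw new IllegalStateException("node is missing required features " + missing);
        }
    }
}

With node features {master: [f1, f2], other: [f1, f2]}, a candidate advertising only [f1] fails with "missing required features [f2]"; with {master: [f1, f2], other: [f1]} the same candidate is admitted. Those are exactly the two cases testPreventJoinClusterWithMissingFeatures and testCanJoinClusterWithMissingIncompleteFeatures assert.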
final var clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -630,7 +693,7 @@ public void testIgnoresOlderTerms() throws Exception { public void testDesiredNodesMembershipIsUpgradedWhenNewNodesJoin() throws Exception { final var allocationService = createAllocationService(); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var actualizedDesiredNodes = randomList(0, 5, this::createActualizedDesiredNode); final var pendingDesiredNodes = randomList(0, 5, this::createPendingDesiredNode); @@ -656,7 +719,7 @@ public void testDesiredNodesMembershipIsUpgradedWhenNewNodesJoin() throws Except CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, 0L ) ) @@ -678,7 +741,7 @@ public void testDesiredNodesMembershipIsUpgradedWhenNewNodesJoin() throws Except public void testDesiredNodesMembershipIsUpgradedWhenANewMasterIsElected() throws Exception { final var allocationService = createAllocationService(); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final var executor = new NodeJoinExecutor(allocationService, rerouteService); + final var executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final var actualizedDesiredNodes = randomList(1, 5, this::createPendingDesiredNode); final var pendingDesiredNodes = randomList(0, 5, this::createPendingDesiredNode); @@ -701,7 +764,7 @@ public void testDesiredNodesMembershipIsUpgradedWhenANewMasterIsElected() throws CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), 1L @@ -729,7 +792,7 @@ public void testPerNodeLogging() { when(allocationService.adaptAutoExpandReplicas(any())).then(invocationOnMock -> invocationOnMock.getArguments()[0]); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService); + final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -811,7 +874,7 @@ public void testResetsNodeLeftGenerationOnNewTerm() throws Exception { when(allocationService.adaptAutoExpandReplicas(any())).then(invocationOnMock -> invocationOnMock.getArguments()[0]); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService); + final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final long term = randomLongBetween(0, Long.MAX_VALUE - 1); final DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); @@ -835,7 +898,7 @@ public void testResetsNodeLeftGenerationOnNewTerm() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER + NO_FAILURE_LISTENER ) ), randomLongBetween(term + 1, Long.MAX_VALUE) @@ -851,7 +914,7 @@ public void testSetsNodeFeaturesWhenRejoining() throws Exception { final AllocationService allocationService 
= createAllocationService(); final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); - final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService); + final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService, createFeatureService()); final DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); @@ -875,7 +938,7 @@ public void testSetsNodeFeaturesWhenRejoining() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of("f1", "f2"), TEST_REASON, - NOT_COMPLETED_LISTENER, + NO_FAILURE_LISTENER, 0L ) ) @@ -895,16 +958,10 @@ private DesiredNodeWithStatus createPendingDesiredNode() { private static JoinTask createRandomTask(DiscoveryNode node, long term) { return randomBoolean() - ? JoinTask.singleNode(node, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, term) + ? JoinTask.singleNode(node, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, NO_FAILURE_LISTENER, term) : JoinTask.completingElection( Stream.of( - new JoinTask.NodeJoinTask( - node, - CompatibilityVersionsUtils.staticCurrent(), - Set.of(), - TEST_REASON, - NOT_COMPLETED_LISTENER - ) + new JoinTask.NodeJoinTask(node, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, NO_FAILURE_LISTENER) ), term ); @@ -919,6 +976,10 @@ private static AllocationService createAllocationService() { return allocationService; } + private static FeatureService createFeatureService() { + return new FeatureService(List.of()); + } + // Hard-coding the class name here because it is also mentioned in the troubleshooting docs, so should not be renamed without care. private static final String LOGGER_NAME = "org.elasticsearch.cluster.coordination.NodeJoinExecutor"; diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java index e9e5b1c5338df..1a9d068da12ad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.monitor.NodeHealthService; import org.elasticsearch.monitor.StatusInfo; @@ -231,7 +232,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req LeaderHeartbeatService.NO_OP, StatefulPreVoteCollector::new, CompatibilityVersionsUtils.staticCurrent(), - Set.of() + new FeatureService(List.of()) ); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java index e0ccc6bef42de..8372418c01644 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java @@ -140,7 +140,7 @@ protected void onCompletion(boolean committed) { @Override protected void onJoin(Join join) { - assertNull(joins.put(join.getVotingNode(), join)); + assertNull(joins.put(join.votingNode(), 
join)); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index ac969eb7c9a10..fe678ec23afad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -228,15 +228,14 @@ public void testXContentSerializationWithRollover() throws IOException { } DataStreamLifecycle lifecycle = randomLifecycle(); Template template = new Template(settings, mappings, aliases, lifecycle); - new ComposableIndexTemplate( - List.of(randomAlphaOfLength(4)), - template, - List.of(), - randomNonNegativeLong(), - randomNonNegativeLong(), - null, - dataStreamTemplate - ); + ComposableIndexTemplate.builder() + .indexPatterns(List.of(randomAlphaOfLength(4))) + .template(template) + .componentTemplates(List.of()) + .priority(randomNonNegativeLong()) + .version(randomNonNegativeLong()) + .dataStreamTemplate(dataStreamTemplate) + .build(); try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index 2b40e28416129..e7ec430e6bb20 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -60,6 +60,7 @@ public void testCreateDataStream() throws Exception { ClusterState newState = MetadataCreateDataStreamService.createDataStream( metadataCreateIndexService, cs, + true, req, ActionListener.noop() ); @@ -68,6 +69,7 @@ public void testCreateDataStream() throws Exception { assertThat(newState.metadata().dataStreams().get(dataStreamName).isSystem(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).isHidden(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).isReplicated(), is(false)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getLifecycle(), equalTo(DataStreamLifecycle.DEFAULT)); assertThat(newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)), notNullValue()); assertThat( newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)).getSettings().get("index.hidden"), @@ -97,6 +99,7 @@ public void testCreateDataStreamWithAliasFromTemplate() throws Exception { ClusterState newState = MetadataCreateDataStreamService.createDataStream( metadataCreateIndexService, cs, + randomBoolean(), req, ActionListener.noop() ); @@ -172,6 +175,7 @@ public void testCreateDataStreamWithAliasFromComponentTemplate() throws Exceptio ClusterState newState = MetadataCreateDataStreamService.createDataStream( metadataCreateIndexService, cs, + randomBoolean(), req, ActionListener.noop() ); @@ -224,6 +228,7 @@ public void testCreateSystemDataStream() throws Exception { ClusterState newState = MetadataCreateDataStreamService.createDataStream( metadataCreateIndexService, cs, + randomBoolean(), req, ActionListener.noop() ); @@ -252,7 +257,13 @@ public void testCreateDuplicateDataStream() throws Exception { ResourceAlreadyExistsException e = expectThrows( ResourceAlreadyExistsException.class, - () -> 
MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat(e.getMessage(), containsString("data_stream [" + dataStreamName + "] already exists")); } @@ -264,7 +275,13 @@ public void testCreateDataStreamWithInvalidName() throws Exception { CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat(e.getMessage(), containsString("must not contain the following characters")); } @@ -276,7 +293,13 @@ public void testCreateDataStreamWithUppercaseCharacters() throws Exception { CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat(e.getMessage(), containsString("data_stream [" + dataStreamName + "] must be lowercase")); } @@ -288,7 +311,13 @@ public void testCreateDataStreamStartingWithPeriod() throws Exception { CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat(e.getMessage(), containsString("data_stream [" + dataStreamName + "] must not start with '.ds-'")); } @@ -300,7 +329,13 @@ public void testCreateDataStreamNoTemplate() throws Exception { CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); Exception e = expectThrows( IllegalArgumentException.class, - () -> MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat(e.getMessage(), equalTo("no matching index template found for data stream [my-data-stream]")); } @@ -315,7 +350,13 @@ public void testCreateDataStreamNoValidTemplate() throws Exception { CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); Exception e = expectThrows( IllegalArgumentException.class, - () -> MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()) + () -> MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ) ); assertThat( e.getMessage(), @@ -333,7 +374,13 @@ public static ClusterState 
createDataStream(final String dataStreamName) throws .metadata(Metadata.builder().put("template", template).build()) .build(); CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); - return MetadataCreateDataStreamService.createDataStream(metadataCreateIndexService, cs, req, ActionListener.noop()); + return MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + cs, + randomBoolean(), + req, + ActionListener.noop() + ); } private static MetadataCreateIndexService getMetadataCreateIndexService() throws Exception { @@ -379,7 +426,10 @@ private static SystemDataStreamDescriptor systemDataStreamDescriptor() { ".system-data-stream", "test system datastream", Type.EXTERNAL, - new ComposableIndexTemplate(List.of(".system-data-stream"), null, null, null, null, null, new DataStreamTemplate()), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(".system-data-stream")) + .dataStreamTemplate(new DataStreamTemplate()) + .build(), Map.of(), List.of("stack"), ExecutorNames.DEFAULT_SYSTEM_DATA_STREAM_THREAD_POOLS diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 5d1c3fd0650d7..14cb19ba89810 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -463,40 +463,28 @@ public void testUpdateComponentTemplateWithIndexHiddenSetting() throws Exception state = metadataIndexTemplateService.addComponentTemplate(state, true, "foo", componentTemplate); assertNotNull(state.metadata().componentTemplates().get("foo")); - ComposableIndexTemplate firstGlobalIndexTemplate = new ComposableIndexTemplate( - List.of("*"), - template, - List.of("foo"), - 1L, - null, - null, - null, - null - ); + ComposableIndexTemplate firstGlobalIndexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .template(template) + .componentTemplates(List.of("foo")) + .priority(1L) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, true, "globalindextemplate1", firstGlobalIndexTemplate); - ComposableIndexTemplate secondGlobalIndexTemplate = new ComposableIndexTemplate( - List.of("*"), - template, - List.of("foo"), - 2L, - null, - null, - null, - null - ); + ComposableIndexTemplate secondGlobalIndexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .template(template) + .componentTemplates(List.of("foo")) + .priority(2L) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, true, "globalindextemplate2", secondGlobalIndexTemplate); - ComposableIndexTemplate fooPatternIndexTemplate = new ComposableIndexTemplate( - List.of("foo-*"), - template, - List.of("foo"), - 3L, - null, - null, - null, - null - ); + ComposableIndexTemplate fooPatternIndexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("foo-*")) + .template(template) + .componentTemplates(List.of("foo")) + .priority(3L) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, true, "foopatternindextemplate", fooPatternIndexTemplate); // update the component template to set the index.hidden setting @@ -555,16 +543,14 @@ public void testUpdateIndexTemplateV2() throws Exception { List patterns = new ArrayList<>(template.indexPatterns()); patterns.add("new-pattern"); 
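Note: from here on the patch mechanically replaces the long positional ComposableIndexTemplate constructor, whose call sites were dominated by trailing nulls, with a fluent builder that names each part and defaults the rest. The shape of that refactor, reduced to a self-contained illustration (field set simplified, not the real class):

import java.util.List;

// Illustrative reduction of the builder shape this refactor leans on.
final class BuilderRefactorSketch {

    record SimpleIndexTemplate(List<String> indexPatterns, List<String> componentTemplates, Long priority, Long version) {
        static Builder builder() { return new Builder(); }

        static final class Builder {
            private List<String> indexPatterns = List.of();
            private List<String> componentTemplates = List.of();
            private Long priority;
            private Long version;

            Builder indexPatterns(List<String> v) { this.indexPatterns = v; return this; }
            Builder componentTemplates(List<String> v) { this.componentTemplates = v; return this; }
            Builder priority(Long v) { this.priority = v; return this; }
            Builder version(Long v) { this.version = v; return this; }

            SimpleIndexTemplate build() {
                return new SimpleIndexTemplate(indexPatterns, componentTemplates, priority, version);
            }
        }
    }

    public static void main(String[] args) {
        // Named setters replace a positional argument list padded with nulls.
        SimpleIndexTemplate t = SimpleIndexTemplate.builder()
            .indexPatterns(List.of("i*"))
            .componentTemplates(List.of("ct"))
            .priority(10L)
            .version(2L)
            .build();
        System.out.println(t);
    }
}

Because each call site now states only the parts it cares about, the converted tests in the hunks below shrink considerably while keeping the same semantics.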
- template = new ComposableIndexTemplate( - patterns, - template.template(), - template.composedOf(), - template.priority(), - template.version(), - template.metadata(), - null, - null - ); + template = ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(template.template()) + .componentTemplates(template.composedOf()) + .priority(template.priority()) + .version(template.version()) + .metadata(template.metadata()) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, false, "foo", template); assertNotNull(state.metadata().templatesV2().get("foo")); @@ -669,16 +655,9 @@ public void testPuttingV2TemplateGeneratesWarning() throws Exception { .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(v1Template).build()) .build(); - ComposableIndexTemplate v2Template = new ComposableIndexTemplate( - Arrays.asList("foo-bar-*", "eggplant"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate v2Template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("foo-bar-*", "eggplant")) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, false, "v2-template", v2Template); assertCriticalWarnings( @@ -725,16 +704,10 @@ public void onFailure(Exception e) { waitToCreateComponentTemplate.await(10, TimeUnit.SECONDS); - ComposableIndexTemplate globalIndexTemplate = new ComposableIndexTemplate( - List.of("*"), - null, - List.of("ct-with-index-hidden-setting"), - null, - null, - null, - null, - null - ); + ComposableIndexTemplate globalIndexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .componentTemplates(List.of("ct-with-index-hidden-setting")) + .build(); expectThrows( InvalidIndexTemplateException.class, @@ -770,16 +743,9 @@ public void onFailure(Exception e) { */ public void testPuttingV1StarTemplateGeneratesWarning() throws Exception { final MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService(); - ComposableIndexTemplate v2Template = new ComposableIndexTemplate( - Arrays.asList("foo-bar-*", "eggplant"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate v2Template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("foo-bar-*", "eggplant")) + .build(); ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "v2-template", v2Template); MetadataIndexTemplateService.PutRequest req = new MetadataIndexTemplateService.PutRequest("cause", "v1-template"); @@ -801,16 +767,9 @@ public void testPuttingV1StarTemplateGeneratesWarning() throws Exception { */ public void testPuttingV1NonStarTemplateGeneratesError() throws Exception { final MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService(); - ComposableIndexTemplate v2Template = new ComposableIndexTemplate( - Arrays.asList("foo-bar-*", "eggplant"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate v2Template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("foo-bar-*", "eggplant")) + .build(); ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "v2-template", v2Template); MetadataIndexTemplateService.PutRequest req = new MetadataIndexTemplateService.PutRequest("cause", "v1-template"); @@ -845,16 +804,9 @@ public void testUpdatingV1NonStarTemplateWithUnchangedPatternsGeneratesWarning() 
.metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(v1Template).build()) .build(); - ComposableIndexTemplate v2Template = new ComposableIndexTemplate( - Arrays.asList("foo-bar-*", "eggplant"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate v2Template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("foo-bar-*", "eggplant")) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, false, "v2-template", v2Template); assertCriticalWarnings( @@ -894,16 +846,9 @@ public void testUpdatingV1NonStarWithChangedPatternsTemplateGeneratesError() thr .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(v1Template).build()) .build(); - ComposableIndexTemplate v2Template = new ComposableIndexTemplate( - Arrays.asList("foo-bar-*", "eggplant"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate v2Template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("foo-bar-*", "eggplant")) + .build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, false, "v2-template", v2Template); assertCriticalWarnings( @@ -937,28 +882,16 @@ public void testUpdatingV1NonStarWithChangedPatternsTemplateGeneratesError() thr public void testPuttingOverlappingV2Template() throws Exception { { - ComposableIndexTemplate template = new ComposableIndexTemplate( - Arrays.asList("egg*", "baz"), - null, - null, - 1L, - null, - null, - null, - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("egg*", "baz")) + .priority(1L) + .build(); MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService(); ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "foo", template); - ComposableIndexTemplate newTemplate = new ComposableIndexTemplate( - Arrays.asList("abc", "baz*"), - null, - null, - 1L, - null, - null, - null, - null - ); + ComposableIndexTemplate newTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("abc", "baz*")) + .priority(1L) + .build(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> metadataIndexTemplateService.addIndexTemplateV2(state, false, "foo2", newTemplate) @@ -974,28 +907,13 @@ public void testPuttingOverlappingV2Template() throws Exception { } { - ComposableIndexTemplate template = new ComposableIndexTemplate( - Arrays.asList("egg*", "baz"), - null, - null, - null, - null, - null, - null, - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder().indexPatterns(Arrays.asList("egg*", "baz")).build(); MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService(); ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "foo", template); - ComposableIndexTemplate newTemplate = new ComposableIndexTemplate( - Arrays.asList("abc", "baz*"), - null, - null, - 0L, - null, - null, - null, - null - ); + ComposableIndexTemplate newTemplate = ComposableIndexTemplate.builder() + .indexPatterns(Arrays.asList("abc", "baz*")) + .priority(0L) + .build(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> metadataIndexTemplateService.addIndexTemplateV2(state, false, "foo2", newTemplate) @@ -1018,9 +936,18 @@ public void testFindV2Templates() throws Exception { ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = 
service.addComponentTemplate(state, true, "ct", ct); - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), null, List.of("ct"), null, 1L, null, null, null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("i*")) + .componentTemplates(List.of("ct")) + .version(1L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); - ComposableIndexTemplate it2 = new ComposableIndexTemplate(List.of("in*"), null, List.of("ct"), 10L, 2L, null, null, null); + ComposableIndexTemplate it2 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("in*")) + .componentTemplates(List.of("ct")) + .priority(10L) + .version(2L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template2", it2); String result = MetadataIndexTemplateService.findV2Template(state.metadata(), "index", randomBoolean()); @@ -1035,9 +962,19 @@ public void testFindV2TemplatesForHiddenIndex() throws Exception { ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = service.addComponentTemplate(state, true, "ct", ct); - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), null, List.of("ct"), 0L, 1L, null, null, null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("i*")) + .componentTemplates(List.of("ct")) + .priority(0L) + .version(1L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); - ComposableIndexTemplate it2 = new ComposableIndexTemplate(List.of("*"), null, List.of("ct"), 10L, 2L, null, null, null); + ComposableIndexTemplate it2 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .componentTemplates(List.of("ct")) + .priority(10L) + .version(2L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template2", it2); String result = MetadataIndexTemplateService.findV2Template(state.metadata(), "index", true); @@ -1053,9 +990,19 @@ public void testFindV2TemplatesForDateMathIndex() throws Exception { ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = service.addComponentTemplate(state, true, "ct", ct); - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("index-*"), null, List.of("ct"), 0L, 1L, null, null, null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("index-*")) + .componentTemplates(List.of("ct")) + .priority(0L) + .version(1L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); - ComposableIndexTemplate it2 = new ComposableIndexTemplate(List.of("*"), null, List.of("ct"), 10L, 2L, null, null, null); + ComposableIndexTemplate it2 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .componentTemplates(List.of("ct")) + .priority(10L) + .version(2L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template2", it2); String result = MetadataIndexTemplateService.findV2Template(state.metadata(), indexName, true); @@ -1067,16 +1014,13 @@ public void testFindV2InvalidGlobalTemplate() { Template templateWithHiddenSetting = new Template(builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true).build(), null, null); try { // add an invalid global template that specifies the `index.hidden` setting - ComposableIndexTemplate invalidGlobalTemplate = new ComposableIndexTemplate( - List.of("*"), - templateWithHiddenSetting, - List.of("ct"), - 5L, - 1L, - null, - null, - null - ); + ComposableIndexTemplate invalidGlobalTemplate = 
ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .template(templateWithHiddenSetting) + .componentTemplates(List.of("ct")) + .priority(5L) + .version(1L) + .build(); Metadata invalidGlobalTemplateMetadata = Metadata.builder() .putCustom( ComposableIndexTemplateMetadata.TYPE, @@ -1119,14 +1063,20 @@ public void testResolveConflictingMappings() throws Exception { }"""), null), null, null); state = service.addComponentTemplate(state, true, "ct_high", ct1); state = service.addComponentTemplate(state, true, "ct_low", ct2); - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), new Template(null, new CompressedXContent(""" - { - "properties": { - "field": { - "type": "keyword" - } - } - }"""), null), List.of("ct_low", "ct_high"), 0L, 1L, null, null, null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("i*")) + .template(new Template(null, new CompressedXContent(""" + { + "properties": { + "field": { + "type": "keyword" + } + } + }"""), null)) + .componentTemplates(List.of("ct_low", "ct_high")) + .priority(0L) + .version(1L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index"); @@ -1175,14 +1125,20 @@ public void testResolveMappings() throws Exception { }"""), null), null, null); state = service.addComponentTemplate(state, true, "ct_high", ct1); state = service.addComponentTemplate(state, true, "ct_low", ct2); - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), new Template(null, new CompressedXContent(""" - { - "properties": { - "field3": { - "type": "integer" - } - } - }"""), null), List.of("ct_low", "ct_high"), 0L, 1L, null, null, null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("i*")) + .template(new Template(null, new CompressedXContent(""" + { + "properties": { + "field3": { + "type": "integer" + } + } + }"""), null)) + .componentTemplates(List.of("ct_low", "ct_high")) + .priority(0L) + .version(1L) + .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index"); @@ -1219,14 +1175,21 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex state = service.addComponentTemplate(state, true, "ct1", ct1); { - ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("logs*"), new Template(null, new CompressedXContent(""" - { - "properties": { - "field2": { - "type": "integer" - } - } - }"""), null), List.of("ct1"), 0L, 1L, null, new ComposableIndexTemplate.DataStreamTemplate(), null); + ComposableIndexTemplate it = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs*")) + .template(new Template(null, new CompressedXContent(""" + { + "properties": { + "field2": { + "type": "integer" + } + } + }"""), null)) + .componentTemplates(List.of("ct1")) + .priority(0L) + .version(1L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); state = service.addIndexTemplateV2(state, true, "logs-data-stream-template", it); List mappings = MetadataIndexTemplateService.collectMappings( @@ -1267,14 +1230,20 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex { // indices matched by templates without the data stream field defined don't get the default @timestamp mapping - ComposableIndexTemplate it = new 
@@ -1267,14 +1230,20 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex
         {
             // indices matched by templates without the data stream field defined don't get the default @timestamp mapping
-            ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("timeseries*"), new Template(null, new CompressedXContent("""
-                {
-                  "properties": {
-                    "field2": {
-                      "type": "integer"
-                    }
-                  }
-                }"""), null), List.of("ct1"), 0L, 1L, null, null, null);
+            ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+                .indexPatterns(List.of("timeseries*"))
+                .template(new Template(null, new CompressedXContent("""
+                    {
+                      "properties": {
+                        "field2": {
+                          "type": "integer"
+                        }
+                      }
+                    }"""), null))
+                .componentTemplates(List.of("ct1"))
+                .priority(0L)
+                .version(1L)
+                .build();
             state = service.addIndexTemplateV2(state, true, "timeseries-template", it);

             List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "timeseries-template", "timeseries");
@@ -1335,16 +1304,13 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception
             }"""), null), null, null);
         state = service.addComponentTemplate(state, true, "ct1", ct1);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("logs*"),
-            null,
-            List.of("ct1"),
-            0L,
-            1L,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("logs*"))
+            .componentTemplates(List.of("ct1"))
+            .priority(0L)
+            .version(1L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();
         state = service.addIndexTemplateV2(state, true, "logs-template", it);

         List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(
@@ -1394,16 +1360,13 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception
                 }
             }
         }"""), null);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("timeseries*"),
-            template,
-            null,
-            0L,
-            1L,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("timeseries*"))
+            .template(template)
+            .priority(0L)
+            .version(1L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();
         state = service.addIndexTemplateV2(state, true, "timeseries-template", it);

         List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(
@@ -1460,16 +1423,15 @@ public void testResolveSettings() throws Exception {
         );
         state = service.addComponentTemplate(state, true, "ct_high", ct1);
         state = service.addComponentTemplate(state, true, "ct_low", ct2);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("i*"),
-            new Template(Settings.builder().put("index.blocks.write", false).put("index.number_of_shards", 3).build(), null, null),
-            List.of("ct_low", "ct_high"),
-            0L,
-            1L,
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("i*"))
+            .template(
+                new Template(Settings.builder().put("index.blocks.write", false).put("index.number_of_shards", 3).build(), null, null)
+            )
+            .componentTemplates(List.of("ct_low", "ct_high"))
+            .priority(0L)
+            .version(1L)
+            .build();
         state = service.addIndexTemplateV2(state, true, "my-template", it);

         Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), "my-template");
@@ -1495,16 +1457,13 @@ public void testResolveAliases() throws Exception {
         ComponentTemplate ct2 = new ComponentTemplate(new Template(null, null, a2), null, null);
         state = service.addComponentTemplate(state, true, "ct_high", ct1);
         state = service.addComponentTemplate(state, true, "ct_low", ct2);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("i*"),
-            new Template(null, null, a3),
-            List.of("ct_low", "ct_high"),
-            0L,
-            1L,
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("i*"))
+            .template(new Template(null, null, a3))
+            .componentTemplates(List.of("ct_low", "ct_high"))
+            .priority(0L)
+            .version(1L)
+            .build();
         state = service.addIndexTemplateV2(state, true, "my-template", it);

         List<Map<String, AliasMetadata>> resolvedAliases = MetadataIndexTemplateService.resolveAliases(state.metadata(), "my-template");
@@ -1662,16 +1621,14 @@ private void assertLifecycleResolution(
         DataStreamLifecycle lifecycleZ,
         DataStreamLifecycle expected
     ) throws Exception {
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of(randomAlphaOfLength(10) + "*"),
-            new Template(null, null, null, lifecycleZ),
-            composeOf,
-            0L,
-            1L,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of(randomAlphaOfLength(10) + "*"))
+            .template(new Template(null, null, null, lifecycleZ))
+            .componentTemplates(composeOf)
+            .priority(0L)
+            .version(1L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();
         state = service.addIndexTemplateV2(state, true, "my-template", it);

         DataStreamLifecycle resolvedLifecycle = MetadataIndexTemplateService.resolveLifecycle(state.metadata(), "my-template");
@@ -1679,14 +1636,10 @@ private void assertLifecycleResolution(
     }

     public void testAddInvalidTemplate() throws Exception {
-        ComposableIndexTemplate template = new ComposableIndexTemplate(
-            Collections.singletonList("a"),
-            null,
-            Arrays.asList("good", "bad"),
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("a"))
+            .componentTemplates(Arrays.asList("good", "bad"))
+            .build();
         ComponentTemplate ct = new ComponentTemplate(new Template(Settings.EMPTY, null, null), null, null);

         final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
@@ -1766,14 +1719,10 @@ public void testRemoveComponentTemplate() throws Exception {
     }

     public void testRemoveComponentTemplateInUse() throws Exception {
-        ComposableIndexTemplate template = new ComposableIndexTemplate(
-            Collections.singletonList("a"),
-            null,
-            Collections.singletonList("ct"),
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("a"))
+            .componentTemplates(Collections.singletonList("ct"))
+            .build();
         ComponentTemplate ct = new ComponentTemplate(new Template(null, new CompressedXContent("{}"), null), null, null);

         final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
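The appeal of the change is easiest to see in isolation: with eight positional parameters, a call like new ComposableIndexTemplate(patterns, null, null, 100L, null, null, dst, null) leaves the reader guessing which null is which. A self-contained illustration of the same builder idea in plain Java (illustrative names, not the Elasticsearch class):

    import java.util.List;

    final class IndexTemplateExample {
        private final List<String> patterns;
        private final Long priority; // null means "unset", matching the diff's semantics

        private IndexTemplateExample(Builder builder) {
            this.patterns = builder.patterns;
            this.priority = builder.priority;
        }

        static final class Builder {
            private List<String> patterns = List.of();
            private Long priority;

            Builder indexPatterns(List<String> patterns) { this.patterns = patterns; return this; }
            Builder priority(Long priority) { this.priority = priority; return this; }
            IndexTemplateExample build() { return new IndexTemplateExample(this); }
        }
    }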
@@ -1848,19 +1797,25 @@ public void testIndexTemplateFailsToOverrideComponentTemplateMappingField() thro
             }"""), null), null, null);
         state = service.addComponentTemplate(state, true, "c1", ct1);
         state = service.addComponentTemplate(state, true, "c2", ct2);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), new Template(null, new CompressedXContent("""
-            {
-              "properties": {
-                "field2": {
-                  "type": "object",
-                  "properties": {
-                    "bar": {
-                      "type": "object"
-                    }
-                  }
-                }
-              }
-            }"""), null), randomBoolean() ? Arrays.asList("c1", "c2") : Arrays.asList("c2", "c1"), 0L, 1L, null, null, null);
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("i*"))
+            .template(new Template(null, new CompressedXContent("""
+                {
+                  "properties": {
+                    "field2": {
+                      "type": "object",
+                      "properties": {
+                        "bar": {
+                          "type": "object"
+                        }
+                      }
+                    }
+                  }
+                }"""), null))
+            .componentTemplates(randomBoolean() ? Arrays.asList("c1", "c2") : Arrays.asList("c2", "c1"))
+            .priority(0L)
+            .version(1L)
+            .build();

         final ClusterState finalState = state;
         IllegalArgumentException e = expectThrows(
@@ -1897,7 +1852,10 @@ public void testIndexTemplateFailsToAdd() throws Exception {
             null
         );
         state = service.addComponentTemplate(state, true, "ct", ct);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), null, List.of("ct"), null, null, null);
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("i*"))
+            .componentTemplates(List.of("ct"))
+            .build();

         final ClusterState finalState = state;
         IllegalArgumentException e = expectThrows(
@@ -1947,16 +1905,13 @@ public void testUpdateComponentTemplateFailsIfResolvedIndexTemplatesWouldBeInval
             """), null), null, null);
         state = service.addComponentTemplate(state, true, "c1", ct1);
         state = service.addComponentTemplate(state, true, "c2", ct2);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("i*"),
-            new Template(null, null, null),
-            randomBoolean() ? Arrays.asList("c1", "c2") : Arrays.asList("c2", "c1"),
-            0L,
-            1L,
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("i*"))
+            .template(new Template(null, null, null))
+            .componentTemplates(randomBoolean() ? Arrays.asList("c1", "c2") : Arrays.asList("c2", "c1"))
+            .priority(0L)
+            .version(1L)
+            .build();

         // Great, the templates aren't invalid
         state = service.addIndexTemplateV2(state, randomBoolean(), "my-template", it);
@@ -2041,16 +1996,11 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception {
                 )
                 .build();

-        ComposableIndexTemplate template = new ComposableIndexTemplate(
-            Collections.singletonList("logs-*-*"),
-            null,
-            null,
-            100L,
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-*-*"))
+            .priority(100L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();

         state = service.addIndexTemplateV2(state, false, "logs", template);
@@ -2072,16 +2022,10 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception {

         // Test replacing it with a version without the data stream config
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
-            ComposableIndexTemplate nonDSTemplate = new ComposableIndexTemplate(
-                Collections.singletonList("logs-*-*"),
-                null,
-                null,
-                100L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate nonDSTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(Collections.singletonList("logs-*-*"))
+                .priority(100L)
+                .build();
             service.addIndexTemplateV2(stateWithDS, false, "logs", nonDSTemplate);
         });
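The hunks above and below pin down the data-stream safety checks: the last index template whose patterns could match an existing data stream can be neither removed nor replaced by a template without a data stream definition. The checks rest on priority-based template matching; a hedged usage sketch (findV2Template is assumed here from its use elsewhere in MetadataIndexTemplateService):

    // Both "logs" (priority 100, pattern logs-*-*) and "logs-mysql" (priority 50,
    // pattern logs-mysql-*) match this index name; the highest-priority template wins.
    String matched = MetadataIndexTemplateService.findV2Template(state.metadata(), "logs-mysql-prod", false);
    assert "logs".equals(matched);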
@@ -2095,16 +2039,10 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception {

         // Test adding a higher priority version that would cause problems
         e = expectThrows(IllegalArgumentException.class, () -> {
-            ComposableIndexTemplate nonDSTemplate = new ComposableIndexTemplate(
-                Collections.singletonList("logs-my*-*"),
-                null,
-                null,
-                105L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate nonDSTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(Collections.singletonList("logs-my*-*"))
+                .priority(105L)
+                .build();
             service.addIndexTemplateV2(stateWithDS, false, "logs2", nonDSTemplate);
         });
@@ -2118,16 +2056,11 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception {

         // Change the pattern to one that doesn't match the data stream
         e = expectThrows(IllegalArgumentException.class, () -> {
-            ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(
-                Collections.singletonList("logs-postgres-*"),
-                null,
-                null,
-                100L,
-                null,
-                null,
-                new ComposableIndexTemplate.DataStreamTemplate(),
-                null
-            );
+            ComposableIndexTemplate newTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(Collections.singletonList("logs-postgres-*"))
+                .priority(100L)
+                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                .build();
             service.addIndexTemplateV2(stateWithDS, false, "logs", newTemplate);
         });
@@ -2140,29 +2073,18 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception {
         );

         // Add an additional template that matches our data stream at a lower priority
-        ComposableIndexTemplate mysqlTemplate = new ComposableIndexTemplate(
-            Collections.singletonList("logs-mysql-*"),
-            null,
-            null,
-            50L,
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate mysqlTemplate = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-mysql-*"))
+            .priority(50L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();
         ClusterState stateWithDSAndTemplate = service.addIndexTemplateV2(stateWithDS, false, "logs-mysql", mysqlTemplate);

         // We should be able to replace the "logs" template, because we have the "logs-mysql" template that can handle the data stream
-        ComposableIndexTemplate nonDSTemplate = new ComposableIndexTemplate(
-            Collections.singletonList("logs-postgres-*"),
-            null,
-            null,
-            100L,
-            null,
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate nonDSTemplate = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-postgres-*"))
+            .priority(100L)
+            .build();
         service.addIndexTemplateV2(stateWithDSAndTemplate, false, "logs", nonDSTemplate);
     }
@@ -2185,16 +2107,11 @@ public void testDataStreamsUsingTemplates() throws Exception {
                 )
                 .build();

-        ComposableIndexTemplate template = new ComposableIndexTemplate(
-            Collections.singletonList("logs-*-*"),
-            null,
-            null,
-            100L,
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-*-*"))
+            .priority(100L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();

         state = service.addIndexTemplateV2(state, false, "logs", template);
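testDataStreamsUsingTemplates builds the same two-template arrangement the previous test relied on: a catch-all logs-*-* data stream template, then a higher-priority logs-mysql-* one. The shape both hunks now construct, shown once in full (a sketch using only the builder API from this diff):

    ComposableIndexTemplate dataStreamTemplate = ComposableIndexTemplate.builder()
        .indexPatterns(Collections.singletonList("logs-*-*"))
        .priority(100L)
        .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
        .build();

Only templates carrying a dataStreamTemplate can back a data stream, which is what makes the plain variants above invalid as replacements.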
@@ -2214,16 +2131,11 @@ public void testDataStreamsUsingTemplates() throws Exception {
                 )
                 .build();

-        ComposableIndexTemplate fineGrainedLogsTemplate = new ComposableIndexTemplate(
-            Collections.singletonList("logs-mysql-*"),
-            null,
-            null,
-            200L, // Higher priority
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate fineGrainedLogsTemplate = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-mysql-*"))
+            .priority(200L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();

         state = service.addIndexTemplateV2(stateWithDS, false, "logs2", fineGrainedLogsTemplate);
@@ -2250,16 +2162,11 @@ public void testRemovingHigherOrderTemplateOfDataStreamWithMultipleTemplates() t
         ClusterState state = ClusterState.EMPTY_STATE;
         final MetadataIndexTemplateService service = getMetadataIndexTemplateService();

-        ComposableIndexTemplate template = new ComposableIndexTemplate(
-            Collections.singletonList("logs-*"),
-            null,
-            null,
-            100L,
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-*"))
+            .priority(100L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();

         state = service.addIndexTemplateV2(state, false, "logs", template);
@@ -2279,16 +2186,11 @@ public void testRemovingHigherOrderTemplateOfDataStreamWithMultipleTemplates() t
                 )
                 .build();

-        ComposableIndexTemplate fineGrainedLogsTemplate = new ComposableIndexTemplate(
-            Collections.singletonList("logs-mysql-*"),
-            null,
-            null,
-            200L, // Higher priority
-            null,
-            null,
-            new ComposableIndexTemplate.DataStreamTemplate(),
-            null
-        );
+        ComposableIndexTemplate fineGrainedLogsTemplate = ComposableIndexTemplate.builder()
+            .indexPatterns(Collections.singletonList("logs-mysql-*"))
+            .priority(200L)
+            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+            .build();

         state = service.addIndexTemplateV2(stateWithDS, false, "logs-test", fineGrainedLogsTemplate);
@@ -2304,28 +2206,16 @@ public void testV2TemplateOverlaps() throws Exception {
         {
-            ComposableIndexTemplate template = new ComposableIndexTemplate(
-                Arrays.asList("egg*", "baz"),
-                null,
-                null,
-                1L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate template = ComposableIndexTemplate.builder()
+                .indexPatterns(Arrays.asList("egg*", "baz"))
+                .priority(1L)
+                .build();
             MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService();
             ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "foo", template);
-            ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(
-                Arrays.asList("abc", "baz*"),
-                null,
-                null,
-                1L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate newTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(Arrays.asList("abc", "baz*"))
+                .priority(1L)
+                .build();

             // when validating is false, we return the conflicts instead of throwing an exception
             var overlaps = MetadataIndexTemplateService.v2TemplateOverlaps(state, "foo2", newTemplate, false);
@@ -2346,44 +2236,23 @@ public void testV2TemplateOverlaps() throws Exception {
                 )
             );

-            ComposableIndexTemplate nonConflict = new ComposableIndexTemplate(
-                Arrays.asList("abc", "bar*"),
-                null,
-                null,
-                1L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate nonConflict = ComposableIndexTemplate.builder()
+                .indexPatterns(Arrays.asList("abc", "bar*"))
+                .priority(1L)
+                .build();

             overlaps = MetadataIndexTemplateService.v2TemplateOverlaps(state, "no-conflict", nonConflict, true);
             assertTrue(overlaps.isEmpty());
         }

         {
-            ComposableIndexTemplate template = new ComposableIndexTemplate(
-                Arrays.asList("egg*", "baz"),
-                null,
-                null,
-                null,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate template = ComposableIndexTemplate.builder().indexPatterns(Arrays.asList("egg*", "baz")).build();
             MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService();
             ClusterState state = metadataIndexTemplateService.addIndexTemplateV2(ClusterState.EMPTY_STATE, false, "foo", template);
-            ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(
-                Arrays.asList("abc", "baz*"),
-                null,
-                null,
-                0L,
-                null,
-                null,
-                null,
-                null
-            );
+            ComposableIndexTemplate newTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(Arrays.asList("abc", "baz*"))
+                .priority(0L)
+                .build();
             IllegalArgumentException e = expectThrows(
                 IllegalArgumentException.class,
                 () -> MetadataIndexTemplateService.v2TemplateOverlaps(state, "foo2", newTemplate, true)
@@ -2543,16 +2412,13 @@ public void testComposableTemplateWithSubobjectsFalse() throws Exception {
         state = service.addComponentTemplate(state, true, "subobjects", subobjects);
         state = service.addComponentTemplate(state, true, "field_mapping", fieldMapping);
-        ComposableIndexTemplate it = new ComposableIndexTemplate(
-            List.of("test-*"),
-            new Template(null, null, null),
-            List.of("subobjects", "field_mapping"),
-            0L,
-            1L,
-            null,
-            null,
-            null
-        );
+        ComposableIndexTemplate it = ComposableIndexTemplate.builder()
+            .indexPatterns(List.of("test-*"))
+            .template(new Template(null, null, null))
+            .componentTemplates(List.of("subobjects", "field_mapping"))
+            .priority(0L)
+            .version(1L)
+            .build();
         state = service.addIndexTemplateV2(state, true, "composable-template", it);

         List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "composable-template", "test-index");
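testV2TemplateOverlaps distinguishes the validating and non-validating forms of the overlap check: with validation off the conflicting templates are returned to the caller, with it on an overlap at equal priority raises IllegalArgumentException. A hedged usage sketch (the Map return shape is inferred from the var and isEmpty() usage in the hunks above):

    Map<String, List<String>> overlaps = MetadataIndexTemplateService.v2TemplateOverlaps(state, "foo2", newTemplate, false);
    if (overlaps.isEmpty() == false) {
        // e.g. {foo=[egg*, baz]}: existing template "foo" has index patterns
        // overlapping the candidate "foo2" at the same priority.
    }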
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamServiceTests.java
index 5b3079338e830..128601ff21250 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamServiceTests.java
@@ -279,21 +279,17 @@ public void testCreateDataStreamWithSuppliedWriteIndex() throws Exception {
                 .put(foo2, false)
                 .put(
                     "template",
-                    new ComposableIndexTemplate(
-                        List.of(dataStreamName + "*"),
-                        null,
-                        null,
-                        null,
-                        null,
-                        null,
-                        new ComposableIndexTemplate.DataStreamTemplate()
-                    )
+                    ComposableIndexTemplate.builder()
+                        .indexPatterns(List.of(dataStreamName + "*"))
+                        .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                        .build()
                 )
             )
             .build();

         ClusterState newState = MetadataMigrateToDataStreamService.migrateToDataStream(
             cs,
+            randomBoolean(),
             this::getMapperService,
             new MetadataMigrateToDataStreamService.MigrateToDataStreamClusterStateUpdateRequest(
                 dataStreamName,
@@ -341,21 +337,17 @@ public void testCreateDataStreamHidesBackingIndicesAndRemovesAlias() throws Exce
                 .put(foo2, false)
                 .put(
                     "template",
-                    new ComposableIndexTemplate(
-                        List.of(dataStreamName + "*"),
-                        null,
-                        null,
-                        null,
-                        null,
-                        null,
-                        new ComposableIndexTemplate.DataStreamTemplate()
-                    )
+                    ComposableIndexTemplate.builder()
+                        .indexPatterns(List.of(dataStreamName + "*"))
+                        .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                        .build()
                )
             )
             .build();

         ClusterState newState = MetadataMigrateToDataStreamService.migrateToDataStream(
             cs,
+            randomBoolean(),
             this::getMapperService,
             new MetadataMigrateToDataStreamService.MigrateToDataStreamClusterStateUpdateRequest(
                 dataStreamName,
@@ -403,15 +395,10 @@ public void testCreateDataStreamWithoutSuppliedWriteIndex() {
                 .put(foo2, false)
                 .put(
                     "template",
-                    new ComposableIndexTemplate(
-                        List.of(dataStreamName + "*"),
-                        null,
-                        null,
-                        null,
-                        null,
-                        null,
-                        new ComposableIndexTemplate.DataStreamTemplate()
-                    )
+                    ComposableIndexTemplate.builder()
+                        .indexPatterns(List.of(dataStreamName + "*"))
+                        .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                        .build()
                 )
             )
             .build();
@@ -420,6 +407,7 @@ public void testCreateDataStreamWithoutSuppliedWriteIndex() {
             IllegalArgumentException.class,
             () -> MetadataMigrateToDataStreamService.migrateToDataStream(
                 cs,
+                randomBoolean(),
                 this::getMapperService,
                 new MetadataMigrateToDataStreamService.MigrateToDataStreamClusterStateUpdateRequest(
                     dataStreamName,
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
index fe0dd5ea1a5e7..264d8c5ca1a95 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
@@ -2279,30 +2279,23 @@ public void testIsTimeSeriesTemplate() throws IOException {
         // Settings in component template:
         {
             var componentTemplate = new ComponentTemplate(template, null, null);
-            var indexTemplate = new ComposableIndexTemplate(
-                List.of("test-*"),
-                null,
-                List.of("component_template_1"),
-                null,
-                null,
-                null,
-                new ComposableIndexTemplate.DataStreamTemplate()
-            );
+            var indexTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(List.of("test-*"))
+                .componentTemplates(List.of("component_template_1"))
+                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                .build();
             Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build();
             assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true));
         }
         // Settings in composable index template:
         {
             var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null);
-            var indexTemplate = new ComposableIndexTemplate(
-                List.of("test-*"),
-                template,
-                List.of("component_template_1"),
-                null,
-                null,
-                null,
-                new ComposableIndexTemplate.DataStreamTemplate()
-            );
+            var indexTemplate = ComposableIndexTemplate.builder()
+                .indexPatterns(List.of("test-*"))
+                .template(template)
+                .componentTemplates(List.of("component_template_1"))
+                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                .build();
             Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build();
             assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true));
         }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
index 8bcd9201092d8..cb681b57b58dd 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
@@ -89,20 +89,21 @@ public void testSimpleJsonFromAndTo() throws IOException {
             )
             .put(
                 "index_templatev2",
-                new ComposableIndexTemplate(
-                    Arrays.asList("foo", "bar*"),
-                    new Template(
-                        Settings.builder().put("setting", "value").build(),
-                        new CompressedXContent("{\"baz\":\"eggplant\"}"),
-                        Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
-                    ),
-                    Collections.singletonList("component_template"),
-                    5L,
-                    4L,
-                    Collections.singletonMap("my_meta", Collections.singletonMap("potato", "chicken")),
-                    randomBoolean() ? null : new ComposableIndexTemplate.DataStreamTemplate(),
-                    null
-                )
+                ComposableIndexTemplate.builder()
+                    .indexPatterns(Arrays.asList("foo", "bar*"))
+                    .template(
+                        new Template(
+                            Settings.builder().put("setting", "value").build(),
+                            new CompressedXContent("{\"baz\":\"eggplant\"}"),
+                            Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
+                        )
+                    )
+                    .componentTemplates(Collections.singletonList("component_template"))
+                    .priority(5L)
+                    .version(4L)
+                    .metadata(Collections.singletonMap("my_meta", Collections.singletonMap("potato", "chicken")))
+                    .dataStreamTemplate(randomBoolean() ? null : new ComposableIndexTemplate.DataStreamTemplate())
+                    .build()
             )
             .put(
                 IndexMetadata.builder("test12")
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStatsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStatsTests.java
index ccd0ec6c0fb7b..d962472f23f95 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStatsTests.java
@@ -12,8 +12,6 @@
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;

-import java.util.Locale;
-
 import static org.hamcrest.Matchers.equalTo;

 public class DesiredBalanceStatsTests extends AbstractWireSerializingTestCase {
@@ -25,6 +23,10 @@ protected Writeable.Reader instanceReader() {

     @Override
     protected DesiredBalanceStats createTestInstance() {
+        return randomDesiredBalanceStats();
+    }
+
+    public static DesiredBalanceStats randomDesiredBalanceStats() {
         return new DesiredBalanceStats(
             randomNonNegativeLong(),
             randomBoolean(),
@@ -34,6 +36,9 @@ protected DesiredBalanceStats createTestInstance() {
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong(),
             randomNonNegativeLong()
         );
     }
@@ -48,8 +53,7 @@ public void testToXContent() {
         assertThat(
             Strings.toString(instance, true, false),
             equalTo(
-                String.format(
-                    Locale.ROOT,
+                Strings.format(
                     """
                         {
                           "computation_converged_index" : %d,
@@ -60,9 +64,12 @@ public void testToXContent() {
                           "computation_iterations" : %d,
                           "computed_shard_movements" : %d,
                           "computation_time_in_millis" : %d,
-                          "reconciliation_time_in_millis" : %d
+                          "reconciliation_time_in_millis" : %d,
+                          "unassigned_shards" : %d,
+                          "total_allocations" : %d,
+                          "undesired_allocations" : %d,
+                          "undesired_allocations_fraction" : %s
                         }""",
-
                     instance.lastConvergedIndex(),
                     instance.computationActive(),
                     instance.computationSubmitted(),
@@ -71,7 +78,11 @@ public void testToXContent() {
                     instance.computationIterations(),
                     instance.computedShardMovements(),
                     instance.cumulativeComputationTime(),
-                    instance.cumulativeReconciliationTime()
+                    instance.cumulativeReconciliationTime(),
+                    instance.unassignedShards(),
+                    instance.totalAllocations(),
+                    instance.undesiredAllocations(),
+                    Double.toString(instance.undesiredAllocationsFraction())
                 )
             )
         );
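DesiredBalanceStats gains four serialized fields: unassigned_shards, total_allocations, undesired_allocations, and a derived undesired_allocations_fraction, which is printed through %s with Double.toString because %d cannot carry a double. A sketch of how such a fraction is plausibly derived from the counters (an assumption; only the record accessors, not the formula, appear in the hunk):

    static double undesiredAllocationsFraction(long totalAllocations, long undesiredAllocations) {
        // Guard the 0/0 case: with no allocations at all, NaN round-trips cleanly
        // through Double.toString(...) where a formatted %d never could.
        return totalAllocations == 0 ? Double.NaN : (double) undesiredAllocations / totalAllocations;
    }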
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java
index 9ab44ec3fb047..14402d1e571ec 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceTests.java
@@ -1808,15 +1808,10 @@ private SystemIndices getSystemIndices(
                     featureDataStreamName,
                     "description",
                     SystemDataStreamDescriptor.Type.EXTERNAL,
-                    new ComposableIndexTemplate(
-                        List.of(systemDataStreamPattern),
-                        null,
-                        null,
-                        null,
-                        null,
-                        null,
-                        new ComposableIndexTemplate.DataStreamTemplate()
-                    ),
+                    ComposableIndexTemplate.builder()
+                        .indexPatterns(List.of(systemDataStreamPattern))
+                        .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                        .build(),
                     Map.of(),
                     List.of("test"),
                     new ExecutorNames(
diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java
index b7792c5f85207..ece78448a1e8c 100644
--- a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java
@@ -49,8 +49,7 @@

 public class TransportVersionsFixupListenerTests extends ESTestCase {

-    // TODO: replace with real constants when 8.8.0 is released
-    private static final Version NEXT_VERSION = Version.fromString("8.8.1");
+    private static final Version NEXT_VERSION = Version.V_8_8_1;
     private static final TransportVersion NEXT_TRANSPORT_VERSION = TransportVersion.fromId(NEXT_VERSION.id);

     @SuppressWarnings("unchecked")
diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
index da1f2aa89642b..6061ed31d898e 100644
--- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.gateway.GatewayMetaState;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.plugins.ClusterCoordinationPlugin;
@@ -43,7 +44,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.BiConsumer;
 import java.util.function.Supplier;
@@ -117,7 +117,7 @@ private DiscoveryModule newModule(
             null,
             new NoneCircuitBreakerService(),
             CompatibilityVersionsUtils.staticCurrent(),
-            Set.of()
+            new FeatureService(List.of())
         );
     }
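A second theme of this diff: call sites that previously passed a bare Set of feature names now take a FeatureService assembled from FeatureSpecification instances (DiscoveryModuleTests above; the coordinator and node-join changes later in this patch do the same). A minimal construction sketch based on those call sites:

    import java.util.List;

    import org.elasticsearch.features.FeatureService;

    // An empty service, as the tests use; production code would pass the
    // discovered FeatureSpecification list instead of List.of().
    FeatureService featureService = new FeatureService(List.of());

The FeatureServiceTests hunk below adjusts for getNodeFeatures() now returning a Map keyed by feature id, hence the added .keySet().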
diff --git a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java
index 0a799934ae64e..9da1eb9c553a8 100644
--- a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java
@@ -93,7 +93,7 @@ public void testGetNodeFeaturesCombinesAllSpecs() {
         );

         FeatureService service = new FeatureService(specs);
-        assertThat(service.getNodeFeatures(), containsInAnyOrder("f1", "f2", "f3", "f4", "f5"));
+        assertThat(service.getNodeFeatures().keySet(), containsInAnyOrder("f1", "f2", "f3", "f4", "f5"));
     }

     public void testStateHasFeatures() {
diff --git a/server/src/test/java/org/elasticsearch/index/MergePolicyConfigTests.java b/server/src/test/java/org/elasticsearch/index/MergePolicyConfigTests.java
index dbad9dd1cbdb5..7748208fcce32 100644
--- a/server/src/test/java/org/elasticsearch/index/MergePolicyConfigTests.java
+++ b/server/src/test/java/org/elasticsearch/index/MergePolicyConfigTests.java
@@ -60,7 +60,11 @@ private void assertCompoundThreshold(Settings settings, double noCFSRatio, ByteS
     }

     private static IndexSettings indexSettings(Settings settings) {
-        return new IndexSettings(newIndexMeta("test", settings), Settings.EMPTY);
+        return indexSettings(settings, Settings.EMPTY);
+    }
+
+    private static IndexSettings indexSettings(Settings indexSettings, Settings nodeSettings) {
+        return new IndexSettings(newIndexMeta("test", indexSettings), nodeSettings);
     }

     public void testNoMerges() {
@@ -118,7 +122,7 @@ public void testUpdateSettings() throws IOException {
         assertThat(indexSettings.getMergePolicy(randomBoolean()), Matchers.instanceOf(LogByteSizeMergePolicy.class));
     }

-    public void testTieredMergePolicySettingsUpdate() throws IOException {
+    public void testTieredMergePolicySettingsUpdate() {
         IndexSettings indexSettings = indexSettings(Settings.EMPTY);
         assertEquals(
             ((TieredMergePolicy) indexSettings.getMergePolicy(false)).getForceMergeDeletesPctAllowed(),
@@ -353,10 +357,6 @@ public Settings build(boolean value) {
         return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build();
     }

-    private Settings build(ByteSizeValue value) {
-        return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build();
-    }
-
     public void testCompoundFileConfiguredByByteSize() throws IOException {
         for (boolean isTimeSeriesIndex : new boolean[] { false, true }) {
             try (Directory dir = newDirectory()) {
@@ -394,4 +394,38 @@ public void testCompoundFileConfiguredByByteSize() throws IOException {
             }
         }
     }
+
+    public void testDefaultMaxMergedSegment() {
+        var indexSettings = indexSettings(Settings.EMPTY);
+        {
+            TieredMergePolicy tieredPolicy = (TieredMergePolicy) new MergePolicyConfig(logger, indexSettings).getMergePolicy(false);
+            assertEquals(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getMbFrac(), tieredPolicy.getMaxMergedSegmentMB(), 0.0d);
+        }
+        {
+            LogByteSizeMergePolicy timePolicy = (LogByteSizeMergePolicy) new MergePolicyConfig(logger, indexSettings).getMergePolicy(true);
+            assertEquals(MergePolicyConfig.DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT.getMbFrac(), timePolicy.getMaxMergeMB(), 0.0d);
+        }
+    }
+
+    public void testDefaultMaxMergedSegmentWithNodeOverrides() {
+        var maxMergedSegmentSize = ByteSizeValue.ofBytes(randomLongBetween(1L, Long.MAX_VALUE));
+        {
+            var indexSettings = indexSettings(
+                Settings.EMPTY,
+                Settings.builder().put(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT_SETTING.getKey(), maxMergedSegmentSize).build()
+            );
+            TieredMergePolicy tieredPolicy = (TieredMergePolicy) new MergePolicyConfig(logger, indexSettings).getMergePolicy(false);
+            assertEquals(maxMergedSegmentSize.getMbFrac(), tieredPolicy.getMaxMergedSegmentMB(), 0.0d);
+        }
+        {
+            var indexSettings = indexSettings(
+                Settings.EMPTY,
+                Settings.builder()
+                    .put(MergePolicyConfig.DEFAULT_MAX_TIME_BASED_MERGED_SEGMENT_SETTING.getKey(), maxMergedSegmentSize)
+                    .build()
+            );
+            LogByteSizeMergePolicy timePolicy = (LogByteSizeMergePolicy) new MergePolicyConfig(logger, indexSettings).getMergePolicy(true);
+            assertEquals(maxMergedSegmentSize.getMbFrac(), timePolicy.getMaxMergeMB(), 0.0d);
+        }
+    }
 }
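The two new MergePolicyConfigTests cases cover node-level overrides for the default maximum merged segment size, for both the tiered and the time-based (log-byte-size) merge policies. A hedged usage sketch (the setting constants are taken from the hunk; their concrete key strings are not shown in this diff):

    Settings nodeSettings = Settings.builder()
        .put(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT_SETTING.getKey(), ByteSizeValue.ofGb(10))
        .build();
    // An index created on a node with this setting gets a TieredMergePolicy whose
    // max merged segment reflects the 10gb override rather than the built-in default.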
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index d042e20e435c8..3d1337af4edfb 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -123,6 +123,7 @@
 import org.elasticsearch.test.DummyShardLock;
 import org.elasticsearch.test.FieldMaskingReader;
 import org.elasticsearch.test.MockLogAppender;
+import org.elasticsearch.test.junit.annotations.TestIssueLogging;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.store.MockFSDirectoryFactory;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -3832,6 +3833,10 @@ public void testIsSearchIdle() throws Exception {
         closeShards(primary);
     }

+    @TestIssueLogging(
+        issueUrl = "https://github.com/elastic/elasticsearch/issues/101008",
+        value = "org.elasticsearch.index.shard.IndexShard:TRACE"
+    )
     public void testScheduledRefresh() throws Exception {
         // Setup and make shard search idle:
         Settings settings = indexSettings(IndexVersion.current(), 1, 1).build();
@@ -3893,13 +3898,18 @@ public void testScheduledRefresh() throws Exception {
         latch.await();

         // Index a document while shard is search active and ensure scheduleRefresh(...) makes documen visible:
+        logger.info("--> index doc while shard search active");
         indexDoc(primary, "_doc", "2", "{\"foo\" : \"bar\"}");
+        logger.info("--> scheduledRefresh(future4)");
         PlainActionFuture future4 = new PlainActionFuture<>();
         primary.scheduledRefresh(future4);
         assertFalse(future4.actionGet());
+
+        logger.info("--> ensure search idle");
         assertTrue(primary.isSearchIdle());
         assertTrue(primary.searchIdleTime() >= TimeValue.ZERO.millis());
         primary.flushOnIdle(0);
+        logger.info("--> scheduledRefresh(future5)");
         PlainActionFuture future5 = new PlainActionFuture<>();
         primary.scheduledRefresh(future5);
         assertTrue(future5.actionGet()); // make sure we refresh once the shard is inactive
diff --git a/server/src/test/java/org/elasticsearch/indices/ExecutorSelectorTests.java b/server/src/test/java/org/elasticsearch/indices/ExecutorSelectorTests.java
index 4fbac946f5967..b27094a6f37f2 100644
--- a/server/src/test/java/org/elasticsearch/indices/ExecutorSelectorTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/ExecutorSelectorTests.java
@@ -77,15 +77,10 @@ public void testDefaultSystemDataStreamThreadPools() {
                 ".test-data-stream",
                 "a data stream for testing",
                 SystemDataStreamDescriptor.Type.INTERNAL,
-                new ComposableIndexTemplate(
-                    List.of(".system-data-stream"),
-                    null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    new ComposableIndexTemplate.DataStreamTemplate()
-                ),
+                ComposableIndexTemplate.builder()
+                    .indexPatterns(List.of(".system-data-stream"))
+                    .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                    .build(),
                 Map.of(),
                 Collections.singletonList("test"),
                 null
@@ -114,15 +109,10 @@ public void testCustomSystemDataStreamThreadPools() {
                 ".test-data-stream",
                 "a data stream for testing",
                 SystemDataStreamDescriptor.Type.INTERNAL,
-                new ComposableIndexTemplate(
-                    List.of(".system-data-stream"),
-                    null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    new ComposableIndexTemplate.DataStreamTemplate()
-                ),
+                ComposableIndexTemplate.builder()
+                    .indexPatterns(List.of(".system-data-stream"))
+                    .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                    .build(),
                 Map.of(),
                 Collections.singletonList("test"),
                 new ExecutorNames(
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
index 6f57707cd9e78..13ecc0841ba55 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
@@ -84,6 +84,7 @@
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettingProviders;
@@ -134,6 +135,7 @@ public class ClusterStateChanges {
     private final TransportService transportService;
     private final AllocationService allocationService;
     private final ClusterService clusterService;
+    private final FeatureService featureService;
     private final ShardStateAction.ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor;
     private final ShardStateAction.ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor;

@@ -216,6 +218,8 @@ protected ExecutorService createThreadPoolExecutor() {
         }

         // services
+        featureService = new FeatureService(List.of());
+
         transportService = new TransportService(
             SETTINGS,
             transport,
@@ -406,7 +410,7 @@ public ClusterState reroute(ClusterState state, ClusterRerouteRequest request) {

     public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNode, TransportVersion transportVersion) {
         return runTasks(
-            new NodeJoinExecutor(allocationService, (s, p, r) -> {}),
+            new NodeJoinExecutor(allocationService, (s, p, r) -> {}, featureService),
             clusterState,
             List.of(
                 JoinTask.singleNode(
@@ -423,7 +427,7 @@ public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNo

     public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List nodes, TransportVersion transportVersion) {
         return runTasks(
-            new NodeJoinExecutor(allocationService, (s, p, r) -> {}),
+            new NodeJoinExecutor(allocationService, (s, p, r) -> {}, featureService),
             clusterState,
             List.of(
                 JoinTask.completingElection(
diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
index d4ebf1f44182a..8f0444287d07e 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
@@ -382,16 +382,24 @@ public void onFailure(Exception e) {
                     new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()),
                     result
                 );
-                SearchPhaseResult searchPhaseResult = result.get();
-                List intCursors = new ArrayList<>(1);
-                intCursors.add(0);
-                ShardFetchRequest req = new ShardFetchRequest(searchPhaseResult.getContextId(), intCursors, null/* not a scroll */);
-                PlainActionFuture listener = new PlainActionFuture<>();
-                service.executeFetchPhase(req, new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), listener);
-                listener.get();
-                if (useScroll) {
-                    // have to free context since this test does not remove the index from IndicesService.
- service.freeReaderContext(searchPhaseResult.getContextId()); + final SearchPhaseResult searchPhaseResult = result.get(); + try { + List intCursors = new ArrayList<>(1); + intCursors.add(0); + ShardFetchRequest req = new ShardFetchRequest( + searchPhaseResult.getContextId(), + intCursors, + null/* not a scroll */ + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.executeFetchPhase(req, new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), listener); + listener.get().decRef(); + if (useScroll) { + // have to free context since this test does not remove the index from IndicesService. + service.freeReaderContext(searchPhaseResult.getContextId()); + } + } finally { + searchPhaseResult.decRef(); } } catch (ExecutionException ex) { assertThat(ex.getCause(), instanceOf(RuntimeException.class)); @@ -1046,6 +1054,7 @@ public void onResponse(SearchPhaseResult searchPhaseResult) { // make sure that the wrapper is called when the query is actually executed assertEquals(6, numWrapInvocations.get()); } finally { + searchPhaseResult.decRef(); latch.countDown(); } } @@ -1360,6 +1369,7 @@ public void onResponse(SearchPhaseResult result) { assertNotNull(result.queryResult().topDocs()); assertNotNull(result.queryResult().aggregations()); } finally { + result.decRef(); latch.countDown(); } } @@ -1390,6 +1400,7 @@ public void onResponse(SearchPhaseResult result) { assertNotNull(result.queryResult().topDocs()); assertNotNull(result.queryResult().aggregations()); } finally { + result.decRef(); latch.countDown(); } } @@ -1418,6 +1429,7 @@ public void onResponse(SearchPhaseResult result) { assertThat(result, instanceOf(QuerySearchResult.class)); assertTrue(result.queryResult().isNull()); } finally { + result.decRef(); latch.countDown(); } } @@ -1558,6 +1570,7 @@ public void testCancelQueryPhaseEarly() throws Exception { @Override public void onResponse(SearchPhaseResult searchPhaseResult) { service.freeReaderContext(searchPhaseResult.getContextId()); + searchPhaseResult.decRef(); latch1.countDown(); } @@ -1602,6 +1615,7 @@ public void onResponse(SearchPhaseResult searchPhaseResult) { fail("Search not cancelled early"); } finally { service.freeReaderContext(searchPhaseResult.getContextId()); + searchPhaseResult.decRef(); latch3.countDown(); } } @@ -1728,7 +1742,11 @@ public void testWaitOnRefresh() { ); service.executeQueryPhase(request, task, future); SearchPhaseResult searchPhaseResult = future.actionGet(); - assertEquals(1, searchPhaseResult.queryResult().getTotalHits().value); + try { + assertEquals(1, searchPhaseResult.queryResult().getTotalHits().value); + } finally { + searchPhaseResult.decRef(); + } } public void testWaitOnRefreshFailsWithRefreshesDisabled() { @@ -1907,7 +1925,7 @@ public void testDfsQueryPhaseRewrite() { plainActionFuture ); - plainActionFuture.actionGet(); + plainActionFuture.actionGet().decRef(); assertThat(((TestRewriteCounterQueryBuilder) request.source().query()).asyncRewriteCount, equalTo(1)); final ShardSearchContextId contextId = context.id(); assertTrue(service.freeReaderContext(contextId)); @@ -2068,114 +2086,37 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { try (ReaderContext readerContext = createReaderContext(indexService, indexShard)) { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true); - ContextIndexSearcher searcher = searchContext.searcher(); - 
assertNotNull(searcher.getExecutor()); - - final int maxPoolSize = executor.getMaximumPoolSize(); - assertEquals( - "Sanity check to ensure this isn't the default of 1 when pool size is unset", - configuredMaxPoolSize, - maxPoolSize - ); + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { + ContextIndexSearcher searcher = searchContext.searcher(); + assertNotNull(searcher.getExecutor()); - final int expectedSlices = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), maxPoolSize, 1).length; - assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); - - final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); - assertBusy( - () -> assertEquals( - "DFS supports parallel collection, so the number of slices should be > 1.", - expectedSlices, - executor.getCompletedTaskCount() - priorExecutorTaskCount - ) - ); - } - { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true); - ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); - - final int maxPoolSize = executor.getMaximumPoolSize(); - assertEquals( - "Sanity check to ensure this isn't the default of 1 when pool size is unset", - configuredMaxPoolSize, - maxPoolSize - ); + final int maxPoolSize = executor.getMaximumPoolSize(); + assertEquals( + "Sanity check to ensure this isn't the default of 1 when pool size is unset", + configuredMaxPoolSize, + maxPoolSize + ); - final int expectedSlices = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), maxPoolSize, 1).length; - assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); - - final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); - assertBusy( - () -> assertEquals( - "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", - expectedSlices, - executor.getCompletedTaskCount() - priorExecutorTaskCount - ) - ); - } - { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true); - ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); - final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); - assertBusy( - () -> assertEquals( - "The number of slices should be 1 as FETCH does not support parallel collection.", - 1, - executor.getCompletedTaskCount() - priorExecutorTaskCount - ) - ); - } - { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true); - ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); - final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); - assertBusy( - () -> assertEquals( - "The number of slices should be 1 as NONE does not support parallel collection.", - 1, - executor.getCompletedTaskCount() - priorExecutorTaskCount - ) - ); - } + final int expectedSlices = ContextIndexSearcher.computeSlices( + searcher.getIndexReader().leaves(), + maxPoolSize, + 1 + ).length; + assertNotEquals("Sanity check to ensure this isn't the default 
of 1 when pool size is unset", 1, expectedSlices); - try { - ClusterUpdateSettingsResponse response = client().admin() - .cluster() - .prepareUpdateSettings() - .setPersistentSettings(Settings.builder().put(QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), false).build()) - .get(); - assertTrue(response.isAcknowledged()); - { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true); - ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( () -> assertEquals( - "The number of slices should be 1 when QUERY parallel collection is disabled.", - 1, + "DFS supports parallel collection, so the number of slices should be > 1.", + expectedSlices, executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); } - } finally { - // Reset to the original default setting and check to ensure it takes effect. - client().admin() - .cluster() - .prepareUpdateSettings() - .setPersistentSettings(Settings.builder().putNull(QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey()).build()) - .get(); - { - SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true); + } + { + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); assertNotNull(searcher.getExecutor()); @@ -2204,6 +2145,97 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { ); } } + { + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { + ContextIndexSearcher searcher = searchContext.searcher(); + assertNotNull(searcher.getExecutor()); + final long priorExecutorTaskCount = executor.getCompletedTaskCount(); + searcher.search(termQuery, new TotalHitCountCollectorManager()); + assertBusy( + () -> assertEquals( + "The number of slices should be 1 as FETCH does not support parallel collection.", + 1, + executor.getCompletedTaskCount() - priorExecutorTaskCount + ) + ); + } + } + { + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { + ContextIndexSearcher searcher = searchContext.searcher(); + assertNotNull(searcher.getExecutor()); + final long priorExecutorTaskCount = executor.getCompletedTaskCount(); + searcher.search(termQuery, new TotalHitCountCollectorManager()); + assertBusy( + () -> assertEquals( + "The number of slices should be 1 as NONE does not support parallel collection.", + 1, + executor.getCompletedTaskCount() - priorExecutorTaskCount + ) + ); + } + } + + try { + ClusterUpdateSettingsResponse response = client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), false).build()) + .get(); + assertTrue(response.isAcknowledged()); + { + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { + ContextIndexSearcher searcher = searchContext.searcher(); + assertNotNull(searcher.getExecutor()); + final long priorExecutorTaskCount = executor.getCompletedTaskCount(); + searcher.search(termQuery, new TotalHitCountCollectorManager()); + assertBusy( + () -> assertEquals( + "The number of slices should be 1 when QUERY parallel collection is disabled.", + 1, 
+ executor.getCompletedTaskCount() - priorExecutorTaskCount + ) + ); + } + } + } finally { + // Reset to the original default setting and check to ensure it takes effect. + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull(QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey()).build()) + .get(); + { + try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { + ContextIndexSearcher searcher = searchContext.searcher(); + assertNotNull(searcher.getExecutor()); + + final int maxPoolSize = executor.getMaximumPoolSize(); + assertEquals( + "Sanity check to ensure this isn't the default of 1 when pool size is unset", + configuredMaxPoolSize, + maxPoolSize + ); + + final int expectedSlices = ContextIndexSearcher.computeSlices( + searcher.getIndexReader().leaves(), + maxPoolSize, + 1 + ).length; + assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); + + final long priorExecutorTaskCount = executor.getCompletedTaskCount(); + searcher.search(termQuery, new TotalHitCountCollectorManager()); + assertBusy( + () -> assertEquals( + "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", + expectedSlices, + executor.getCompletedTaskCount() - priorExecutorTaskCount + ) + ); + } + } + } } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 9569bd982363e..065a8bb22ab68 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -1065,7 +1065,7 @@ public T search(Query query, CollectorManager col } }; - SearchContext context = new TestSearchContext(null, indexShard, searcher) { + try (SearchContext context = new TestSearchContext(null, indexShard, searcher) { @Override public Query buildFilteredQuery(Query query) { return query; @@ -1075,37 +1075,38 @@ public Query buildFilteredQuery(Query query) { public ReaderContext readerContext() { return new ReaderContext(new ShardSearchContextId("test", 1L), null, indexShard, null, 0L, false); } - }; + }) { - List queries = List.of(new TermQuery(new Term("field0", "term")), new TermQuery(new Term("field1", "term0"))); - context.parsedQuery( - new ParsedQuery(new BooleanQuery.Builder().add(queries.get(0), Occur.SHOULD).add(queries.get(1), Occur.SHOULD).build()) - ); - context.rankShardContext(new RankShardContext(queries, 0, 100) { - @Override - public RankShardResult combine(List rankResults) { - return null; - } - }); - - context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED); - context.aggregations(null); - QueryPhase.executeRank(context); - assertEquals(queries, executed); - - executed.clear(); - context.trackTotalHitsUpTo(100); - context.aggregations(null); - QueryPhase.executeRank(context); - assertEquals(context.rewrittenQuery(), executed.get(0)); - assertEquals(queries, executed.subList(1, executed.size())); - - executed.clear(); - context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED); - context.aggregations(new SearchContextAggregations(AggregatorFactories.EMPTY, () -> null)); - QueryPhase.executeRank(context); - assertEquals(context.rewrittenQuery(), executed.get(0)); - assertEquals(queries, executed.subList(1, executed.size())); + List queries = List.of(new TermQuery(new Term("field0", "term")), new 
TermQuery(new Term("field1", "term0"))); + context.parsedQuery( + new ParsedQuery(new BooleanQuery.Builder().add(queries.get(0), Occur.SHOULD).add(queries.get(1), Occur.SHOULD).build()) + ); + context.rankShardContext(new RankShardContext(queries, 0, 100) { + @Override + public RankShardResult combine(List rankResults) { + return null; + } + }); + + context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED); + context.aggregations(null); + QueryPhase.executeRank(context); + assertEquals(queries, executed); + + executed.clear(); + context.trackTotalHitsUpTo(100); + context.aggregations(null); + QueryPhase.executeRank(context); + assertEquals(context.rewrittenQuery(), executed.get(0)); + assertEquals(queries, executed.subList(1, executed.size())); + + executed.clear(); + context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED); + context.aggregations(new SearchContextAggregations(AggregatorFactories.EMPTY, () -> null)); + QueryPhase.executeRank(context); + assertEquals(context.rewrittenQuery(), executed.get(0)); + assertEquals(queries, executed.subList(1, executed.size())); + } } private static final QueryCachingPolicy NEVER_CACHE_POLICY = new QueryCachingPolicy() { diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java index c728bed5ed7bb..516ffeb9418bd 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java @@ -97,28 +97,36 @@ private static QuerySearchResult createTestInstance() throws Exception { public void testSerialization() throws Exception { QuerySearchResult querySearchResult = createTestInstance(); - boolean delayed = randomBoolean(); - QuerySearchResult deserialized = copyWriteable( - querySearchResult, - namedWriteableRegistry, - delayed ? in -> new QuerySearchResult(in, true) : QuerySearchResult::new, - TransportVersion.current() - ); - assertEquals(querySearchResult.getContextId().getId(), deserialized.getContextId().getId()); - assertNull(deserialized.getSearchShardTarget()); - assertEquals(querySearchResult.topDocs().maxScore, deserialized.topDocs().maxScore, 0f); - assertEquals(querySearchResult.topDocs().topDocs.totalHits, deserialized.topDocs().topDocs.totalHits); - assertEquals(querySearchResult.from(), deserialized.from()); - assertEquals(querySearchResult.size(), deserialized.size()); - assertEquals(querySearchResult.hasAggs(), deserialized.hasAggs()); - if (deserialized.hasAggs()) { - assertThat(deserialized.aggregations().isSerialized(), is(delayed)); - Aggregations aggs = querySearchResult.consumeAggs(); - Aggregations deserializedAggs = deserialized.consumeAggs(); - assertEquals(aggs.asList(), deserializedAggs.asList()); - assertThat(deserialized.aggregations(), is(nullValue())); + try { + boolean delayed = randomBoolean(); + QuerySearchResult deserialized = copyWriteable( + querySearchResult, + namedWriteableRegistry, + delayed ? 
in -> new QuerySearchResult(in, true) : QuerySearchResult::new, + TransportVersion.current() + ); + try { + assertEquals(querySearchResult.getContextId().getId(), deserialized.getContextId().getId()); + assertNull(deserialized.getSearchShardTarget()); + assertEquals(querySearchResult.topDocs().maxScore, deserialized.topDocs().maxScore, 0f); + assertEquals(querySearchResult.topDocs().topDocs.totalHits, deserialized.topDocs().topDocs.totalHits); + assertEquals(querySearchResult.from(), deserialized.from()); + assertEquals(querySearchResult.size(), deserialized.size()); + assertEquals(querySearchResult.hasAggs(), deserialized.hasAggs()); + if (deserialized.hasAggs()) { + assertThat(deserialized.aggregations().isSerialized(), is(delayed)); + Aggregations aggs = querySearchResult.consumeAggs(); + Aggregations deserializedAggs = deserialized.consumeAggs(); + assertEquals(aggs.asList(), deserializedAggs.asList()); + assertThat(deserialized.aggregations(), is(nullValue())); + } + assertEquals(querySearchResult.terminatedEarly(), deserialized.terminatedEarly()); + } finally { + deserialized.decRef(); + } + } finally { + querySearchResult.decRef(); } - assertEquals(querySearchResult.terminatedEarly(), deserialized.terminatedEarly()); } public void testNullResponse() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index aa4e071ba17fa..e87184a38a776 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2204,7 +2204,7 @@ public void start(ClusterState initialState) { LeaderHeartbeatService.NO_OP, StatefulPreVoteCollector::new, CompatibilityVersionsUtils.staticCurrent(), - Set.of() + new FeatureService(List.of()) ); masterService.setClusterStatePublisher(coordinator); coordinator.start(); diff --git a/settings.gradle b/settings.gradle index 09aaef7ede189..74315c6516653 100644 --- a/settings.gradle +++ b/settings.gradle @@ -106,7 +106,8 @@ List projects = [ 'test:logger-usage', 'test:test-clusters', 'test:x-content', - 'test:yaml-rest-runner' + 'test:yaml-rest-runner', + 'test:metadata-extractor' ] /** diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index a5c17b32173a5..611f2ab9f5749 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -68,6 +68,7 @@ import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.SeedHostsProvider; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.ClusterStateUpdaters; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.MockGatewayMetaState; @@ -952,6 +953,7 @@ public final class ClusterNode { private AckedFakeThreadPoolMasterService masterService; private DisruptableClusterApplierService clusterApplierService; private ClusterService clusterService; + private FeatureService featureService; TransportService transportService; private MasterHistoryService masterHistoryService; CoordinationDiagnosticsService coordinationDiagnosticsService; @@ 
-1114,6 +1116,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { threadPool ); clusterService = new ClusterService(settings, clusterSettings, masterService, clusterApplierService); + featureService = new FeatureService(List.of()); masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); clusterService.setNodeConnectionsService( new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService) @@ -1152,7 +1155,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { coordinationServices.getLeaderHeartbeatService(), coordinationServices.getPreVoteCollectorFactory(), CompatibilityVersionsUtils.staticCurrent(), - Set.of() + featureService ); coordinationDiagnosticsService = new CoordinationDiagnosticsService( clusterService, diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 825325c00a70b..e73cd086bc019 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -329,7 +329,10 @@ public static void getClusterStateWithDataStreams( ) { builder.put( "template_1", - new ComposableIndexTemplate(List.of("*"), null, null, null, null, null, new ComposableIndexTemplate.DataStreamTemplate()) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); List allIndices = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index ebc5ca4cd0fd3..c1c4d70e0b906 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -132,7 +132,12 @@ protected SearchContext createContext( boolean includeAggregations ) throws IOException { SearchContext searchContext = super.createContext(readerContext, request, task, resultsType, includeAggregations); - onCreateSearchContext.accept(searchContext); + try { + onCreateSearchContext.accept(searchContext); + } catch (Exception e) { + searchContext.close(); + throw e; + } return searchContext; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 10eaf322f9504..fdc7647ce7bdd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -330,11 +330,14 @@ public static void assertSearchHit(SearchResponse searchResponse, int number, Ma assertThat(searchResponse.getHits().getAt(number - 1), matcher); } - public static void assertNoFailures(SearchRequestBuilder searchRequestBuilder) { + public static void assertNoFailures(ActionRequestBuilder<?, SearchResponse> searchRequestBuilder) { assertNoFailuresAndResponse(searchRequestBuilder, r -> {}); } - public static void assertNoFailuresAndResponse(SearchRequestBuilder searchRequestBuilder, Consumer<SearchResponse> consumer) { + public static void assertNoFailuresAndResponse( + ActionRequestBuilder<?, SearchResponse> searchRequestBuilder, + Consumer<SearchResponse> consumer + ) { assertResponse(searchRequestBuilder, res
-> { assertNoFailures(res); consumer.accept(res); @@ -352,7 +355,7 @@ public static void assertNoFailuresAndResponse(ActionFuture<SearchResponse> resp } } - public static void assertResponse(SearchRequestBuilder searchRequestBuilder, Consumer<SearchResponse> consumer) { + public static void assertResponse(ActionRequestBuilder<?, SearchResponse> searchRequestBuilder, Consumer<SearchResponse> consumer) { var res = searchRequestBuilder.get(); try { consumer.accept(res); @@ -372,7 +375,7 @@ public static void assertResponse(ActionFuture<SearchResponse> responseFuture, C } public static void assertCheckedResponse( - SearchRequestBuilder searchRequestBuilder, + ActionRequestBuilder<?, SearchResponse> searchRequestBuilder, CheckedConsumer<SearchResponse, IOException> consumer ) throws IOException { var res = searchRequestBuilder.get(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 9566456a041bc..aff8a20aa88b6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -52,9 +52,11 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -75,6 +77,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.UncheckedIOException; import java.nio.CharBuffer; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -90,6 +93,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -132,6 +136,8 @@ public abstract class ESRestTestCase extends ESTestCase { public static final String CLIENT_SOCKET_TIMEOUT = "client.socket.timeout"; public static final String CLIENT_PATH_PREFIX = "client.path.prefix"; + private static Map<NodeFeature, Version> historicalFeatures; + /** * Convert the entity from a {@link Response} into a map of maps. */ @@ -1671,23 +1677,20 @@ private static void updateIndexSettings(String index, Settings settings) throws client().performRequest(request); } - protected static void expectSoftDeletesWarning(Request request, String indexName) { - final List<String> expectedWarnings = List.of( + protected static void expectSoftDeletesWarning(Request request, String indexName) throws IOException { + final String expectedWarning = "Creating indices with soft-deletes disabled is deprecated and will be removed in future Elasticsearch versions. " + "Please do not specify value for setting [index.soft_deletes.enabled] of index [" + indexName - + "]."
- ); - if (nodeVersions.stream().allMatch(version -> version.onOrAfter(Version.V_7_6_0))) { - request.setOptions( - RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false) - ); - } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(Version.V_7_6_0))) { - request.setOptions( - RequestOptions.DEFAULT.toBuilder() - .setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false) - ); - } + + "]."; + + final var softDeleteDisabledDeprecated = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); + request.setOptions(expectVersionSpecificWarnings(v -> { + if (softDeleteDisabledDeprecated) { + v.current(expectedWarning); + } + v.compatible(expectedWarning); + })); } protected static Map<String, Object> getIndexSettings(String index) throws IOException { @@ -1987,7 +1990,7 @@ public void assertEmptyTranslog(String index) throws Exception { * that we have renewed every PRRL to the global checkpoint of the corresponding copy and properly synced to all copies. */ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) throws Exception { - boolean mustHavePRRLs = minimumNodeVersion().onOrAfter(Version.V_7_6_0); + boolean mustHavePRRLs = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); assertBusy(() -> { Map<String, Object> stats = entityAsMap(client().performRequest(new Request("GET", index + "/_stats?level=shards"))); @SuppressWarnings("unchecked") @@ -2213,4 +2216,31 @@ private static boolean isMlEnabled() { } } + protected Map<NodeFeature, Version> getHistoricalFeatures() { + if (historicalFeatures == null) { + Map<NodeFeature, Version> historicalFeaturesMap = new HashMap<>(); + String metadataPath = System.getProperty("tests.features.metadata.path"); + if (metadataPath == null) { + throw new UnsupportedOperationException("Historical features information is unavailable when using legacy test plugins."); + } + + String[] metadataFiles = metadataPath.split(System.getProperty("path.separator")); + for (String metadataFile : metadataFiles) { + try ( + InputStream in = Files.newInputStream(PathUtils.get(metadataFile)); + XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in) + ) { + for (Map.Entry<String, String> entry : parser.mapStrings().entrySet()) { + historicalFeaturesMap.put(new NodeFeature(entry.getKey()), Version.fromString(entry.getValue())); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + historicalFeatures = Collections.unmodifiableMap(historicalFeaturesMap); + } + + return historicalFeatures; + } } diff --git a/test/metadata-extractor/build.gradle b/test/metadata-extractor/build.gradle new file mode 100644 index 0000000000000..8d720dab2dbc2 --- /dev/null +++ b/test/metadata-extractor/build.gradle @@ -0,0 +1,8 @@ +plugins { + id 'elasticsearch.java' +} + +dependencies { + implementation project(':server') + testImplementation project(':test:framework') +} diff --git a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java new file mode 100644 index 0000000000000..c969b09ea982d --- /dev/null +++ b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.extractor.features; + +import org.elasticsearch.Version; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xcontent.XContentGenerator; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.io.OutputStream; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.util.HashMap; +import java.util.Map; +import java.util.ServiceLoader; + +public class HistoricalFeaturesMetadataExtractor { + private final ClassLoader classLoader; + + public HistoricalFeaturesMetadataExtractor(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + public static void main(String[] args) { + if (args.length != 1) { + printUsageAndExit(); + } + + Path outputFile = null; + try { + outputFile = Paths.get(args[0]); + } catch (InvalidPathException e) { + printUsageAndExit(); + } + + new HistoricalFeaturesMetadataExtractor(HistoricalFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile( + outputFile + ); + } + + public void generateMetadataFile(Path outputFile) { + try ( + OutputStream os = Files.newOutputStream(outputFile, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE); + XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os) + ) { + generator.writeStartObject(); + for (Map.Entry<NodeFeature, Version> entry : extractHistoricalFeatureMetadata().entrySet()) { + generator.writeStringField(entry.getKey().id(), entry.getValue().toString()); + } + generator.writeEndObject(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + public Map<NodeFeature, Version> extractHistoricalFeatureMetadata() { + Map<NodeFeature, Version> historicalFeatures = new HashMap<>(); + ServiceLoader<FeatureSpecification> featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader); + for (FeatureSpecification featureSpecification : featureSpecLoader) { + historicalFeatures.putAll(featureSpecification.getHistoricalFeatures()); + } + + return historicalFeatures; + } + + private static void printUsageAndExit() { + System.err.println("Usage: HistoricalFeaturesMetadataExtractor <output file>"); + System.exit(1); + } +} diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java new file mode 100644 index 0000000000000..ba80decd046e6 --- /dev/null +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.extractor.features; + +import org.elasticsearch.Version; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; + +import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.not; + +public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase { + + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + + public void testExtractHistoricalMetadata() throws IOException { + HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader()); + Map<NodeFeature, Version> nodeFeatureVersionMap = extractor.extractHistoricalFeatureMetadata(); + assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + + Path outputFile = temporaryFolder.newFile().toPath(); + extractor.generateMetadataFile(outputFile); + try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) { + Map<String, String> parsedMap = parser.mapStrings(); + for (Map.Entry<NodeFeature, Version> entry : nodeFeatureVersionMap.entrySet()) { + assertThat(parsedMap, hasEntry(entry.getKey().id(), entry.getValue().toString())); + } + } + } +} diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java index b9a6edfb958f3..1e6a9a9998a82 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java @@ -13,15 +13,20 @@ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.util.Map; public class ResourceUtils { public static final String APM_TEMPLATE_VERSION_VARIABLE = "xpack.apmdata.template.version"; static byte[] loadVersionedResourceUTF8(String name, int version) { + return loadVersionedResourceUTF8(name, version, Map.of()); + } + + static byte[] loadVersionedResourceUTF8(String name, int version, Map<String, String> variables) { try { String content = loadResource(name); - content = TemplateUtils.replaceVariable(content, APM_TEMPLATE_VERSION_VARIABLE, String.valueOf(version)); + content = TemplateUtils.replaceVariables(content, String.valueOf(version), APM_TEMPLATE_VERSION_VARIABLE, variables); return content.getBytes(StandardCharsets.UTF_8); } catch (IOException e) { throw new RuntimeException(e); diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java index 938fd69f80abe..de1b715dd138d 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java @@ -31,6 +31,6 @@ public XContentType getXContentType() { @Override public BytesReference loadConfig() { - return new BytesArray(loadVersionedResourceUTF8("/ingest-pipelines/" + id + ".yaml", version)); + return new
BytesArray(loadVersionedResourceUTF8("/ingest-pipelines/" + id + ".yaml", version, variables)); } } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 6dc940d191685..9ceac97bce384 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -1238,7 +1238,6 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100957") public void testCancelViaTasksAPI() throws Exception { Map<String, Object> testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); @@ -1324,13 +1323,11 @@ public void testCancelViaTasksAPI() throws Exception { assertTrue(searchResponseAfterCancellation.isRunning()); assertFalse(searchResponseAfterCancellation.getSearchResponse().isTimedOut()); assertThat(searchResponseAfterCancellation.getSearchResponse().getClusters().getTotal(), equalTo(2)); - assertThat(searchResponseAfterCancellation.getSearchResponse().getFailedShards(), equalTo(0)); AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); assertTrue(statusResponse.isPartial()); assertTrue(statusResponse.isRunning()); assertThat(statusResponse.getClusters().getTotal(), equalTo(2)); - assertThat(statusResponse.getFailedShards(), equalTo(0)); assertNull(statusResponse.getCompletionStatus()); } finally { @@ -1345,7 +1342,6 @@ public void testCancelViaTasksAPI() throws Exception { assertTrue(statusResponseAfterCompletion.isPartial()); assertFalse(statusResponseAfterCompletion.isRunning()); assertThat(statusResponseAfterCompletion.getClusters().getTotal(), equalTo(2)); - assertThat(statusResponseAfterCompletion.getFailedShards(), greaterThan(0)); assertThat(statusResponseAfterCompletion.getCompletionStatus(), equalTo(RestStatus.BAD_REQUEST)); AsyncSearchResponse searchResponseAfterCompletion = getAsyncSearch(response.getId()); @@ -1353,11 +1349,8 @@ public void testCancelViaTasksAPI() throws Exception { assertFalse(searchResponseAfterCompletion.isRunning()); assertFalse(searchResponseAfterCompletion.getSearchResponse().isTimedOut()); assertThat(searchResponseAfterCompletion.getSearchResponse().getClusters().getTotal(), equalTo(2)); - assertThat(searchResponseAfterCompletion.getSearchResponse().getFailedShards(), greaterThan(0)); Throwable cause = ExceptionsHelper.unwrap(searchResponseAfterCompletion.getFailure(), TaskCancelledException.class); assertNotNull("TaskCancelledException should be in the causal chain", cause); - ShardSearchFailure[] shardFailures = searchResponseAfterCompletion.getSearchResponse().getShardFailures(); - assertThat(shardFailures.length, greaterThan(0)); String json = Strings.toString( ChunkedToXContent.wrapAsToXContent(searchResponseAfterCompletion) .toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index f6cf0bf6583b4..6368f6a108530 ---
a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.SearchService; -import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; @@ -37,16 +36,13 @@ import org.elasticsearch.xpack.core.search.action.SubmitAsyncSearchRequest; import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; public class TransportSubmitAsyncSearchAction extends HandledTransportAction<SubmitAsyncSearchRequest, AsyncSearchResponse> { private final ClusterService clusterService; private final NodeClient nodeClient; - private final BiFunction<Supplier<Boolean>, SearchRequest, AggregationReduceContext> requestToAggReduceContextBuilder; + private final SearchService searchService; private final TransportSearchAction searchAction; private final ThreadContext threadContext; private final AsyncTaskIndexService<AsyncSearchResponse> store; @@ -72,10 +68,7 @@ public TransportSubmitAsyncSearchAction( ); this.clusterService = clusterService; this.nodeClient = nodeClient; - this.requestToAggReduceContextBuilder = (task, request) -> searchService.aggReduceContextBuilder( - task, - request.source().aggregations() - ).forFinalReduction(); + this.searchService = searchService; this.searchAction = searchAction; this.threadContext = transportService.getThreadPool().getThreadContext(); this.store = new AsyncTaskIndexService<>( @@ -162,12 +155,11 @@ private SearchRequest createSearchRequest(SubmitAsyncSearchRequest request, Task nodeClient.threadPool().getThreadContext(), clusterService.state() ); - SearchRequest searchRequest = new SearchRequest(request.getSearchRequest()) { + var originalSearchRequest = request.getSearchRequest(); + SearchRequest searchRequest = new SearchRequest(originalSearchRequest) { @Override public AsyncSearchTask createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> taskHeaders) { AsyncExecutionId searchId = new AsyncExecutionId(docID, new TaskId(nodeClient.getLocalNodeId(), id)); - Function<Supplier<Boolean>, Supplier<AggregationReduceContext>> aggReduceContextSupplierFactory = - isCancelled -> () -> requestToAggReduceContextBuilder.apply(isCancelled, request.getSearchRequest()); return new AsyncSearchTask( id, type, @@ -180,7 +172,8 @@ public AsyncSearchTask createTask(long id, String type, String action, TaskId pa searchId, store.getClientWithOrigin(), nodeClient.threadPool(), - aggReduceContextSupplierFactory + isCancelled -> () -> searchService.aggReduceContextBuilder(isCancelled, originalSearchRequest.source().aggregations()) + .forFinalReduction() ); } }; diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java index c214cc6006eee..2cbe06000422c 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java +++
b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java @@ -123,16 +123,11 @@ private static void createDataStreamAndTemplate(String dataStreamName) throws IO client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request(dataStreamName + "_template").indexTemplate( - new ComposableIndexTemplate( - Collections.singletonList(dataStreamName), - new Template(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build(), null, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(dataStreamName)) + .template(new Template(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ) ).actionGet(); client().execute(CreateDataStreamAction.INSTANCE, new CreateDataStreamAction.Request(dataStreamName)).actionGet(); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 5031a52630033..f97a1d0754e6c 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -638,23 +638,20 @@ public void testAutoFollowDatastreamWithClosingFollowerIndex() throws Exception final String datastream = "logs-1"; PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request("template-id"); request.indexTemplate( - new ComposableIndexTemplate( - List.of("logs-*"), - new Template( - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .build(), - null, - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-*")) + .template( + new Template( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .build(), + null, + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); assertAcked(leaderClient().execute(PutComposableIndexTemplateAction.INSTANCE, request).get()); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index f21bfc07deba2..88482eabafed5 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -1754,7 +1754,7 @@ private String getIndexSettingsWithNestedMapping( private void putFollowerTemplate(String setting, String settingValue) { Template template = new Template(Settings.builder().put(setting, settingValue).build(), null, null); - ComposableIndexTemplate cit = new ComposableIndexTemplate(List.of("follower"), template, null, null, null, null); + ComposableIndexTemplate cit = ComposableIndexTemplate.builder().indexPatterns(List.of("follower")).template(template).build(); assertAcked( followerClient().execute( 
PutComposableIndexTemplateAction.INSTANCE, diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java index 6421b70f9e453..20231af156ee1 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierAllocationDeciderIT.java @@ -26,9 +26,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.core.DataTiersFeatureSetUsage; import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.core.action.XPackUsageResponse; +import org.elasticsearch.xpack.core.datatiers.DataTiersFeatureSetUsage; import org.junit.Before; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java index f669bb8589fd7..faeb760b3c181 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/DataTiersUsageRestCancellationIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.rest.action; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageResponse; +import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; import java.nio.file.Path; import java.util.Arrays; @@ -76,7 +76,7 @@ public void testCancellation() throws Exception { final SubscribableListener<Void> nodeStatsRequestsReleaseListener = new SubscribableListener<>(); for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { ((MockTransportService) transportService).addRequestHandlingBehavior( - TransportNodesStatsAction.TYPE.name() + "[n]", + NodesDataTiersUsageTransportAction.TYPE.name() + "[n]", (handler, request, channel, task) -> { tasksBlockedLatch.countDown(); nodeStatsRequestsReleaseListener.addListener( @@ -94,14 +94,13 @@ public void testCancellation() throws Exception { safeAwait(tasksBlockedLatch); // must wait for the node-level tasks to start to avoid cancelling being handled earlier cancellable.cancel(); - // NB this test works by blocking node-level stats requests; when #100230 is addressed this will need to target a different action.
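The cancellation test above follows a common MockTransportService pattern: intercept the node-level action (the "[n]" suffix), signal that the sub-task has started, and hold the real handler until the test releases a listener. A minimal sketch of that wiring, reusing the addRequestHandlingBehavior hook shown in the diff (the latch and listener names are illustrative, and ActionListener.running is assumed to be available in this version):

for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
    ((MockTransportService) transportService).addRequestHandlingBehavior(
        NodesDataTiersUsageTransportAction.TYPE.name() + "[n]",
        (handler, request, channel, task) -> {
            tasksBlockedLatch.countDown(); // the node-level sub-task is now running
            // park the request; the test cancels the parent task first, then releases this listener
            nodeStatsRequestsReleaseListener.addListener(ActionListener.running(() -> {
                try {
                    handler.messageReceived(request, channel, task);
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
            }));
        }
    );
}

Blocking at the transport layer rather than inside the action means cancellation is observed exactly where a slow node would stall in production.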
- assertAllCancellableTasksAreCancelled(TransportNodesStatsAction.TYPE.name()); + assertAllCancellableTasksAreCancelled(NodesDataTiersUsageTransportAction.TYPE.name()); assertAllCancellableTasksAreCancelled(XPackUsageAction.NAME); nodeStatsRequestsReleaseListener.onResponse(null); expectThrows(CancellationException.class, future::actionGet); - assertAllTasksHaveFinished(TransportNodesStatsAction.TYPE.name()); + assertAllTasksHaveFinished(NodesDataTiersUsageTransportAction.TYPE.name()); assertAllTasksHaveFinished(XPackUsageAction.NAME); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java index 3f2856472a182..819b0e01ac4de 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java @@ -43,15 +43,12 @@ protected String getOrigin() { protected Map<String, ComposableIndexTemplate> getComposableTemplateConfigs() { return Map.of( TEST_INDEX_TEMPLATE_ID, - new ComposableIndexTemplate( - List.of(TEST_INDEX_PATTERN), - null, - null, - 100L, - version, - null, - new ComposableIndexTemplate.DataStreamTemplate() - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of(TEST_INDEX_PATTERN)) + .priority(100L) + .version(version) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); } diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index deb3c4384a04b..c4c978f656d21 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -57,6 +57,7 @@ exports org.elasticsearch.xpack.core.common.validation; exports org.elasticsearch.xpack.core.common; exports org.elasticsearch.xpack.core.datastreams; + exports org.elasticsearch.xpack.core.datatiers; exports org.elasticsearch.xpack.core.deprecation; exports org.elasticsearch.xpack.core.downsample; exports org.elasticsearch.xpack.core.enrich.action; @@ -226,4 +227,6 @@ with org.elasticsearch.xpack.core.ml.MlConfigVersionComponent, org.elasticsearch.xpack.core.transform.TransformConfigVersionComponent; + + provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.core.XPackFeatures; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersUsageTransportAction.java deleted file mode 100644 index 295df1ea51b6b..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersUsageTransportAction.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.core; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; -import org.elasticsearch.action.admin.indices.stats.IndexShardStats; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.ParentTaskAssigningClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.RoutingNodes; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.DataTier; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.store.StoreStats; -import org.elasticsearch.protocol.xpack.XPackUsageRequest; -import org.elasticsearch.search.aggregations.metrics.TDigestState; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; -import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; -import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.StreamSupport; - -public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAction { - - private final Client client; - - @Inject - public DataTiersUsageTransportAction( - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client - ) { - super( - XPackUsageFeatureAction.DATA_TIERS.name(), - transportService, - clusterService, - threadPool, - actionFilters, - indexNameExpressionResolver - ); - this.client = client; - } - - @Override - protected void masterOperation( - Task task, - XPackUsageRequest request, - ClusterState state, - ActionListener<XPackUsageFeatureResponse> listener - ) { - new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() - .cluster() - .prepareNodesStats() - .all() - .setIndices(CommonStatsFlags.ALL) - .execute(listener.delegateFailureAndWrap((delegate, nodesStatsResponse) -> { - final RoutingNodes routingNodes = state.getRoutingNodes(); - final Map<String, IndexMetadata> indices = state.getMetadata().getIndices(); - - // Determine which tiers each index would prefer to be within - Map<String, String> indicesToTiers = tierIndices(indices); - - // Generate tier specific stats for the nodes and indices - Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = calculateStats( - nodesStatsResponse.getNodes(), - indicesToTiers, - routingNodes - ); - - delegate.onResponse(new XPackUsageFeatureResponse(new DataTiersFeatureSetUsage(tierSpecificStats))); - })); - } - - // Visible for testing - // Takes a registry of indices and returns a mapping of index name to the tier it most prefers. Always 1 to 1, though indices without a tier preference are filtered out.
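The removed tierIndices method itself follows below; first, a standalone plain-Java sketch of the classification rule the comment above describes, with hypothetical names and no Elasticsearch types (assume java.util imports):

static Map<String, String> classifyByPreferredTier(Map<String, String> tierPreferenceByIndex) {
    Map<String, String> indexByTier = new HashMap<>();
    for (Map.Entry<String, String> index : tierPreferenceByIndex.entrySet()) {
        String tierPref = index.getValue(); // e.g. "data_hot,data_warm"
        if (tierPref != null && tierPref.isBlank() == false) {
            String[] tiers = tierPref.split(",");
            if (tiers.length > 0) {
                indexByTier.put(index.getKey(), tiers[0]); // first entry wins: the most preferred tier
            }
        }
    }
    return indexByTier;
}

For example, classifyByPreferredTier(Map.of("logs-1", "data_hot,data_content")) yields {logs-1=data_hot}, while an index with no tier preference setting is simply dropped, which is the filtered-out case noted above.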
- static Map<String, String> tierIndices(Map<String, IndexMetadata> indices) { - Map<String, String> indexByTier = new HashMap<>(); - indices.entrySet().forEach(entry -> { - String tierPref = entry.getValue().getSettings().get(DataTier.TIER_PREFERENCE); - if (Strings.hasText(tierPref)) { - String[] tiers = tierPref.split(","); - if (tiers.length > 0) { - indexByTier.put(entry.getKey(), tiers[0]); - } - } - }); - return indexByTier; - } - - /** - * Accumulator to hold intermediate data tier stats before final calculation. - */ - private static class TierStatsAccumulator { - int nodeCount = 0; - Set<String> indexNames = new HashSet<>(); - int totalShardCount = 0; - long totalByteCount = 0; - long docCount = 0; - int primaryShardCount = 0; - long primaryByteCount = 0L; - final TDigestState valueSketch = TDigestState.create(1000); - } - - // Visible for testing - static Map<String, DataTiersFeatureSetUsage.TierSpecificStats> calculateStats( - List<NodeStats> nodesStats, - Map<String, String> indexByTier, - RoutingNodes routingNodes - ) { - Map<String, TierStatsAccumulator> statsAccumulators = new HashMap<>(); - for (NodeStats nodeStats : nodesStats) { - aggregateDataTierNodeCounts(nodeStats, statsAccumulators); - aggregateDataTierIndexStats(nodeStats, routingNodes, indexByTier, statsAccumulators); - } - Map<String, DataTiersFeatureSetUsage.TierSpecificStats> results = new HashMap<>(); - for (Map.Entry<String, TierStatsAccumulator> entry : statsAccumulators.entrySet()) { - results.put(entry.getKey(), calculateFinalTierStats(entry.getValue())); - } - return results; - } - - /** - * Determine which data tiers this node belongs to (if any), and increment the node counts for those tiers. - */ - private static void aggregateDataTierNodeCounts(NodeStats nodeStats, Map<String, TierStatsAccumulator> tiersStats) { - nodeStats.getNode() - .getRoles() - .stream() - .map(DiscoveryNodeRole::roleName) - .filter(DataTier::validTierName) - .forEach(tier -> tiersStats.computeIfAbsent(tier, k -> new TierStatsAccumulator()).nodeCount++); - } - - /** - * Locate which indices are hosted on the node specified by the NodeStats, then group and aggregate the available index stats by tier. - */ - private static void aggregateDataTierIndexStats( - NodeStats nodeStats, - RoutingNodes routingNodes, - Map<String, String> indexByTier, - Map<String, TierStatsAccumulator> accumulators - ) { - final RoutingNode node = routingNodes.node(nodeStats.getNode().getId()); - if (node != null) { - StreamSupport.stream(node.spliterator(), false) - .map(ShardRouting::index) - .distinct() - .forEach(index -> classifyIndexAndCollectStats(index, nodeStats, indexByTier, node, accumulators)); - } - } - - /** - * Determine which tier an index belongs in, then accumulate its stats into that tier's stats. - */ - private static void classifyIndexAndCollectStats( - Index index, - NodeStats nodeStats, - Map<String, String> indexByTier, - RoutingNode node, - Map<String, TierStatsAccumulator> accumulators - ) { - // Look up which tier this index belongs to (its most preferred) - String indexTier = indexByTier.get(index.getName()); - if (indexTier != null) { - final TierStatsAccumulator accumulator = accumulators.computeIfAbsent(indexTier, k -> new TierStatsAccumulator()); - accumulator.indexNames.add(index.getName()); - aggregateDataTierShardStats(nodeStats, index, node, accumulator); - } - } - - /** - * Collect shard-level data tier stats from shard stats contained in the node stats response.
- */ - private static void aggregateDataTierShardStats(NodeStats nodeStats, Index index, RoutingNode node, TierStatsAccumulator accumulator) { - // Shard based stats - final List<IndexShardStats> allShardStats = nodeStats.getIndices().getShardStats(index); - if (allShardStats != null) { - for (IndexShardStats shardStat : allShardStats) { - accumulator.totalByteCount += shardStat.getTotal().getStore().totalDataSetSizeInBytes(); - accumulator.docCount += shardStat.getTotal().getDocs().getCount(); - - // Accumulate stats about started shards - ShardRouting shardRouting = node.getByShardId(shardStat.getShardId()); - if (shardRouting != null && shardRouting.state() == ShardRoutingState.STARTED) { - accumulator.totalShardCount += 1; - - // Accumulate stats about started primary shards - StoreStats primaryStoreStats = shardStat.getPrimary().getStore(); - if (primaryStoreStats != null) { - // if primaryStoreStats is null, it means there is no primary on the node in question - accumulator.primaryShardCount++; - long primarySize = primaryStoreStats.totalDataSetSizeInBytes(); - accumulator.primaryByteCount += primarySize; - accumulator.valueSketch.add(primarySize); - } - } - } - } - } - - private static DataTiersFeatureSetUsage.TierSpecificStats calculateFinalTierStats(TierStatsAccumulator accumulator) { - long primaryShardSizeMedian = (long) accumulator.valueSketch.quantile(0.5); - long primaryShardSizeMAD = computeMedianAbsoluteDeviation(accumulator.valueSketch); - return new DataTiersFeatureSetUsage.TierSpecificStats( - accumulator.nodeCount, - accumulator.indexNames.size(), - accumulator.totalShardCount, - accumulator.primaryShardCount, - accumulator.docCount, - accumulator.totalByteCount, - accumulator.primaryByteCount, - primaryShardSizeMedian, - primaryShardSizeMAD - ); - } - - // Visible for testing - static long computeMedianAbsoluteDeviation(TDigestState valuesSketch) { - if (valuesSketch.size() == 0) { - return 0; - } else { - final double approximateMedian = valuesSketch.quantile(0.5); - final TDigestState approximatedDeviationsSketch = TDigestState.createUsingParamsFrom(valuesSketch); - valuesSketch.centroids().forEach(centroid -> { - final double deviation = Math.abs(approximateMedian - centroid.mean()); - approximatedDeviationsSketch.add(deviation, centroid.count()); - }); - - return (long) approximatedDeviationsSketch.quantile(0.5); - } - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 6d019e50f9d5f..ac16631bacb73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.datastreams.DataStreamFeatureSetUsage; import org.elasticsearch.xpack.core.datastreams.DataStreamLifecycleFeatureSetUsage; +import org.elasticsearch.xpack.core.datatiers.DataTiersFeatureSetUsage; import org.elasticsearch.xpack.core.downsample.DownsampleShardStatus; import org.elasticsearch.xpack.core.enrich.EnrichFeatureSetUsage; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java new file mode 100644 index 0000000000000..97934cbda09ab --- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; + +import java.util.Set; + +/** + * Provides the XPack features that this version of the code supports + */ +public class XPackFeatures implements FeatureSpecification { + + @Override + public Set<NodeFeature> getFeatures() { + return Set.of( + NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE // Added in 8.12 + ); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index d02e3f43d80cb..66534cccff064 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -98,6 +98,9 @@ import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; +import org.elasticsearch.xpack.core.datatiers.DataTiersInfoTransportAction; +import org.elasticsearch.xpack.core.datatiers.DataTiersUsageTransportAction; +import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.rest.action.RestXPackInfoAction; import org.elasticsearch.xpack.core.rest.action.RestXPackUsageAction; @@ -362,6 +365,7 @@ public Collection<?> createComponents(PluginServices services) { actions.add(new ActionHandler<>(XPackUsageFeatureAction.DATA_STREAM_LIFECYCLE, DataStreamLifecycleUsageTransportAction.class)); actions.add(new ActionHandler<>(XPackUsageFeatureAction.HEALTH, HealthApiUsageTransportAction.class)); actions.add(new ActionHandler<>(XPackUsageFeatureAction.REMOTE_CLUSTERS, RemoteClusterUsageTransportAction.class)); + actions.add(new ActionHandler<>(NodesDataTiersUsageTransportAction.TYPE, NodesDataTiersUsageTransportAction.class)); return actions; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java similarity index 98% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsage.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java index 0bf21f66b4888..f990118763bad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java @@ -5,7 +5,7 @@ * 2.0.
*/ -package org.elasticsearch.xpack.core; +package org.elasticsearch.xpack.core.datatiers; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -16,6 +16,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; import java.io.IOException; import java.util.Collections; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersInfoTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersInfoTransportAction.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersInfoTransportAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersInfoTransportAction.java index 6134813dc4651..3af1945c53d3f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTiersInfoTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersInfoTransportAction.java @@ -5,11 +5,12 @@ * 2.0. */ -package org.elasticsearch.xpack.core; +package org.elasticsearch.xpack.core.datatiers; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java new file mode 100644 index 0000000000000..728309926302a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportAction.java @@ -0,0 +1,255 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.datatiers; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.indices.NodeIndicesStats; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.search.aggregations.metrics.TDigestState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +public class DataTiersUsageTransportAction extends XPackUsageFeatureTransportAction { + + private final Client client; + private final FeatureService featureService; + + @Inject + public DataTiersUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client, + FeatureService featureService + ) { + super( + XPackUsageFeatureAction.DATA_TIERS.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.client = client; + this.featureService = featureService; + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener<XPackUsageFeatureResponse> listener + ) { + if (featureService.clusterHasFeature(state, NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE)) { + new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() + .cluster() + .execute( + NodesDataTiersUsageTransportAction.TYPE, + new NodesDataTiersUsageTransportAction.NodesRequest(), + listener.delegateFailureAndWrap((delegate, response) -> { + // Generate tier specific stats for the nodes and indices + delegate.onResponse( + new XPackUsageFeatureResponse( + new DataTiersFeatureSetUsage( + aggregateStats(response.getNodes(), getIndicesGroupedByTier(state, response.getNodes())) + ) + ) + ); + }) + ); + } else { + new ParentTaskAssigningClient(client, clusterService.localNode(), task).admin() + .cluster() + .prepareNodesStats() + .setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store)) +
.execute(listener.delegateFailureAndWrap((delegate, nodesStatsResponse) -> { + List<NodeDataTiersUsage> response = nodesStatsResponse.getNodes() + .stream() + .map( + nodeStats -> new NodeDataTiersUsage(nodeStats.getNode(), precalculateLocalStatsFromNodeStats(nodeStats, state)) + ) + .toList(); + delegate.onResponse( + new XPackUsageFeatureResponse( + new DataTiersFeatureSetUsage(aggregateStats(response, getIndicesGroupedByTier(state, response))) + ) + ); + })); + } + } + + // Visible for testing + static Map<String, Set<String>> getIndicesGroupedByTier(ClusterState state, List<NodeDataTiersUsage> nodes) { + Set<String> indices = nodes.stream() + .map(nodeResponse -> state.getRoutingNodes().node(nodeResponse.getNode().getId())) + .filter(Objects::nonNull) + .flatMap(node -> StreamSupport.stream(node.spliterator(), false)) + .map(ShardRouting::getIndexName) + .collect(Collectors.toSet()); + Map<String, Set<String>> indicesByTierPreference = new HashMap<>(); + for (String indexName : indices) { + IndexMetadata indexMetadata = state.metadata().index(indexName); + // If the index was deleted in the meantime, skip + if (indexMetadata == null) { + continue; + } + List<String> tierPreference = indexMetadata.getTierPreference(); + if (tierPreference.isEmpty() == false) { + indicesByTierPreference.computeIfAbsent(tierPreference.get(0), ignored -> new HashSet<>()).add(indexName); + } + } + return indicesByTierPreference; + } + + /** + * Accumulator to hold intermediate data tier stats before final calculation. + */ + private static class TierStatsAccumulator { + int nodeCount = 0; + Set<String> indexNames = new HashSet<>(); + int totalShardCount = 0; + long totalByteCount = 0; + long docCount = 0; + int primaryShardCount = 0; + long primaryByteCount = 0L; + final TDigestState valueSketch = TDigestState.create(1000); + } + + // Visible for testing + static Map<String, DataTiersFeatureSetUsage.TierSpecificStats> aggregateStats( + List<NodeDataTiersUsage> nodeDataTiersUsages, + Map<String, Set<String>> tierPreference + ) { + Map<String, TierStatsAccumulator> statsAccumulators = new HashMap<>(); + for (String tier : tierPreference.keySet()) { + statsAccumulators.put(tier, new TierStatsAccumulator()); + statsAccumulators.get(tier).indexNames.addAll(tierPreference.get(tier)); + } + for (NodeDataTiersUsage nodeDataTiersUsage : nodeDataTiersUsages) { + aggregateDataTierNodeCounts(nodeDataTiersUsage, statsAccumulators); + aggregateDataTierIndexStats(nodeDataTiersUsage, statsAccumulators); + } + Map<String, DataTiersFeatureSetUsage.TierSpecificStats> results = new HashMap<>(); + for (Map.Entry<String, TierStatsAccumulator> entry : statsAccumulators.entrySet()) { + results.put(entry.getKey(), aggregateFinalTierStats(entry.getValue())); + } + return results; + } + + /** + * Determine which data tiers each node belongs to (if any), and increment the node counts for those tiers. + */ + private static void aggregateDataTierNodeCounts(NodeDataTiersUsage nodeStats, Map<String, TierStatsAccumulator> tiersStats) { + nodeStats.getNode() + .getRoles() + .stream() + .map(DiscoveryNodeRole::roleName) + .filter(DataTier::validTierName) + .forEach(tier -> tiersStats.computeIfAbsent(tier, k -> new TierStatsAccumulator()).nodeCount++); + } + + /** + * Iterate the preferred tiers of the indices for a node and aggregate their stats.
+ */ + private static void aggregateDataTierIndexStats(NodeDataTiersUsage nodeDataTiersUsage, Map<String, TierStatsAccumulator> accumulators) { + for (Map.Entry<String, NodeDataTiersUsage.UsageStats> entry : nodeDataTiersUsage.getUsageStatsByTier().entrySet()) { + String tier = entry.getKey(); + NodeDataTiersUsage.UsageStats usage = entry.getValue(); + if (DataTier.validTierName(tier)) { + TierStatsAccumulator accumulator = accumulators.computeIfAbsent(tier, k -> new TierStatsAccumulator()); + accumulator.docCount += usage.getDocCount(); + accumulator.totalByteCount += usage.getTotalSize(); + accumulator.totalShardCount += usage.getTotalShardCount(); + for (Long primaryShardSize : usage.getPrimaryShardSizes()) { + accumulator.primaryShardCount += 1; + accumulator.primaryByteCount += primaryShardSize; + accumulator.valueSketch.add(primaryShardSize); + } + } + } + } + + private static DataTiersFeatureSetUsage.TierSpecificStats aggregateFinalTierStats(TierStatsAccumulator accumulator) { + long primaryShardSizeMedian = (long) accumulator.valueSketch.quantile(0.5); + long primaryShardSizeMAD = computeMedianAbsoluteDeviation(accumulator.valueSketch); + return new DataTiersFeatureSetUsage.TierSpecificStats( + accumulator.nodeCount, + accumulator.indexNames.size(), + accumulator.totalShardCount, + accumulator.primaryShardCount, + accumulator.docCount, + accumulator.totalByteCount, + accumulator.primaryByteCount, + primaryShardSizeMedian, + primaryShardSizeMAD + ); + } + + // Visible for testing + static long computeMedianAbsoluteDeviation(TDigestState valuesSketch) { + if (valuesSketch.size() == 0) { + return 0; + } else { + final double approximateMedian = valuesSketch.quantile(0.5); + final TDigestState approximatedDeviationsSketch = TDigestState.createUsingParamsFrom(valuesSketch); + valuesSketch.centroids().forEach(centroid -> { + final double deviation = Math.abs(approximateMedian - centroid.mean()); + approximatedDeviationsSketch.add(deviation, centroid.count()); + }); + + return (long) approximatedDeviationsSketch.quantile(0.5); + } + } + + /** + * In this method we use {@link NodesDataTiersUsageTransportAction#aggregateStats(RoutingNode, Metadata, NodeIndicesStats)} + * to precalculate the stats we need from {@link NodeStats} just like we do in NodesDataTiersUsageTransportAction. + * This way we can be backwards compatible without duplicating the calculation. This is only meant for + * backwards compatibility and should be removed afterwards. + */ + private static Map<String, NodeDataTiersUsage.UsageStats> precalculateLocalStatsFromNodeStats(NodeStats nodeStats, ClusterState state) { + RoutingNode routingNode = state.getRoutingNodes().node(nodeStats.getNode().getId()); + if (routingNode == null) { + return Map.of(); + } + + return NodesDataTiersUsageTransportAction.aggregateStats(routingNode, state.metadata(), nodeStats.getIndices()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java new file mode 100644 index 0000000000000..c1903a2910629 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java
new file mode 100644
index 0000000000000..c1903a2910629
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodeDataTiersUsage.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.datatiers;
+
+import org.elasticsearch.action.support.nodes.BaseNodeResponse;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Data tier usage statistics on a specific node. The statistics group the indices, shard sizes and shard counts
+ * based on their tier preference.
+ */
+public class NodeDataTiersUsage extends BaseNodeResponse {
+
+    private final Map<String, UsageStats> usageStatsByTier;
+
+    public static class UsageStats implements Writeable {
+        private final List<Long> primaryShardSizes;
+        private int totalShardCount;
+        private long docCount;
+        private long totalSize;
+
+        public UsageStats() {
+            this.primaryShardSizes = new ArrayList<>();
+            this.totalShardCount = 0;
+            this.docCount = 0;
+            this.totalSize = 0;
+        }
+
+        public UsageStats(List<Long> primaryShardSizes, int totalShardCount, long docCount, long totalSize) {
+            this.primaryShardSizes = primaryShardSizes;
+            this.totalShardCount = totalShardCount;
+            this.docCount = docCount;
+            this.totalSize = totalSize;
+        }
+
+        static UsageStats read(StreamInput in) throws IOException {
+            return new UsageStats(in.readCollectionAsList(StreamInput::readVLong), in.readVInt(), in.readVLong(), in.readVLong());
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeCollection(primaryShardSizes, StreamOutput::writeVLong);
+            out.writeVInt(totalShardCount);
+            out.writeVLong(docCount);
+            out.writeVLong(totalSize);
+        }
+
+        public void addPrimaryShardSize(long primaryShardSize) {
+            primaryShardSizes.add(primaryShardSize);
+        }
+
+        public void incrementTotalSize(long totalSize) {
+            this.totalSize += totalSize;
+        }
+
+        public void incrementDocCount(long docCount) {
+            this.docCount += docCount;
+        }
+
+        public void incrementTotalShardCount(int totalShardCount) {
+            this.totalShardCount += totalShardCount;
+        }
+
+        public List<Long> getPrimaryShardSizes() {
+            return primaryShardSizes;
+        }
+
+        public int getTotalShardCount() {
+            return totalShardCount;
+        }
+
+        public long getDocCount() {
+            return docCount;
+        }
+
+        public long getTotalSize() {
+            return totalSize;
+        }
+    }
+
+    public NodeDataTiersUsage(StreamInput in) throws IOException {
+        super(in);
+        usageStatsByTier = in.readMap(UsageStats::read);
+    }
+
+    public NodeDataTiersUsage(DiscoveryNode node, Map<String, UsageStats> usageStatsByTier) {
+        super(node);
+        this.usageStatsByTier = usageStatsByTier;
+    }
+
+    public Map<String, UsageStats> getUsageStatsByTier() {
+        return Map.copyOf(usageStatsByTier);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeMap(usageStatsByTier, (o, v) -> v.writeTo(o));
+    }
+}
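UsageStats is deliberately a small mutable accumulator. A hypothetical sketch of how the per-node collector fills it for a single started primary shard (numbers invented):

    NodeDataTiersUsage.UsageStats stats = new NodeDataTiersUsage.UsageStats();
    stats.incrementDocCount(1_000);    // docs reported by the shard's stats
    stats.incrementTotalSize(2_048);   // total data set size in bytes
    stats.incrementTotalShardCount(1); // counted only for STARTED shards
    stats.addPrimaryShardSize(2_048);  // recorded only for primary shards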
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java
new file mode 100644
index 0000000000000..06a3b47d47a65
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.datatiers;
+
+import org.elasticsearch.action.ActionType;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
+import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.nodes.BaseNodesRequest;
+import org.elasticsearch.action.support.nodes.BaseNodesResponse;
+import org.elasticsearch.action.support.nodes.TransportNodesAction;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.features.NodeFeature;
+import org.elasticsearch.index.store.StoreStats;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.indices.NodeIndicesStats;
+import org.elasticsearch.tasks.CancellableTask;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+/**
+ * Collects data tier usage stats locally on each node, mainly index and shard sizes, grouped by preferred data tier.
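+ * <p>
+ * A minimal caller-side sketch (hedged; assumes an internal {@code Client} and a logger are in scope):
+ * <pre>{@code
+ * client.execute(
+ *     NodesDataTiersUsageTransportAction.TYPE,
+ *     new NodesDataTiersUsageTransportAction.NodesRequest(),
+ *     ActionListener.wrap(
+ *         response -> response.getNodes()
+ *             .forEach(node -> node.getUsageStatsByTier()
+ *                 .forEach((tier, stats) -> logger.info("{}: {} docs", tier, stats.getDocCount()))),
+ *         e -> logger.warn("failed to collect data tier usage", e)
+ *     )
+ * );
+ * }</pre>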
+ */
+public class NodesDataTiersUsageTransportAction extends TransportNodesAction<
+    NodesDataTiersUsageTransportAction.NodesRequest,
+    NodesDataTiersUsageTransportAction.NodesResponse,
+    NodesDataTiersUsageTransportAction.NodeRequest,
+    NodeDataTiersUsage> {
+
+    public static final ActionType<NodesResponse> TYPE = ActionType.localOnly("cluster:monitor/nodes/data_tier_usage");
+    public static final NodeFeature LOCALLY_PRECALCULATED_STATS_FEATURE = new NodeFeature("usage.data_tiers.precalculate_stats");
+
+    private static final CommonStatsFlags STATS_FLAGS = new CommonStatsFlags().clear()
+        .set(CommonStatsFlags.Flag.Docs, true)
+        .set(CommonStatsFlags.Flag.Store, true);
+
+    private final IndicesService indicesService;
+
+    @Inject
+    public NodesDataTiersUsageTransportAction(
+        ThreadPool threadPool,
+        ClusterService clusterService,
+        TransportService transportService,
+        IndicesService indicesService,
+        ActionFilters actionFilters
+    ) {
+        super(
+            TYPE.name(),
+            clusterService,
+            transportService,
+            actionFilters,
+            NodeRequest::new,
+            threadPool.executor(ThreadPool.Names.MANAGEMENT)
+        );
+        this.indicesService = indicesService;
+    }
+
+    @Override
+    protected NodesResponse newResponse(NodesRequest request, List<NodeDataTiersUsage> responses, List<FailedNodeException> failures) {
+        return new NodesResponse(clusterService.getClusterName(), responses, failures);
+    }
+
+    @Override
+    protected NodeRequest newNodeRequest(NodesRequest request) {
+        return NodeRequest.INSTANCE;
+    }
+
+    @Override
+    protected NodeDataTiersUsage newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
+        return new NodeDataTiersUsage(in);
+    }
+
+    @Override
+    protected NodeDataTiersUsage nodeOperation(NodeRequest nodeRequest, Task task) {
+        assert task instanceof CancellableTask;
+
+        DiscoveryNode localNode = clusterService.localNode();
+        NodeIndicesStats nodeIndicesStats = indicesService.stats(STATS_FLAGS, true);
+        ClusterState state = clusterService.state();
+        RoutingNode routingNode = state.getRoutingNodes().node(localNode.getId());
+        Map<String, NodeDataTiersUsage.UsageStats> usageStatsByTier = aggregateStats(routingNode, state.metadata(), nodeIndicesStats);
+        return new NodeDataTiersUsage(clusterService.localNode(), usageStatsByTier);
+    }
+
+    // For bwc & testing purposes
+    static Map<String, NodeDataTiersUsage.UsageStats> aggregateStats(
+        RoutingNode routingNode,
+        Metadata metadata,
+        NodeIndicesStats nodeIndicesStats
+    ) {
+        if (routingNode == null) {
+            return Map.of();
+        }
+        Map<String, NodeDataTiersUsage.UsageStats> usageStatsByTier = new HashMap<>();
+        Set<String> localIndices = StreamSupport.stream(routingNode.spliterator(), false)
+            .map(routing -> routing.index().getName())
+            .collect(Collectors.toSet());
+        for (String indexName : localIndices) {
+            IndexMetadata indexMetadata = metadata.index(indexName);
+            if (indexMetadata == null) {
+                continue;
+            }
+            String tier = indexMetadata.getTierPreference().isEmpty() ? null : indexMetadata.getTierPreference().get(0);
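+            // e.g. an index with tier preference [data_cold, data_warm] is attributed to data_cold,
+            // its first (most preferred) tier, mirroring getIndicesGroupedByTier above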
+            if (tier != null) {
+                NodeDataTiersUsage.UsageStats usageStats = usageStatsByTier.computeIfAbsent(
+                    tier,
+                    ignored -> new NodeDataTiersUsage.UsageStats()
+                );
+                List<IndexShardStats> allShardStats = nodeIndicesStats.getShardStats(indexMetadata.getIndex());
+                if (allShardStats != null) {
+                    for (IndexShardStats indexShardStats : allShardStats) {
+                        usageStats.incrementTotalSize(indexShardStats.getTotal().getStore().totalDataSetSizeInBytes());
+                        usageStats.incrementDocCount(indexShardStats.getTotal().getDocs().getCount());
+
+                        ShardRouting shardRouting = routingNode.getByShardId(indexShardStats.getShardId());
+                        if (shardRouting != null && shardRouting.state() == ShardRoutingState.STARTED) {
+                            usageStats.incrementTotalShardCount(1);
+
+                            // Accumulate stats about started primary shards
+                            StoreStats primaryStoreStats = indexShardStats.getPrimary().getStore();
+                            if (shardRouting.primary() && primaryStoreStats != null) {
+                                usageStats.addPrimaryShardSize(primaryStoreStats.totalDataSetSizeInBytes());
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        return usageStatsByTier;
+    }
+
+    public static class NodesRequest extends BaseNodesRequest<NodesRequest> {
+
+        public NodesRequest() {
+            super((String[]) null);
+        }
+
+        @Override
+        public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
+            return new CancellableTask(id, type, action, "", parentTaskId, headers);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+        }
+    }
+
+    public static class NodeRequest extends TransportRequest {
+
+        static final NodeRequest INSTANCE = new NodeRequest();
+
+        public NodeRequest(StreamInput in) throws IOException {
+            super(in);
+        }
+
+        public NodeRequest() {
+
+        }
+
+        @Override
+        public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
+            return new CancellableTask(id, type, action, "", parentTaskId, headers);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+        }
+    }
+
+    public static class NodesResponse extends BaseNodesResponse<NodeDataTiersUsage> {
+
+        public NodesResponse(ClusterName clusterName, List<NodeDataTiersUsage> nodes, List<FailedNodeException> failures) {
+            super(clusterName, nodes, failures);
+        }
+
+        @Override
+        protected List<NodeDataTiersUsage> readNodesFrom(StreamInput in) throws IOException {
+            return in.readCollectionAsList(NodeDataTiersUsage::new);
+        }
+
+        @Override
+        protected void writeNodesTo(StreamOutput out, List<NodeDataTiersUsage> nodes) throws IOException {
+            out.writeCollection(nodes);
+        }
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java
index a72cbad790a68..22a2c3a880ce5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java
@@ -6,6 +6,8 @@
  */
 package org.elasticsearch.xpack.core.ilm;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.downsample.DownsampleConfig;
 import org.elasticsearch.client.internal.Client;
@@ -32,6 +34,7 @@
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 
+import static org.elasticsearch.action.downsample.DownsampleConfig.generateDownsampleIndexName;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
 import static
org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -40,6 +43,8 @@ */ public class DownsampleAction implements LifecycleAction { + private static final Logger logger = LogManager.getLogger(DownsampleAction.class); + public static final String NAME = "downsample"; public static final String DOWNSAMPLED_INDEX_PREFIX = "downsample-"; public static final String CONDITIONAL_TIME_SERIES_CHECK_KEY = BranchingStep.NAME + "-on-timeseries-check"; @@ -155,7 +160,30 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { (index, clusterState) -> { IndexMetadata indexMetadata = clusterState.metadata().index(index); assert indexMetadata != null : "invalid cluster metadata. index [" + index.getName() + "] metadata not found"; - return IndexSettings.MODE.get(indexMetadata.getSettings()) == IndexMode.TIME_SERIES; + if (IndexSettings.MODE.get(indexMetadata.getSettings()) != IndexMode.TIME_SERIES) { + return false; + } + + if (index.getName().equals(generateDownsampleIndexName(DOWNSAMPLED_INDEX_PREFIX, indexMetadata, fixedInterval))) { + var downsampleStatus = IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(indexMetadata.getSettings()); + if (downsampleStatus == IndexMetadata.DownsampleTaskStatus.UNKNOWN) { + // This isn't a downsample index, but it has the name of our target downsample index - very bad, we'll skip the + // downsample action to avoid blocking the lifecycle of this index - if there + // is another downsample action configured in the next phase, it'll be able to proceed successfully + logger.warn( + "index [{}] as part of policy [{}] cannot be downsampled at interval [{}] in phase [{}] because it has" + + " the name of the target downsample index and is itself not a downsampled index. Skipping the downsample " + + "action.", + index.getName(), + indexMetadata.getLifecyclePolicyName(), + fixedInterval, + phase + ); + } + return false; + } + + return true; } ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java index 7ab8e41cd2453..ffaa8489929ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java @@ -150,13 +150,14 @@ private static void checkUniqueness(int id, String uniqueId) { public static final MlConfigVersion V_10 = registerMlConfigVersion(10_00_00_99, "4B940FD9-BEDD-4589-8E08-02D9B480B22D"); // V_11 is used in ELSER v2 package configs - public static final MlConfigVersion V_11 = registerMlConfigVersion(11_00_00_99, "79CB2950-57C7-11EE-AE5D-0800200C9A66"); + public static final MlConfigVersion V_11 = registerMlConfigVersion(11_00_0_0_99, "79CB2950-57C7-11EE-AE5D-0800200C9A66"); + public static final MlConfigVersion V_12 = registerMlConfigVersion(12_00_0_0_99, "Trained model config prefix strings added"); /** * Reference to the most recent Ml config version. * This should be the Ml config version with the highest id. 
*/ - public static final MlConfigVersion CURRENT = V_11; + public static final MlConfigVersion CURRENT = V_12; /** * Reference to the first MlConfigVersion that is detached from the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java index 70ab69ae94e19..7cef2bed04ce3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; +import java.time.Instant; import java.util.Collection; import java.util.Collections; import java.util.Set; @@ -194,6 +195,17 @@ public static JobState getJobStateModifiedForReassignments(@Nullable PersistentT return jobState; } + public static Instant getLastJobTaskStateChangeTime(String jobId, @Nullable PersistentTasksCustomMetadata tasks) { + PersistentTasksCustomMetadata.PersistentTask task = getJobTask(jobId, tasks); + if (task != null) { + JobTaskState jobTaskState = (JobTaskState) task.getState(); + if (jobTaskState != null) { + return jobTaskState.getLastStateChangeTime(); + } + } + return null; + } + public static SnapshotUpgradeState getSnapshotUpgradeState( String jobId, String snapshotId, @@ -260,6 +272,17 @@ public static DataFrameAnalyticsState getDataFrameAnalyticsState(@Nullable Persi return state; } + public static Instant getLastDataFrameAnalyticsTaskStateChangeTime(String analyticsId, @Nullable PersistentTasksCustomMetadata tasks) { + PersistentTasksCustomMetadata.PersistentTask task = getDataFrameAnalyticsTask(analyticsId, tasks); + if (task != null) { + DataFrameAnalyticsTaskState taskState = (DataFrameAnalyticsTaskState) task.getState(); + if (taskState != null) { + return taskState.getLastStateChangeTime(); + } + } + return null; + } + /** * The job Ids of anomaly detector job tasks. * All anomaly detector jobs are returned regardless of the status of the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java index 61e52935f46e9..296aec12b1a63 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -88,6 +89,7 @@ public static Builder parseRequest(String id, XContentParser parser) { // input and so cannot construct a document. private final List textInput; private boolean highPriority; + private TrainedModelPrefixStrings.PrefixType prefixType = TrainedModelPrefixStrings.PrefixType.NONE; /** * Build a request from a list of documents as maps. 
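Since prefixType is written only to nodes on a new enough transport version (older readers default it to NONE, as the stream constructor below shows), callers can set it unconditionally. A hedged sketch of the caller-side contract; the request itself is assumed to be built elsewhere through the existing builder:

    // Hypothetical helper: apply the model's search-time prefix to the text input.
    static void useSearchPrefix(InferModelAction.Request request) {
        request.setPrefixType(TrainedModelPrefixStrings.PrefixType.SEARCH);
        assert request.getPrefixType() == TrainedModelPrefixStrings.PrefixType.SEARCH;
    }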
@@ -190,6 +192,11 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { highPriority = in.readBoolean(); } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + prefixType = in.readEnum(TrainedModelPrefixStrings.PrefixType.class); + } else { + prefixType = TrainedModelPrefixStrings.PrefixType.NONE; + } } public int numberOfDocuments() { @@ -232,6 +239,14 @@ public void setHighPriority(boolean highPriority) { this.highPriority = highPriority; } + public void setPrefixType(TrainedModelPrefixStrings.PrefixType prefixType) { + this.prefixType = prefixType; + } + + public TrainedModelPrefixStrings.PrefixType getPrefixType() { + return prefixType; + } + @Override public ActionRequestValidationException validate() { return null; @@ -253,6 +268,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeBoolean(highPriority); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + out.writeEnum(prefixType); + } } @Override @@ -266,7 +284,8 @@ public boolean equals(Object o) { && Objects.equals(inferenceTimeout, that.inferenceTimeout) && Objects.equals(objectsToInfer, that.objectsToInfer) && Objects.equals(textInput, that.textInput) - && (highPriority == that.highPriority); + && (highPriority == that.highPriority) + && (prefixType == that.prefixType); } @Override @@ -276,7 +295,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public int hashCode() { - return Objects.hash(id, objectsToInfer, update, previouslyLicensed, inferenceTimeout, textInput, highPriority); + return Objects.hash(id, objectsToInfer, update, previouslyLicensed, inferenceTimeout, textInput, highPriority, prefixType); } public static class Builder { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index 524d5f84a177b..806f935d5f394 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -102,6 +103,7 @@ public static Request.Builder parseRequest(String id, XContentParser parser) { // and do know which field the model expects to find its // input and so cannot construct a document. 
private final List textInput; + private TrainedModelPrefixStrings.PrefixType prefixType = TrainedModelPrefixStrings.PrefixType.NONE; public static Request forDocs(String id, InferenceConfigUpdate update, List> docs, TimeValue inferenceTimeout) { return new Request( @@ -156,6 +158,11 @@ public Request(StreamInput in) throws IOException { } else { textInput = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + prefixType = in.readEnum(TrainedModelPrefixStrings.PrefixType.class); + } else { + prefixType = TrainedModelPrefixStrings.PrefixType.NONE; + } } public String getId() { @@ -200,6 +207,14 @@ public boolean isHighPriority() { return highPriority; } + public void setPrefixType(TrainedModelPrefixStrings.PrefixType prefixType) { + this.prefixType = prefixType; + } + + public TrainedModelPrefixStrings.PrefixType getPrefixType() { + return prefixType; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); @@ -226,6 +241,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { out.writeOptionalStringCollection(textInput); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + out.writeEnum(prefixType); + } } @Override @@ -243,12 +261,13 @@ public boolean equals(Object o) { && Objects.equals(update, that.update) && Objects.equals(inferenceTimeout, that.inferenceTimeout) && Objects.equals(highPriority, that.highPriority) - && Objects.equals(textInput, that.textInput); + && Objects.equals(textInput, that.textInput) + && (prefixType == that.prefixType); } @Override public int hashCode() { - return Objects.hash(id, update, docs, inferenceTimeout, highPriority, textInput); + return Objects.hash(id, update, docs, inferenceTimeout, highPriority, textInput, prefixType); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java index e6fdc7886ce53..8d4b601a38aad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsTaskState.java @@ -6,42 +6,57 @@ */ package org.elasticsearch.xpack.core.ml.dataframe; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import java.io.IOException; +import java.time.Instant; import java.util.Objects; -public class DataFrameAnalyticsTaskState implements PersistentTaskState { +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class 
DataFrameAnalyticsTaskState implements PersistentTaskState, MlTaskState { public static final String NAME = MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME; - private static ParseField STATE = new ParseField("state"); - private static ParseField ALLOCATION_ID = new ParseField("allocation_id"); - private static ParseField REASON = new ParseField("reason"); + private static final ParseField STATE = new ParseField("state"); + private static final ParseField ALLOCATION_ID = new ParseField("allocation_id"); + private static final ParseField REASON = new ParseField("reason"); + private static final ParseField LAST_STATE_CHANGE_TIME = new ParseField("last_state_change_time"); private final DataFrameAnalyticsState state; private final long allocationId; private final String reason; + private final Instant lastStateChangeTime; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME, true, - a -> new DataFrameAnalyticsTaskState((DataFrameAnalyticsState) a[0], (long) a[1], (String) a[2]) + a -> new DataFrameAnalyticsTaskState((DataFrameAnalyticsState) a[0], (long) a[1], (String) a[2], (Instant) a[3]) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameAnalyticsState::fromString, STATE); PARSER.declareLong(ConstructingObjectParser.constructorArg(), ALLOCATION_ID); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON); + PARSER.declareField( + optionalConstructorArg(), + p -> TimeUtils.parseTimeFieldToInstant(p, LAST_STATE_CHANGE_TIME.getPreferredName()), + LAST_STATE_CHANGE_TIME, + ObjectParser.ValueType.VALUE + ); } public static DataFrameAnalyticsTaskState fromXContent(XContentParser parser) { @@ -52,27 +67,49 @@ public static DataFrameAnalyticsTaskState fromXContent(XContentParser parser) { } } - public DataFrameAnalyticsTaskState(DataFrameAnalyticsState state, long allocationId, @Nullable String reason) { + public DataFrameAnalyticsTaskState( + DataFrameAnalyticsState state, + long allocationId, + @Nullable String reason, + @Nullable Instant lastStateChangeTime + ) { this.state = Objects.requireNonNull(state); this.allocationId = allocationId; this.reason = reason; + // Round to millisecond to avoid serialization round trip differences + this.lastStateChangeTime = (lastStateChangeTime != null) ? 
Instant.ofEpochMilli(lastStateChangeTime.toEpochMilli()) : null; } public DataFrameAnalyticsTaskState(StreamInput in) throws IOException { this.state = DataFrameAnalyticsState.fromStream(in); this.allocationId = in.readLong(); this.reason = in.readOptionalString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_STATE_CHANGE_TIMESTAMPS)) { + lastStateChangeTime = in.readOptionalInstant(); + } else { + lastStateChangeTime = null; + } } public DataFrameAnalyticsState getState() { return state; } + public long getAllocationId() { + return allocationId; + } + @Nullable public String getReason() { return reason; } + @Override + @Nullable + public Instant getLastStateChangeTime() { + return lastStateChangeTime; + } + public boolean isStatusStale(PersistentTasksCustomMetadata.PersistentTask task) { return allocationId != task.getAllocationId(); } @@ -87,6 +124,9 @@ public void writeTo(StreamOutput out) throws IOException { state.writeTo(out); out.writeLong(allocationId); out.writeOptionalString(reason); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_STATE_CHANGE_TIMESTAMPS)) { + out.writeOptionalInstant(lastStateChangeTime); + } } @Override @@ -97,6 +137,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (reason != null) { builder.field(REASON.getPreferredName(), reason); } + if (lastStateChangeTime != null) { + builder.timeField( + LAST_STATE_CHANGE_TIME.getPreferredName(), + LAST_STATE_CHANGE_TIME.getPreferredName() + "_string", + lastStateChangeTime.toEpochMilli() + ); + } builder.endObject(); return builder; } @@ -106,11 +153,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DataFrameAnalyticsTaskState that = (DataFrameAnalyticsTaskState) o; - return allocationId == that.allocationId && state == that.state && Objects.equals(reason, that.reason); + return allocationId == that.allocationId + && state == that.state + && Objects.equals(reason, that.reason) + && Objects.equals(lastStateChangeTime, that.lastStateChangeTime); } @Override public int hashCode() { - return Objects.hash(state, allocationId, reason); + return Objects.hash(state, allocationId, reason, lastStateChangeTime); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index 9dfa2d51f0fc0..b469d35b90383 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -100,6 +100,7 @@ public class TrainedModelConfig implements ToXContentObject, Writeable { public static final ParseField INFERENCE_CONFIG = new ParseField("inference_config"); public static final ParseField LOCATION = new ParseField("location"); public static final ParseField MODEL_PACKAGE = new ParseField("model_package"); + public static final ParseField PREFIX_STRINGS = new ParseField("prefix_strings"); public static final ParseField PER_DEPLOYMENT_MEMORY_BYTES = new ParseField("per_deployment_memory_bytes"); public static final ParseField PER_ALLOCATION_MEMORY_BYTES = new ParseField("per_allocation_memory_bytes"); @@ -170,6 +171,11 @@ private static ObjectParser createParser(boole MODEL_PACKAGE ); parser.declareString(TrainedModelConfig.Builder::setPlatformArchitecture, PLATFORM_ARCHITECTURE); + 
parser.declareObject( + TrainedModelConfig.Builder::setPrefixStrings, + (p, c) -> TrainedModelPrefixStrings.fromXContent(p, ignoreUnknownFields), + PREFIX_STRINGS + ); return parser; } @@ -198,6 +204,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo private final ModelPackageConfig modelPackageConfig; private Boolean fullDefinition; private String platformArchitecture; + private TrainedModelPrefixStrings prefixStrings; TrainedModelConfig( String modelId, @@ -217,7 +224,8 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo InferenceConfig inferenceConfig, TrainedModelLocation location, ModelPackageConfig modelPackageConfig, - String platformArchitecture + String platformArchitecture, + TrainedModelPrefixStrings prefixStrings ) { this.modelId = ExceptionsHelper.requireNonNull(modelId, MODEL_ID); this.modelType = modelType; @@ -245,6 +253,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo this.location = location; this.modelPackageConfig = modelPackageConfig; this.platformArchitecture = platformArchitecture; + this.prefixStrings = prefixStrings; } private static TrainedModelInput handleDefaultInput(TrainedModelInput input, TrainedModelType modelType) { @@ -289,6 +298,9 @@ public TrainedModelConfig(StreamInput in) throws IOException { } else { platformArchitecture = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + prefixStrings = in.readOptionalWriteable(TrainedModelPrefixStrings::new); + } } public boolean isPackagedModel() { @@ -435,6 +447,10 @@ public String getPlatformArchitecture() { return platformArchitecture; } + public TrainedModelPrefixStrings getPrefixStrings() { + return prefixStrings; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); @@ -469,6 +485,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { out.writeOptionalString(platformArchitecture); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + out.writeOptionalWriteable(prefixStrings); + } } @Override @@ -531,6 +551,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (location != null) { writeNamedObject(builder, params, LOCATION.getPreferredName(), location); } + if (prefixStrings != null) { + builder.field(PREFIX_STRINGS.getPreferredName(), prefixStrings); + } if (params.paramAsBoolean(DEFINITION_STATUS, false) && fullDefinition != null) { builder.field("fully_defined", fullDefinition); } @@ -565,7 +588,8 @@ public boolean equals(Object o) { && Objects.equals(inferenceConfig, that.inferenceConfig) && Objects.equals(metadata, that.metadata) && Objects.equals(location, that.location) - && Objects.equals(platformArchitecture, that.platformArchitecture); + && Objects.equals(platformArchitecture, that.platformArchitecture) + && Objects.equals(prefixStrings, that.prefixStrings); } @Override @@ -588,7 +612,8 @@ public int hashCode() { inferenceConfig, defaultFieldMap, location, - platformArchitecture + platformArchitecture, + prefixStrings ); } @@ -614,6 +639,7 @@ public static class Builder { private Long perDeploymentMemoryBytes; private Long perAllocationMemoryBytes; private String platformArchitecture; + private TrainedModelPrefixStrings prefixStrings; public Builder() {} @@ -636,6 +662,7 @@ public 
Builder(TrainedModelConfig config) { this.location = config.location; this.modelPackageConfig = config.modelPackageConfig; this.platformArchitecture = config.platformArchitecture; + this.prefixStrings = config.prefixStrings; } public Builder setModelId(String modelId) { @@ -733,6 +760,11 @@ public Builder setPlatformArchitecture(String platformArchitecture) { return this; } + public Builder setPrefixStrings(TrainedModelPrefixStrings prefixStrings) { + this.prefixStrings = prefixStrings; + return this; + } + public Builder setModelAliases(Set modelAliases) { if (modelAliases == null || modelAliases.isEmpty()) { return this; @@ -1053,7 +1085,8 @@ public TrainedModelConfig build() { inferenceConfig, location, modelPackageConfig, - platformArchitecture + platformArchitecture, + prefixStrings ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelPrefixStrings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelPrefixStrings.java new file mode 100644 index 0000000000000..749cbb4a7c1ea --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelPrefixStrings.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public record TrainedModelPrefixStrings(String ingestPrefix, String searchPrefix) implements ToXContentObject, Writeable { + + public enum PrefixType { + INGEST, + SEARCH, + NONE + } + + public static final ParseField INGEST_PREFIX = new ParseField("ingest"); + public static final ParseField SEARCH_PREFIX = new ParseField("search"); + public static final String NAME = "trained_model_config_prefix_strings"; + + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + NAME, + ignoreUnknownFields, + a -> new TrainedModelPrefixStrings((String) a[0], (String) a[1]) + ); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), INGEST_PREFIX); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), SEARCH_PREFIX); + return parser; + } + + public static TrainedModelPrefixStrings fromXContent(XContentParser parser, boolean lenient) throws IOException { + return lenient ? 
LENIENT_PARSER.parse(parser, null) : STRICT_PARSER.parse(parser, null); + } + + public TrainedModelPrefixStrings(StreamInput in) throws IOException { + this(in.readOptionalString(), in.readOptionalString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (ingestPrefix != null) { + builder.field(INGEST_PREFIX.getPreferredName(), ingestPrefix); + } + if (searchPrefix != null) { + builder.field(SEARCH_PREFIX.getPreferredName(), searchPrefix); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(ingestPrefix); + out.writeOptionalString(searchPrefix); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java index 6c8fc6fec4e0e..49ffca8f32d26 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java @@ -50,7 +50,8 @@ public final class InferenceIndexConstants { private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; // 2 added support for platform specific models - public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 2; + // 3 added prefix strings configuration + public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 3; public static String mapping() { return TemplateUtils.loadTemplate( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java index 19095ee52fe08..536cce95df527 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfig.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,6 +20,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; @@ -69,7 +69,8 @@ private static ConstructingObjectParser createParser(b (String) a[9], // model_type tags, (String) a[11], // vocabulary file - (String) a[12] // platform architecture + (String) a[12], // platform architecture + (TrainedModelPrefixStrings) a[13] ); } ); @@ -95,6 +96,11 @@ private static ConstructingObjectParser createParser(b parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), TrainedModelConfig.TAGS); parser.declareString(ConstructingObjectParser.optionalConstructorArg(), VOCABULARY_FILE); parser.declareString(ConstructingObjectParser.optionalConstructorArg(), 
PLATFORM_ARCHITECTURE); + parser.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> TrainedModelPrefixStrings.fromXContent(p, lenient), + TrainedModelConfig.PREFIX_STRINGS + ); return parser; } @@ -122,6 +128,7 @@ public static ModelPackageConfig fromXContentLenient(XContentParser parser) thro private final List tags; private final String vocabularyFile; private final String platformArchitecture; + private final TrainedModelPrefixStrings prefixStrings; public ModelPackageConfig( String packagedModelId, @@ -136,7 +143,8 @@ public ModelPackageConfig( String modelType, List tags, String vocabularyFile, - String platformArchitecture + String platformArchitecture, + TrainedModelPrefixStrings prefixStrings ) { this.packagedModelId = ExceptionsHelper.requireNonNull(packagedModelId, PACKAGED_MODEL_ID); this.modelRepository = modelRepository; @@ -154,6 +162,7 @@ public ModelPackageConfig( this.tags = tags == null ? Collections.emptyList() : Collections.unmodifiableList(tags); this.vocabularyFile = vocabularyFile; this.platformArchitecture = platformArchitecture; + this.prefixStrings = prefixStrings; } public ModelPackageConfig(StreamInput in) throws IOException { @@ -174,6 +183,11 @@ public ModelPackageConfig(StreamInput in) throws IOException { } else { platformArchitecture = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + prefixStrings = in.readOptionalWriteable(TrainedModelPrefixStrings::new); + } else { + prefixStrings = null; + } } public String getPackagedModelId() { @@ -228,6 +242,10 @@ public String getPlatformArchitecture() { return platformArchitecture; } + public TrainedModelPrefixStrings getPrefixStrings() { + return prefixStrings; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -268,6 +286,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (Strings.isNullOrEmpty(platformArchitecture) == false) { builder.field(PLATFORM_ARCHITECTURE.getPreferredName(), platformArchitecture); } + if (prefixStrings != null) { + builder.field(TrainedModelConfig.PREFIX_STRINGS.getPreferredName(), prefixStrings); + } builder.endObject(); return builder; @@ -290,6 +311,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED)) { out.writeOptionalString(platformArchitecture); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + out.writeOptionalWriteable(prefixStrings); + } } @Override @@ -313,7 +337,8 @@ public boolean equals(Object o) { && Objects.equals(modelType, that.modelType) && Objects.equals(tags, that.tags) && Objects.equals(vocabularyFile, that.vocabularyFile) - && Objects.equals(platformArchitecture, that.platformArchitecture); + && Objects.equals(platformArchitecture, that.platformArchitecture) + && Objects.equals(prefixStrings, that.prefixStrings); } @Override @@ -331,7 +356,8 @@ public int hashCode() { modelType, tags, vocabularyFile, - platformArchitecture + platformArchitecture, + prefixStrings ); } @@ -355,6 +381,7 @@ public static class Builder { private List tags; private String vocabularyFile; private String platformArchitecture; + private TrainedModelPrefixStrings prefixStrings; public Builder(ModelPackageConfig modelPackageConfig) { this.packagedModelId = modelPackageConfig.packagedModelId; @@ -370,6 +397,7 @@ public 
Builder(ModelPackageConfig modelPackageConfig) {
         this.tags = modelPackageConfig.tags;
         this.vocabularyFile = modelPackageConfig.vocabularyFile;
         this.platformArchitecture = modelPackageConfig.platformArchitecture;
+        this.prefixStrings = modelPackageConfig.prefixStrings;
     }
 
     public Builder setPackedModelId(String packagedModelId) {
@@ -437,9 +465,13 @@ public Builder setPlatformArchitecture(String platformArchitecture) {
         return this;
     }
 
+    public Builder setPrefixStrings(TrainedModelPrefixStrings prefixStrings) {
+        this.prefixStrings = prefixStrings;
+        return this;
+    }
+
     /**
-     * Reset all fields which are only part of the package metadata, but not be part
-     * of the config.
+     * Reset (clear) all fields which are part of the model configuration
      */
     public Builder resetPackageOnlyFields() {
         this.description = null;
@@ -447,15 +479,7 @@ public Builder resetPackageOnlyFields() {
         this.metadata = null;
         this.modelType = null;
         this.tags = null;
-        return this;
-    }
-
-    public Builder validate(boolean forCreation) {
-        ActionRequestValidationException validationException = null;
-
-        if (validationException != null) {
-            throw validationException;
-        }
+        this.prefixStrings = null;
         return this;
     }
 
@@ -473,7 +497,8 @@ public ModelPackageConfig build() {
             modelType,
             tags,
             vocabularyFile,
-            platformArchitecture
+            platformArchitecture,
+            prefixStrings
         );
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java
index 3db1165026193..c07cb0cf9c91a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java
@@ -6,41 +6,53 @@
  */
 package org.elasticsearch.xpack.core.ml.job.config;
 
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.persistent.PersistentTaskState;
 import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.common.time.TimeUtils;
 import org.elasticsearch.xpack.core.ml.MlTasks;
+import org.elasticsearch.xpack.core.ml.utils.MlTaskState;
 
 import java.io.IOException;
+import java.time.Instant;
 import java.util.Objects;
 
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class JobTaskState implements PersistentTaskState {
+public class JobTaskState implements PersistentTaskState, MlTaskState {
 
     public static final String NAME = MlTasks.JOB_TASK_NAME;
 
-    private static ParseField STATE = new ParseField("state");
-    private static ParseField ALLOCATION_ID = new ParseField("allocation_id");
-    private static ParseField REASON = new ParseField("reason");
+    private static final ParseField STATE = new ParseField("state");
+    private static final ParseField ALLOCATION_ID = new ParseField("allocation_id");
+    private static final ParseField REASON = new ParseField("reason");
+    private static final ParseField LAST_STATE_CHANGE_TIME = new
ParseField("last_state_change_time"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME, true, - args -> new JobTaskState((JobState) args[0], (Long) args[1], (String) args[2]) + args -> new JobTaskState((JobState) args[0], (Long) args[1], (String) args[2], (Instant) args[3]) ); static { PARSER.declareString(constructorArg(), JobState::fromString, STATE); PARSER.declareLong(constructorArg(), ALLOCATION_ID); PARSER.declareString(optionalConstructorArg(), REASON); + PARSER.declareField( + optionalConstructorArg(), + p -> TimeUtils.parseTimeFieldToInstant(p, LAST_STATE_CHANGE_TIME.getPreferredName()), + LAST_STATE_CHANGE_TIME, + ObjectParser.ValueType.VALUE + ); } public static JobTaskState fromXContent(XContentParser parser) { @@ -54,28 +66,46 @@ public static JobTaskState fromXContent(XContentParser parser) { private final JobState state; private final long allocationId; private final String reason; + private final Instant lastStateChangeTime; - public JobTaskState(JobState state, long allocationId, @Nullable String reason) { + public JobTaskState(JobState state, long allocationId, @Nullable String reason, @Nullable Instant lastStateChangeTime) { this.state = Objects.requireNonNull(state); this.allocationId = allocationId; this.reason = reason; + // Round to millisecond to avoid serialization round trip differences + this.lastStateChangeTime = (lastStateChangeTime != null) ? Instant.ofEpochMilli(lastStateChangeTime.toEpochMilli()) : null; } public JobTaskState(StreamInput in) throws IOException { state = JobState.fromStream(in); allocationId = in.readLong(); reason = in.readOptionalString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_STATE_CHANGE_TIMESTAMPS)) { + lastStateChangeTime = in.readOptionalInstant(); + } else { + lastStateChangeTime = null; + } } public JobState getState() { return state; } + public long getAllocationId() { + return allocationId; + } + @Nullable public String getReason() { return reason; } + @Override + @Nullable + public Instant getLastStateChangeTime() { + return lastStateChangeTime; + } + /** * The job state stores the allocation ID at the time it was last set. 
* This method compares the allocation ID in the state with the allocation @@ -101,6 +131,9 @@ public void writeTo(StreamOutput out) throws IOException { state.writeTo(out); out.writeLong(allocationId); out.writeOptionalString(reason); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_STATE_CHANGE_TIMESTAMPS)) { + out.writeOptionalInstant(lastStateChangeTime); + } } @Override @@ -111,6 +144,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (reason != null) { builder.field(REASON.getPreferredName(), reason); } + if (lastStateChangeTime != null) { + builder.timeField( + LAST_STATE_CHANGE_TIME.getPreferredName(), + LAST_STATE_CHANGE_TIME.getPreferredName() + "_string", + lastStateChangeTime.toEpochMilli() + ); + } builder.endObject(); return builder; } @@ -120,11 +160,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; JobTaskState that = (JobTaskState) o; - return state == that.state && Objects.equals(allocationId, that.allocationId) && Objects.equals(reason, that.reason); + return state == that.state + && Objects.equals(allocationId, that.allocationId) + && Objects.equals(reason, that.reason) + && Objects.equals(lastStateChangeTime, that.lastStateChangeTime); } @Override public int hashCode() { - return Objects.hash(state, allocationId, reason); + return Objects.hash(state, allocationId, reason, lastStateChangeTime); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java new file mode 100644 index 0000000000000..09a7d3827caf2 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlTaskState.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.utils; + +import org.elasticsearch.core.Nullable; + +import java.time.Instant; + +public interface MlTaskState { + + /** + * The time of the last state change. 
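+     * <p>
+     * The implementations added in this change ({@code JobTaskState} and {@code DataFrameAnalyticsTaskState})
+     * round this timestamp to millisecond precision to avoid serialization round trip differences.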
+ */ + @Nullable + Instant getLastStateChangeTime(); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java index c011bd8af6c51..ec0ecfc909980 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java @@ -50,7 +50,7 @@ static ApplicationPrivilegeDescriptor parsePrivilege(XContentParser parser, Stri */ public PutPrivilegesRequestBuilder source(BytesReference source, XContentType xContentType) throws IOException { Objects.requireNonNull(xContentType); - // EMPTY is ok here because we never call namedObject + // NamedXContentRegistry.EMPTY is ok here because we never call namedObject try ( InputStream stream = source.streamInput(); XContentParser parser = xContentType.xContent() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index 18a2be0e9b358..b0f1c78b0c99d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -64,6 +64,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -71,18 +72,6 @@ * Abstracts the logic of managing versioned index templates, ingest pipelines and lifecycle policies for plugins that require such things. */ public abstract class IndexTemplateRegistry implements ClusterStateListener { - public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; - - /** - * Check if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set to {@code true}, indicating that - * we're running in a cluster configuration that is only expecting to use data streams lifecycles. - * - * @param settings the node settings - * @return true if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set - */ - public static boolean isDataStreamsLifecycleOnlyMode(final Settings settings) { - return settings.getAsBoolean(DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME, false); - } private static final Logger logger = LogManager.getLogger(IndexTemplateRegistry.class); @@ -121,7 +110,7 @@ public IndexTemplateRegistry( /** * Returns the configured configurations for the lifecycle policies. Subclasses should provide * the ILM configurations and they will be loaded if we're not running data stream only mode (controlled via - * {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME}). + * {@link org.elasticsearch.cluster.metadata.DataStreamLifecycle#DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME}). * * The loaded lifecycle configurations will be installed if returned by {@link #getLifecyclePolicies()}. 
Child classes * have a chance to override {@link #getLifecyclePolicies()} in case they want additional control over if these diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IngestPipelineConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IngestPipelineConfig.java index a216030f1c2e0..2768355183687 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IngestPipelineConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IngestPipelineConfig.java @@ -12,6 +12,7 @@ import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Objects; /** @@ -22,6 +23,7 @@ public abstract class IngestPipelineConfig { protected final String resource; protected final int version; protected final String versionProperty; + protected final Map variables; /** * A list of this pipeline's dependencies, for example - such referred to through a pipeline processor. @@ -35,11 +37,23 @@ public IngestPipelineConfig(String id, String resource, int version, String vers } public IngestPipelineConfig(String id, String resource, int version, String versionProperty, List dependencies) { + this(id, resource, version, versionProperty, dependencies, Map.of()); + } + + public IngestPipelineConfig( + String id, + String resource, + int version, + String versionProperty, + List dependencies, + Map variables + ) { this.id = Objects.requireNonNull(id); this.resource = Objects.requireNonNull(resource); this.version = version; this.versionProperty = Objects.requireNonNull(versionProperty); this.dependencies = dependencies; + this.variables = Objects.requireNonNull(variables); } public String getId() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/JsonIngestPipelineConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/JsonIngestPipelineConfig.java index fc2ca7cbce186..05a27de40aadc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/JsonIngestPipelineConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/JsonIngestPipelineConfig.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.XContentType; import java.util.List; +import java.util.Map; public class JsonIngestPipelineConfig extends IngestPipelineConfig { public JsonIngestPipelineConfig(String id, String resource, int version, String versionProperty) { @@ -22,6 +23,17 @@ public JsonIngestPipelineConfig(String id, String resource, int version, String super(id, resource, version, versionProperty, dependencies); } + public JsonIngestPipelineConfig( + String id, + String resource, + int version, + String versionProperty, + List dependencies, + Map variables + ) { + super(id, resource, version, versionProperty, dependencies, variables); + } + @Override public XContentType getXContentType() { return XContentType.JSON; @@ -29,6 +41,6 @@ public XContentType getXContentType() { @Override public BytesReference loadConfig() { - return new BytesArray(TemplateUtils.loadTemplate(resource, String.valueOf(version), versionProperty)); + return new BytesArray(TemplateUtils.loadTemplate(resource, String.valueOf(version), versionProperty, variables)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java index ad27607e47c5e..d0be0ad9cb697 100644 --- 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java
index ad27607e47c5e..d0be0ad9cb697 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java
@@ -98,7 +98,7 @@ public static void validate(String source) {
         }
     }
 
-    private static String replaceVariables(String input, String version, String versionProperty, Map<String, String> variables) {
+    public static String replaceVariables(String input, String version, String versionProperty, Map<String, String> variables) {
         String template = replaceVariable(input, versionProperty, version);
         for (Map.Entry<String, String> variable : variables.entrySet()) {
             template = replaceVariable(template, variable.getKey(), variable.getValue());
diff --git a/x-pack/plugin/core/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/core/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
new file mode 100644
index 0000000000000..545918cbab502
--- /dev/null
+++ b/x-pack/plugin/core/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
@@ -0,0 +1,8 @@
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0; you may not use this file except in compliance with the Elastic License
+# 2.0.
+#
+
+org.elasticsearch.xpack.core.XPackFeatures
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersUsageTransportActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersUsageTransportActionTests.java
deleted file mode 100644
index 93e991b0fa5ae..0000000000000
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersUsageTransportActionTests.java
+++ /dev/null
@@ -1,786 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */ - -package org.elasticsearch.xpack.core; - -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; -import org.elasticsearch.action.admin.indices.stats.IndexShardStats; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.RoutingNodes; -import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; -import org.elasticsearch.cluster.routing.allocation.DataTier; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.shard.DocsStats; -import org.elasticsearch.index.shard.IndexLongFieldRange; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardPath; -import org.elasticsearch.index.store.StoreStats; -import org.elasticsearch.indices.NodeIndicesStats; -import org.elasticsearch.search.aggregations.metrics.TDigestState; -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_CREATION_DATE; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class DataTiersUsageTransportActionTests extends ESTestCase { - - public void testCalculateMAD() { - assertThat(DataTiersUsageTransportAction.computeMedianAbsoluteDeviation(TDigestState.create(10)), equalTo(0L)); - - TDigestState sketch = TDigestState.create(randomDoubleBetween(1, 1000, false)); - sketch.add(1); - sketch.add(1); - sketch.add(2); - sketch.add(2); - sketch.add(4); - sketch.add(6); - sketch.add(9); - assertThat(DataTiersUsageTransportAction.computeMedianAbsoluteDeviation(sketch), equalTo(1L)); - } - - public void testTierIndices() { - IndexMetadata hotIndex1 = indexMetadata("hot-1", 1, 0, DataTier.DATA_HOT); - IndexMetadata hotIndex2 = indexMetadata("hot-2", 1, 0, DataTier.DATA_HOT); - IndexMetadata warmIndex1 = indexMetadata("warm-1", 1, 0, DataTier.DATA_WARM); - IndexMetadata coldIndex1 = indexMetadata("cold-1", 1, 0, DataTier.DATA_COLD); - IndexMetadata coldIndex2 = indexMetadata("cold-2", 1, 0, DataTier.DATA_COLD, DataTier.DATA_WARM); // Prefers cold over warm - IndexMetadata nonTiered = indexMetadata("non-tier", 1, 
0); // No tier - - Map indices = new HashMap<>(); - indices.put("hot-1", hotIndex1); - indices.put("hot-2", hotIndex2); - indices.put("warm-1", warmIndex1); - indices.put("cold-1", coldIndex1); - indices.put("cold-2", coldIndex2); - indices.put("non-tier", nonTiered); - - Map tiers = DataTiersUsageTransportAction.tierIndices(indices); - assertThat(tiers.size(), equalTo(5)); - assertThat(tiers.get("hot-1"), equalTo(DataTier.DATA_HOT)); - assertThat(tiers.get("hot-2"), equalTo(DataTier.DATA_HOT)); - assertThat(tiers.get("warm-1"), equalTo(DataTier.DATA_WARM)); - assertThat(tiers.get("cold-1"), equalTo(DataTier.DATA_COLD)); - assertThat(tiers.get("cold-2"), equalTo(DataTier.DATA_COLD)); - assertThat(tiers.get("non-tier"), nullValue()); - } - - public void testCalculateStatsNoTiers() { - // Nodes: 0 Tiered Nodes, 1 Data Node - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(0, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_ROLE); - discoBuilder.add(dataNode1); - - discoBuilder.localNodeId(dataNode1.getId()); - - // Indices: 1 Regular index - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata index1 = indexMetadata("index_1", 3, 1); - metadataBuilder.put(index1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index1.getIndex()); - routeTestShardToNodes(index1, 0, indexRoutingTableBuilder, dataNode1); - routeTestShardToNodes(index1, 1, indexRoutingTableBuilder, dataNode1); - routeTestShardToNodes(index1, 2, indexRoutingTableBuilder, dataNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - .routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - No results when no tiers present - assertThat(tierSpecificStats.size(), is(0)); - } - - public void testCalculateStatsTieredNodesOnly() { - // Nodes: 1 Data, 1 Hot, 1 Warm, 1 Cold, 1 Frozen - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(0, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_ROLE); - discoBuilder.add(dataNode1); - DiscoveryNode hotNode1 = newNode(2, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode1); - DiscoveryNode warmNode1 = newNode(3, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode1); - DiscoveryNode coldNode1 = newNode(4, DiscoveryNodeRole.DATA_COLD_NODE_ROLE); - discoBuilder.add(coldNode1); - DiscoveryNode frozenNode1 = newNode(5, DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE); - 
discoBuilder.add(frozenNode1); - - discoBuilder.localNodeId(dataNode1.getId()); - - // Indices: 1 Regular index, not hosted on any tiers - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata index1 = indexMetadata("index_1", 3, 1); - metadataBuilder.put(index1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index1.getIndex()); - routeTestShardToNodes(index1, 0, indexRoutingTableBuilder, dataNode1); - routeTestShardToNodes(index1, 1, indexRoutingTableBuilder, dataNode1); - routeTestShardToNodes(index1, 2, indexRoutingTableBuilder, dataNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - .routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - Results are present but they lack index numbers because none are tiered - assertThat(tierSpecificStats.size(), is(4)); - - DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); - assertThat(hotStats, is(notNullValue())); - assertThat(hotStats.nodeCount, is(1)); - assertThat(hotStats.indexCount, is(0)); - assertThat(hotStats.totalShardCount, is(0)); - assertThat(hotStats.docCount, is(0L)); - assertThat(hotStats.totalByteCount, is(0L)); - assertThat(hotStats.primaryShardCount, is(0)); - assertThat(hotStats.primaryByteCount, is(0L)); - assertThat(hotStats.primaryByteCountMedian, is(0L)); // All same size - assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); - assertThat(warmStats, is(notNullValue())); - assertThat(warmStats.nodeCount, is(1)); - assertThat(warmStats.indexCount, is(0)); - assertThat(warmStats.totalShardCount, is(0)); - assertThat(warmStats.docCount, is(0L)); - assertThat(warmStats.totalByteCount, is(0L)); - assertThat(warmStats.primaryShardCount, is(0)); - assertThat(warmStats.primaryByteCount, is(0L)); - assertThat(warmStats.primaryByteCountMedian, is(0L)); // All same size - assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); - assertThat(coldStats, is(notNullValue())); - assertThat(coldStats.nodeCount, is(1)); - assertThat(coldStats.indexCount, is(0)); - assertThat(coldStats.totalShardCount, is(0)); - assertThat(coldStats.docCount, is(0L)); - assertThat(coldStats.totalByteCount, is(0L)); - assertThat(coldStats.primaryShardCount, is(0)); - assertThat(coldStats.primaryByteCount, is(0L)); - assertThat(coldStats.primaryByteCountMedian, is(0L)); // All same size - assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats 
frozenStats = tierSpecificStats.get(DataTier.DATA_FROZEN); - assertThat(frozenStats, is(notNullValue())); - assertThat(frozenStats.nodeCount, is(1)); - assertThat(frozenStats.indexCount, is(0)); - assertThat(frozenStats.totalShardCount, is(0)); - assertThat(frozenStats.docCount, is(0L)); - assertThat(frozenStats.totalByteCount, is(0L)); - assertThat(frozenStats.primaryShardCount, is(0)); - assertThat(frozenStats.primaryByteCount, is(0L)); - assertThat(frozenStats.primaryByteCountMedian, is(0L)); // All same size - assertThat(frozenStats.primaryShardBytesMAD, is(0L)); // All same size - } - - public void testCalculateStatsTieredIndicesOnly() { - // Nodes: 3 Data, 0 Tiered - Only hosting indices on generic data nodes - int nodeId = 0; - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode dataNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_ROLE); - discoBuilder.add(dataNode1); - DiscoveryNode dataNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_ROLE); - discoBuilder.add(dataNode2); - DiscoveryNode dataNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_ROLE); - discoBuilder.add(dataNode3); - - discoBuilder.localNodeId(dataNode1.getId()); - - // Indices: 1 Hot index, 2 Warm indices, 3 Cold indices - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata hotIndex1 = indexMetadata("hot_index_1", 3, 1, DataTier.DATA_HOT); - metadataBuilder.put(hotIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(hotIndex1.getIndex()); - routeTestShardToNodes(hotIndex1, 0, indexRoutingTableBuilder, dataNode1, dataNode2); - routeTestShardToNodes(hotIndex1, 1, indexRoutingTableBuilder, dataNode2, dataNode3); - routeTestShardToNodes(hotIndex1, 2, indexRoutingTableBuilder, dataNode3, dataNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata warmIndex1 = indexMetadata("warm_index_1", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex1.getIndex()); - routeTestShardToNodes(warmIndex1, 0, indexRoutingTableBuilder, dataNode1, dataNode2); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata warmIndex2 = indexMetadata("warm_index_2", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex2, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex2.getIndex()); - routeTestShardToNodes(warmIndex2, 0, indexRoutingTableBuilder, dataNode3, dataNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata coldIndex1 = indexMetadata("cold_index_1", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex1.getIndex()); - routeTestShardToNodes(coldIndex1, 0, indexRoutingTableBuilder, dataNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata coldIndex2 = indexMetadata("cold_index_2", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex2, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder 
indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex2.getIndex()); - routeTestShardToNodes(coldIndex2, 0, indexRoutingTableBuilder, dataNode2); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata coldIndex3 = indexMetadata("cold_index_3", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex3, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex3.getIndex()); - routeTestShardToNodes(coldIndex3, 0, indexRoutingTableBuilder, dataNode3); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - .routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - Index stats exist for the tiers, but no tiered nodes are found - assertThat(tierSpecificStats.size(), is(3)); - - DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); - assertThat(hotStats, is(notNullValue())); - assertThat(hotStats.nodeCount, is(0)); - assertThat(hotStats.indexCount, is(1)); - assertThat(hotStats.totalShardCount, is(6)); - assertThat(hotStats.docCount, is(6 * docCount)); - assertThat(hotStats.totalByteCount, is(6 * byteSize)); - assertThat(hotStats.primaryShardCount, is(3)); - assertThat(hotStats.primaryByteCount, is(3 * byteSize)); - assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); - assertThat(warmStats, is(notNullValue())); - assertThat(warmStats.nodeCount, is(0)); - assertThat(warmStats.indexCount, is(2)); - assertThat(warmStats.totalShardCount, is(4)); - assertThat(warmStats.docCount, is(4 * docCount)); - assertThat(warmStats.totalByteCount, is(4 * byteSize)); - assertThat(warmStats.primaryShardCount, is(2)); - assertThat(warmStats.primaryByteCount, is(2 * byteSize)); - assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); - assertThat(coldStats, is(notNullValue())); - assertThat(coldStats.nodeCount, is(0)); - assertThat(coldStats.indexCount, is(3)); - assertThat(coldStats.totalShardCount, is(3)); - assertThat(coldStats.docCount, is(3 * docCount)); - assertThat(coldStats.totalByteCount, is(3 * byteSize)); - assertThat(coldStats.primaryShardCount, is(3)); - assertThat(coldStats.primaryByteCount, is(3 * byteSize)); - assertThat(coldStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size - } - - public void testCalculateStatsReasonableCase() { - // Nodes: 3 Hot, 5 Warm, 1 Cold - int nodeId = 0; 
- DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode hotNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode1); - DiscoveryNode hotNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode2); - DiscoveryNode hotNode3 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode3); - DiscoveryNode warmNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode1); - DiscoveryNode warmNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode2); - DiscoveryNode warmNode3 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode3); - DiscoveryNode warmNode4 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode4); - DiscoveryNode warmNode5 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(warmNode5); - DiscoveryNode coldNode1 = newNode(nodeId, DiscoveryNodeRole.DATA_COLD_NODE_ROLE); - discoBuilder.add(coldNode1); - - discoBuilder.localNodeId(hotNode1.getId()); - - // Indices: 1 Hot index, 2 Warm indices, 3 Cold indices - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata hotIndex1 = indexMetadata("hot_index_1", 3, 1, DataTier.DATA_HOT); - metadataBuilder.put(hotIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(hotIndex1.getIndex()); - routeTestShardToNodes(hotIndex1, 0, indexRoutingTableBuilder, hotNode1, hotNode2); - routeTestShardToNodes(hotIndex1, 1, indexRoutingTableBuilder, hotNode2, hotNode3); - routeTestShardToNodes(hotIndex1, 2, indexRoutingTableBuilder, hotNode3, hotNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata warmIndex1 = indexMetadata("warm_index_1", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex1.getIndex()); - routeTestShardToNodes(warmIndex1, 0, indexRoutingTableBuilder, warmNode1, warmNode2); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata warmIndex2 = indexMetadata("warm_index_2", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex2, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex2.getIndex()); - routeTestShardToNodes(warmIndex2, 0, indexRoutingTableBuilder, warmNode3, warmNode4); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata coldIndex1 = indexMetadata("cold_index_1", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex1.getIndex()); - routeTestShardToNodes(coldIndex1, 0, indexRoutingTableBuilder, coldNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata coldIndex2 = indexMetadata("cold_index_2", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex2, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = 
IndexRoutingTable.builder(coldIndex2.getIndex()); - routeTestShardToNodes(coldIndex2, 0, indexRoutingTableBuilder, coldNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata coldIndex3 = indexMetadata("cold_index_3", 1, 0, DataTier.DATA_COLD); - metadataBuilder.put(coldIndex3, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex3.getIndex()); - routeTestShardToNodes(coldIndex3, 0, indexRoutingTableBuilder, coldNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - .routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - Node and Index stats are both collected - assertThat(tierSpecificStats.size(), is(3)); - - DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); - assertThat(hotStats, is(notNullValue())); - assertThat(hotStats.nodeCount, is(3)); - assertThat(hotStats.indexCount, is(1)); - assertThat(hotStats.totalShardCount, is(6)); - assertThat(hotStats.docCount, is(6 * docCount)); - assertThat(hotStats.totalByteCount, is(6 * byteSize)); - assertThat(hotStats.primaryShardCount, is(3)); - assertThat(hotStats.primaryByteCount, is(3 * byteSize)); - assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); - assertThat(warmStats, is(notNullValue())); - assertThat(warmStats.nodeCount, is(5)); - assertThat(warmStats.indexCount, is(2)); - assertThat(warmStats.totalShardCount, is(4)); - assertThat(warmStats.docCount, is(4 * docCount)); - assertThat(warmStats.totalByteCount, is(4 * byteSize)); - assertThat(warmStats.primaryShardCount, is(2)); - assertThat(warmStats.primaryByteCount, is(2 * byteSize)); - assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); - assertThat(coldStats, is(notNullValue())); - assertThat(coldStats.nodeCount, is(1)); - assertThat(coldStats.indexCount, is(3)); - assertThat(coldStats.totalShardCount, is(3)); - assertThat(coldStats.docCount, is(3 * docCount)); - assertThat(coldStats.totalByteCount, is(3 * byteSize)); - assertThat(coldStats.primaryShardCount, is(3)); - assertThat(coldStats.primaryByteCount, is(3 * byteSize)); - assertThat(coldStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size - } - - public void testCalculateStatsMixedTiers() { - // Nodes: 3 Hot+Warm - Nodes that are marked as part of multiple tiers - int nodeId = 0; - 
DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode mixedNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(mixedNode1); - DiscoveryNode mixedNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(mixedNode2); - DiscoveryNode mixedNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); - discoBuilder.add(mixedNode3); - - discoBuilder.localNodeId(mixedNode1.getId()); - - // Indices: 1 Hot index, 2 Warm indices - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata hotIndex1 = indexMetadata("hot_index_1", 3, 1, DataTier.DATA_HOT); - metadataBuilder.put(hotIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(hotIndex1.getIndex()); - routeTestShardToNodes(hotIndex1, 0, indexRoutingTableBuilder, mixedNode1, mixedNode2); - routeTestShardToNodes(hotIndex1, 1, indexRoutingTableBuilder, mixedNode3, mixedNode1); - routeTestShardToNodes(hotIndex1, 2, indexRoutingTableBuilder, mixedNode2, mixedNode3); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata warmIndex1 = indexMetadata("warm_index_1", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex1.getIndex()); - routeTestShardToNodes(warmIndex1, 0, indexRoutingTableBuilder, mixedNode1, mixedNode2); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - IndexMetadata warmIndex2 = indexMetadata("warm_index_2", 1, 1, DataTier.DATA_WARM); - metadataBuilder.put(warmIndex2, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex2.getIndex()); - routeTestShardToNodes(warmIndex2, 0, indexRoutingTableBuilder, mixedNode3, mixedNode1); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - .routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - Index stats are separated by their preferred tier, instead of counted - // toward multiple tiers based on their current routing. Nodes are counted for each tier they are in. 
- assertThat(tierSpecificStats.size(), is(2)); - - DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); - assertThat(hotStats, is(notNullValue())); - assertThat(hotStats.nodeCount, is(3)); - assertThat(hotStats.indexCount, is(1)); - assertThat(hotStats.totalShardCount, is(6)); - assertThat(hotStats.docCount, is(6 * docCount)); - assertThat(hotStats.totalByteCount, is(6 * byteSize)); - assertThat(hotStats.primaryShardCount, is(3)); - assertThat(hotStats.primaryByteCount, is(3 * byteSize)); - assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); - assertThat(warmStats, is(notNullValue())); - assertThat(warmStats.nodeCount, is(3)); - assertThat(warmStats.indexCount, is(2)); - assertThat(warmStats.totalShardCount, is(4)); - assertThat(warmStats.docCount, is(4 * docCount)); - assertThat(warmStats.totalByteCount, is(4 * byteSize)); - assertThat(warmStats.primaryShardCount, is(2)); - assertThat(warmStats.primaryByteCount, is(2 * byteSize)); - assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size - } - - public void testCalculateStatsStuckInWrongTier() { - // Nodes: 3 Hot, 0 Warm - Emulating indices stuck on non-preferred tiers - int nodeId = 0; - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); - discoBuilder.add(leader); - discoBuilder.masterNodeId(leader.getId()); - - DiscoveryNode hotNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode1); - DiscoveryNode hotNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode2); - DiscoveryNode hotNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); - discoBuilder.add(hotNode3); - - discoBuilder.localNodeId(hotNode1.getId()); - - // Indices: 1 Hot index, 1 Warm index (Warm index is allocated to less preferred hot node because warm nodes are missing) - Metadata.Builder metadataBuilder = Metadata.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - - IndexMetadata hotIndex1 = indexMetadata("hot_index_1", 3, 1, DataTier.DATA_HOT); - metadataBuilder.put(hotIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(hotIndex1.getIndex()); - routeTestShardToNodes(hotIndex1, 0, indexRoutingTableBuilder, hotNode1, hotNode2); - routeTestShardToNodes(hotIndex1, 1, indexRoutingTableBuilder, hotNode3, hotNode1); - routeTestShardToNodes(hotIndex1, 2, indexRoutingTableBuilder, hotNode2, hotNode3); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - IndexMetadata warmIndex1 = indexMetadata("warm_index_1", 1, 1, DataTier.DATA_WARM, DataTier.DATA_HOT); - metadataBuilder.put(warmIndex1, false).generateClusterUuidIfNeeded(); - { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex1.getIndex()); - routeTestShardToNodes(warmIndex1, 0, indexRoutingTableBuilder, hotNode1, hotNode2); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - } - - // Cluster State and create stats responses - ClusterState clusterState = ClusterState.builder(new ClusterName("test")) - .nodes(discoBuilder) - .metadata(metadataBuilder) - 
.routingTable(routingTableBuilder.build()) - .build(); - - long byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB - long docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million - List nodeStatsList = buildNodeStats(clusterState, byteSize, docCount); - - // Calculate usage - Map indexByTier = DataTiersUsageTransportAction.tierIndices(clusterState.metadata().indices()); - Map tierSpecificStats = DataTiersUsageTransportAction.calculateStats( - nodeStatsList, - indexByTier, - clusterState.getRoutingNodes() - ); - - // Verify - Warm indices are still calculated separately from Hot ones, despite Warm nodes missing - assertThat(tierSpecificStats.size(), is(2)); - - DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); - assertThat(hotStats, is(notNullValue())); - assertThat(hotStats.nodeCount, is(3)); - assertThat(hotStats.indexCount, is(1)); - assertThat(hotStats.totalShardCount, is(6)); - assertThat(hotStats.docCount, is(6 * docCount)); - assertThat(hotStats.totalByteCount, is(6 * byteSize)); - assertThat(hotStats.primaryShardCount, is(3)); - assertThat(hotStats.primaryByteCount, is(3 * byteSize)); - assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size - - DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); - assertThat(warmStats, is(notNullValue())); - assertThat(warmStats.nodeCount, is(0)); - assertThat(warmStats.indexCount, is(1)); - assertThat(warmStats.totalShardCount, is(2)); - assertThat(warmStats.docCount, is(2 * docCount)); - assertThat(warmStats.totalByteCount, is(2 * byteSize)); - assertThat(warmStats.primaryShardCount, is(1)); - assertThat(warmStats.primaryByteCount, is(byteSize)); - assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size - assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size - } - - private static DiscoveryNode newNode(int nodeId, DiscoveryNodeRole... roles) { - return DiscoveryNodeUtils.builder("node_" + nodeId).roles(Set.of(roles)).build(); - } - - private static IndexMetadata indexMetadata(String indexName, int numberOfShards, int numberOfReplicas, String... dataTierPrefs) { - Settings.Builder settingsBuilder = indexSettings(IndexVersion.current(), numberOfShards, numberOfReplicas).put( - SETTING_CREATION_DATE, - System.currentTimeMillis() - ); - - if (dataTierPrefs.length > 1) { - StringBuilder tierBuilder = new StringBuilder(dataTierPrefs[0]); - for (int idx = 1; idx < dataTierPrefs.length; idx++) { - tierBuilder.append(',').append(dataTierPrefs[idx]); - } - settingsBuilder.put(DataTier.TIER_PREFERENCE, tierBuilder.toString()); - } else if (dataTierPrefs.length == 1) { - settingsBuilder.put(DataTier.TIER_PREFERENCE, dataTierPrefs[0]); - } - - return IndexMetadata.builder(indexName).settings(settingsBuilder.build()).timestampRange(IndexLongFieldRange.UNKNOWN).build(); - } - - private static void routeTestShardToNodes( - IndexMetadata index, - int shard, - IndexRoutingTable.Builder indexRoutingTableBuilder, - DiscoveryNode... 
nodes - ) { - ShardId shardId = new ShardId(index.getIndex(), shard); - IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); - boolean primary = true; - for (DiscoveryNode node : nodes) { - indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting(shardId, node.getId(), null, primary, ShardRoutingState.STARTED) - ); - primary = false; - } - indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder); - } - - private List buildNodeStats(ClusterState clusterState, long bytesPerShard, long docsPerShard) { - DiscoveryNodes nodes = clusterState.getNodes(); - RoutingNodes routingNodes = clusterState.getRoutingNodes(); - List nodeStatsList = new ArrayList<>(); - for (DiscoveryNode node : nodes) { - RoutingNode routingNode = routingNodes.node(node.getId()); - if (routingNode == null) { - continue; - } - Map> indexStats = new HashMap<>(); - for (ShardRouting shardRouting : routingNode) { - ShardId shardId = shardRouting.shardId(); - ShardStats shardStat = shardStat(bytesPerShard, docsPerShard, shardRouting); - IndexShardStats shardStats = new IndexShardStats(shardId, new ShardStats[] { shardStat }); - indexStats.computeIfAbsent(shardId.getIndex(), k -> new ArrayList<>()).add(shardStats); - } - NodeIndicesStats nodeIndexStats = new NodeIndicesStats(new CommonStats(), Collections.emptyMap(), indexStats, true); - nodeStatsList.add(mockNodeStats(node, nodeIndexStats)); - } - return nodeStatsList; - } - - private static ShardStats shardStat(long byteCount, long docCount, ShardRouting routing) { - StoreStats storeStats = new StoreStats(randomNonNegativeLong(), byteCount, 0L); - DocsStats docsStats = new DocsStats(docCount, 0L, byteCount); - - CommonStats commonStats = new CommonStats(CommonStatsFlags.ALL); - commonStats.getStore().add(storeStats); - commonStats.getDocs().add(docsStats); - - Path fakePath = PathUtils.get("test/dir/" + routing.shardId().getIndex().getUUID() + "/" + routing.shardId().id()); - ShardPath fakeShardPath = new ShardPath(false, fakePath, fakePath, routing.shardId()); - - return new ShardStats(routing, fakeShardPath, commonStats, null, null, null, false, 0); - } - - private static NodeStats mockNodeStats(DiscoveryNode node, NodeIndicesStats indexStats) { - NodeStats stats = mock(NodeStats.class); - when(stats.getNode()).thenReturn(node); - when(stats.getIndices()).thenReturn(indexStats); - return stats; - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTierUsageFixtures.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTierUsageFixtures.java new file mode 100644 index 0000000000000..63cc6e4d7914e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTierUsageFixtures.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.core.datatiers;
+
+import org.elasticsearch.action.admin.indices.stats.CommonStats;
+import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
+import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
+import org.elasticsearch.action.admin.indices.stats.ShardStats;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodeRole;
+import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.allocation.DataTier;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.PathUtils;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.shard.DocsStats;
+import org.elasticsearch.index.shard.IndexLongFieldRange;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardPath;
+import org.elasticsearch.index.store.StoreStats;
+import org.elasticsearch.indices.NodeIndicesStats;
+import org.elasticsearch.test.ESTestCase;
+
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_CREATION_DATE;
+
+class DataTierUsageFixtures extends ESTestCase {
+
+    private static final CommonStats COMMON_STATS = new CommonStats(
+        CommonStatsFlags.NONE.set(CommonStatsFlags.Flag.Docs, true).set(CommonStatsFlags.Flag.Store, true)
+    );
+
+    static DiscoveryNode newNode(int nodeId, DiscoveryNodeRole... roles) {
+        return DiscoveryNodeUtils.builder("node_" + nodeId).roles(Set.of(roles)).build();
+    }
+
+    static void routeTestShardToNodes(
+        IndexMetadata index,
+        int shard,
+        IndexRoutingTable.Builder indexRoutingTableBuilder,
+        DiscoveryNode... nodes
+    ) {
+        ShardId shardId = new ShardId(index.getIndex(), shard);
+        IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);
+        boolean primary = true;
+        for (DiscoveryNode node : nodes) {
+            indexShardRoutingBuilder.addShard(
+                TestShardRouting.newShardRouting(shardId, node.getId(), null, primary, ShardRoutingState.STARTED)
+            );
+            primary = false;
+        }
+        indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder);
+    }
+
+    static NodeIndicesStats buildNodeIndicesStats(RoutingNode routingNode, long bytesPerShard, long docsPerShard) {
+        Map<Index, List<IndexShardStats>> indexStats = new HashMap<>();
+        for (ShardRouting shardRouting : routingNode) {
+            ShardId shardId = shardRouting.shardId();
+            ShardStats shardStat = shardStat(bytesPerShard, docsPerShard, shardRouting);
+            IndexShardStats shardStats = new IndexShardStats(shardId, new ShardStats[] { shardStat });
+            indexStats.computeIfAbsent(shardId.getIndex(), k -> new ArrayList<>()).add(shardStats);
+        }
+        return new NodeIndicesStats(COMMON_STATS, Map.of(), indexStats, true);
+    }
+
+    private static ShardStats shardStat(long byteCount, long docCount, ShardRouting routing) {
+        StoreStats storeStats = new StoreStats(randomNonNegativeLong(), byteCount, 0L);
+        DocsStats docsStats = new DocsStats(docCount, 0L, byteCount);
+        Path fakePath = PathUtils.get("test/dir/" + routing.shardId().getIndex().getUUID() + "/" + routing.shardId().id());
+        ShardPath fakeShardPath = new ShardPath(false, fakePath, fakePath, routing.shardId());
+        CommonStats commonStats = new CommonStats(CommonStatsFlags.ALL);
+        commonStats.getStore().add(storeStats);
+        commonStats.getDocs().add(docsStats);
+        return new ShardStats(routing, fakeShardPath, commonStats, null, null, null, false, 0);
+    }
+
+    static IndexMetadata indexMetadata(String indexName, int numberOfShards, int numberOfReplicas, String... dataTierPrefs) {
+        Settings.Builder settingsBuilder = indexSettings(IndexVersion.current(), numberOfShards, numberOfReplicas).put(
+            SETTING_CREATION_DATE,
+            System.currentTimeMillis()
+        );
+
+        if (dataTierPrefs.length > 1) {
+            StringBuilder tierBuilder = new StringBuilder(dataTierPrefs[0]);
+            for (int idx = 1; idx < dataTierPrefs.length; idx++) {
+                tierBuilder.append(',').append(dataTierPrefs[idx]);
+            }
+            settingsBuilder.put(DataTier.TIER_PREFERENCE, tierBuilder.toString());
+        } else if (dataTierPrefs.length == 1) {
+            settingsBuilder.put(DataTier.TIER_PREFERENCE, dataTierPrefs[0]);
+        }
+
+        return IndexMetadata.builder(indexName).settings(settingsBuilder.build()).timestampRange(IndexLongFieldRange.UNKNOWN).build();
+    }
+}
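A minimal usage sketch of the fixtures extracted above. The test class name is hypothetical; since the helpers are package-private, it is assumed to live in org.elasticsearch.xpack.core.datatiers. The comma-joined preference string follows from the StringBuilder loop in indexMetadata.

package org.elasticsearch.xpack.core.datatiers;

import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.allocation.DataTier;
import org.elasticsearch.test.ESTestCase;

// Hypothetical test illustrating the fixture helpers.
public class DataTierUsageFixturesUsageExampleTests extends ESTestCase {
    public void testTierPreferenceIsApplied() {
        // indexMetadata(...) joins multiple tier preferences, most-preferred first,
        // into the tier-preference index setting.
        IndexMetadata index = DataTierUsageFixtures.indexMetadata("cold-2", 1, 0, DataTier.DATA_COLD, DataTier.DATA_WARM);
        assertEquals("data_cold,data_warm", index.getSettings().get(DataTier.TIER_PREFERENCE));
    }
}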
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsageTests.java
similarity index 97%
rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsageTests.java
rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsageTests.java
index e5f37dfb5764c..0951408441b3f 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/DataTiersFeatureSetUsageTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsageTests.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-package org.elasticsearch.xpack.core;
+package org.elasticsearch.xpack.core.datatiers;
 
 import org.elasticsearch.cluster.routing.allocation.DataTier;
 import org.elasticsearch.common.io.stream.Writeable;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportActionTests.java
new file mode 100644
index 0000000000000..bb8dce7db0e23
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/DataTiersUsageTransportActionTests.java
@@ -0,0 +1,535 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.datatiers;
+
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodeRole;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.allocation.DataTier;
+import org.elasticsearch.search.aggregations.metrics.TDigestState;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.IntStream;
+
+import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.indexMetadata;
+import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.newNode;
+import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.routeTestShardToNodes;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class DataTiersUsageTransportActionTests extends ESTestCase {
+
+    private long byteSize;
+    private long docCount;
+
+    @Before
+    public void setup() {
+        byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB
+        docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million
+    }
+
+    public void testTierIndices() {
+        DiscoveryNode dataNode = newNode(0, DiscoveryNodeRole.DATA_ROLE);
+        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
+        discoBuilder.add(dataNode);
+
+        IndexMetadata hotIndex1 = indexMetadata("hot-1", 1, 0, DataTier.DATA_HOT);
+        IndexMetadata hotIndex2 = indexMetadata("hot-2", 1, 0, DataTier.DATA_HOT);
+        IndexMetadata warmIndex1 = indexMetadata("warm-1", 1, 0, DataTier.DATA_WARM);
+        IndexMetadata coldIndex1 = indexMetadata("cold-1", 1, 0, DataTier.DATA_COLD);
+        IndexMetadata coldIndex2 = indexMetadata("cold-2", 1, 0, DataTier.DATA_COLD, DataTier.DATA_WARM); // Prefers cold over warm
+        IndexMetadata nonTiered = indexMetadata("non-tier", 1, 0); // No tier
+        IndexMetadata hotIndex3 = indexMetadata("hot-3", 1, 0, DataTier.DATA_HOT);
+
+        Metadata.Builder metadataBuilder = Metadata.builder()
+            .put(hotIndex1, false)
+            .put(hotIndex2, false)
+            .put(warmIndex1, false)
+            .put(coldIndex1, false)
+            .put(coldIndex2, false)
+            .put(nonTiered, false)
+            .put(hotIndex3, false)
+            .generateClusterUuidIfNeeded();
+        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
+        routingTableBuilder.add(getIndexRoutingTable(hotIndex1, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(hotIndex2, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(hotIndex2, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(warmIndex1, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(coldIndex1, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(coldIndex2, dataNode));
+        routingTableBuilder.add(getIndexRoutingTable(nonTiered, dataNode));
+        ClusterState clusterState = ClusterState.builder(new ClusterName("test"))
+            .nodes(discoBuilder)
+            .metadata(metadataBuilder)
+            .routingTable(routingTableBuilder.build())
+            .build();
+        Map<String, Set<String>> result = DataTiersUsageTransportAction.getIndicesGroupedByTier(
+            clusterState,
+            List.of(new NodeDataTiersUsage(dataNode, Map.of(DataTier.DATA_WARM, createStats(5, 5, 0, 10))))
+        );
+        assertThat(result.keySet(), equalTo(Set.of(DataTier.DATA_HOT, DataTier.DATA_WARM, DataTier.DATA_COLD)));
+        assertThat(result.get(DataTier.DATA_HOT), equalTo(Set.of(hotIndex1.getIndex().getName(), hotIndex2.getIndex().getName())));
+        assertThat(result.get(DataTier.DATA_WARM), equalTo(Set.of(warmIndex1.getIndex().getName())));
+        assertThat(result.get(DataTier.DATA_COLD), equalTo(Set.of(coldIndex1.getIndex().getName(), coldIndex2.getIndex().getName())));
+    }
+
+    public void testCalculateMAD() {
+        assertThat(DataTiersUsageTransportAction.computeMedianAbsoluteDeviation(TDigestState.create(10)), equalTo(0L));
+
+        TDigestState sketch = TDigestState.create(randomDoubleBetween(1, 1000, false));
+        sketch.add(1);
+        sketch.add(1);
+        sketch.add(2);
+        sketch.add(2);
+        sketch.add(4);
+        sketch.add(6);
+        sketch.add(9);
+        assertThat(DataTiersUsageTransportAction.computeMedianAbsoluteDeviation(sketch), equalTo(1L));
+    }
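Why the MAD assertion above expects 1 for the sample {1, 1, 2, 2, 4, 6, 9}: the median is 2, the absolute deviations are {1, 1, 0, 0, 2, 4, 7}, and their median is 1. A plain-Java sketch of that arithmetic follows; it deliberately bypasses TDigestState and only shows where the expected value comes from.

import java.util.Arrays;

public class MadExample {
    // Median of an already-sorted array.
    static double median(double[] sorted) {
        int n = sorted.length;
        return n % 2 == 1 ? sorted[n / 2] : (sorted[n / 2 - 1] + sorted[n / 2]) / 2.0;
    }

    public static void main(String[] args) {
        double[] values = { 1, 1, 2, 2, 4, 6, 9 };  // already sorted
        double med = median(values);                 // 2.0
        // Median absolute deviation: median(|x_i - median(x)|)
        double[] deviations = Arrays.stream(values).map(v -> Math.abs(v - med)).sorted().toArray();
        System.out.println(median(deviations));     // prints 1.0, matching the assertion
    }
}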
+
+    public void testCalculateStatsNoTiers() {
+        // Nodes: 0 Tiered Nodes, 1 Data Node, no indices on tiered nodes
+        DiscoveryNode leader = newNode(0, DiscoveryNodeRole.MASTER_ROLE);
+        DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_ROLE);
+
+        List<NodeDataTiersUsage> nodeDataTiersUsages = List.of(
+            new NodeDataTiersUsage(leader, Map.of()),
+            new NodeDataTiersUsage(dataNode1, Map.of())
+        );
+        Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = DataTiersUsageTransportAction.aggregateStats(
+            nodeDataTiersUsages,
+            Map.of()
+        );
+
+        // Verify - No results when no tiers present
+        assertThat(tierSpecificStats.size(), is(0));
+    }
+
+    public void testCalculateStatsTieredNodesOnly() {
+        // Nodes: 1 Data, 1 Hot, 1 Warm, 1 Cold, 1 Frozen
+        DiscoveryNode leader = newNode(0, DiscoveryNodeRole.MASTER_ROLE);
+        DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_ROLE);
+        DiscoveryNode hotNode1 = newNode(2, DiscoveryNodeRole.DATA_HOT_NODE_ROLE);
+        DiscoveryNode warmNode1 = newNode(3, DiscoveryNodeRole.DATA_WARM_NODE_ROLE);
+        DiscoveryNode coldNode1 = newNode(4, DiscoveryNodeRole.DATA_COLD_NODE_ROLE);
+        DiscoveryNode frozenNode1 = newNode(5, DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE);
+
+        List<NodeDataTiersUsage> nodeDataTiersUsages = List.of(
+            new NodeDataTiersUsage(leader, Map.of()),
+            new NodeDataTiersUsage(dataNode1, Map.of()),
+            new NodeDataTiersUsage(hotNode1, Map.of()),
+            new NodeDataTiersUsage(warmNode1, Map.of()),
+            new NodeDataTiersUsage(coldNode1, Map.of()),
+            new NodeDataTiersUsage(frozenNode1, Map.of())
+        );
+
+        Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = DataTiersUsageTransportAction.aggregateStats(
+            nodeDataTiersUsages,
+            Map.of()
+        );
+
+        // Verify - Results are present, but they lack index numbers because
none are tiered + assertThat(tierSpecificStats.size(), is(4)); + + DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.nodeCount, is(1)); + assertThat(hotStats.indexCount, is(0)); + assertThat(hotStats.totalShardCount, is(0)); + assertThat(hotStats.docCount, is(0L)); + assertThat(hotStats.totalByteCount, is(0L)); + assertThat(hotStats.primaryShardCount, is(0)); + assertThat(hotStats.primaryByteCount, is(0L)); + assertThat(hotStats.primaryByteCountMedian, is(0L)); // All same size + assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); + assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.nodeCount, is(1)); + assertThat(warmStats.indexCount, is(0)); + assertThat(warmStats.totalShardCount, is(0)); + assertThat(warmStats.docCount, is(0L)); + assertThat(warmStats.totalByteCount, is(0L)); + assertThat(warmStats.primaryShardCount, is(0)); + assertThat(warmStats.primaryByteCount, is(0L)); + assertThat(warmStats.primaryByteCountMedian, is(0L)); // All same size + assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); + assertThat(coldStats, is(notNullValue())); + assertThat(coldStats.nodeCount, is(1)); + assertThat(coldStats.indexCount, is(0)); + assertThat(coldStats.totalShardCount, is(0)); + assertThat(coldStats.docCount, is(0L)); + assertThat(coldStats.totalByteCount, is(0L)); + assertThat(coldStats.primaryShardCount, is(0)); + assertThat(coldStats.primaryByteCount, is(0L)); + assertThat(coldStats.primaryByteCountMedian, is(0L)); // All same size + assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats frozenStats = tierSpecificStats.get(DataTier.DATA_FROZEN); + assertThat(frozenStats, is(notNullValue())); + assertThat(frozenStats.nodeCount, is(1)); + assertThat(frozenStats.indexCount, is(0)); + assertThat(frozenStats.totalShardCount, is(0)); + assertThat(frozenStats.docCount, is(0L)); + assertThat(frozenStats.totalByteCount, is(0L)); + assertThat(frozenStats.primaryShardCount, is(0)); + assertThat(frozenStats.primaryByteCount, is(0L)); + assertThat(frozenStats.primaryByteCountMedian, is(0L)); // All same size + assertThat(frozenStats.primaryShardBytesMAD, is(0L)); // All same size + } + + public void testCalculateStatsTieredIndicesOnly() { + // Nodes: 3 Data, 0 Tiered - Only hosting indices on generic data nodes + int nodeId = 0; + DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); + DiscoveryNode dataNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_ROLE); + DiscoveryNode dataNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_ROLE); + DiscoveryNode dataNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_ROLE); + + // Indices: + // 1 Hot index: 3 primaries, 3 replicas one on each node + // 2 Warm indices, each index 1 primary one replica + // 3 Cold indices, each index 1 primary on a different node + String hotIndex = "hot_index_1"; + String warmIndex1 = "warm_index_1"; + String warmIndex2 = "warm_index_2"; + String coldIndex1 = "cold_index_1"; + String coldIndex2 = "cold_index_2"; + String coldIndex3 = "cold_index_3"; + + List nodeDataTiersUsages = List.of( + new NodeDataTiersUsage(leader, Map.of()), + new NodeDataTiersUsage( + dataNode1, + Map.of( + DataTier.DATA_HOT, 
+ createStats(1, 2, docCount, byteSize), + DataTier.DATA_WARM, + createStats(0, 2, docCount, byteSize), + DataTier.DATA_COLD, + createStats(1, 1, docCount, byteSize) + ) + ), + new NodeDataTiersUsage( + dataNode2, + Map.of( + DataTier.DATA_HOT, + createStats(1, 2, docCount, byteSize), + DataTier.DATA_WARM, + createStats(1, 1, docCount, byteSize), + DataTier.DATA_COLD, + createStats(1, 1, docCount, byteSize) + ) + ), + new NodeDataTiersUsage( + dataNode3, + Map.of( + DataTier.DATA_HOT, + createStats(1, 2, docCount, byteSize), + DataTier.DATA_WARM, + createStats(1, 1, docCount, byteSize), + DataTier.DATA_COLD, + createStats(1, 1, docCount, byteSize) + ) + ) + ); + // Calculate usage + Map tierSpecificStats = DataTiersUsageTransportAction.aggregateStats( + nodeDataTiersUsages, + Map.of( + DataTier.DATA_HOT, + Set.of(hotIndex), + DataTier.DATA_WARM, + Set.of(warmIndex1, warmIndex2), + DataTier.DATA_COLD, + Set.of(coldIndex1, coldIndex2, coldIndex3) + ) + ); + + // Verify - Index stats exist for the tiers, but no tiered nodes are found + assertThat(tierSpecificStats.size(), is(3)); + + DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.nodeCount, is(0)); + assertThat(hotStats.indexCount, is(1)); + assertThat(hotStats.totalShardCount, is(6)); + assertThat(hotStats.docCount, is(6 * docCount)); + assertThat(hotStats.totalByteCount, is(6 * byteSize)); + assertThat(hotStats.primaryShardCount, is(3)); + assertThat(hotStats.primaryByteCount, is(3 * byteSize)); + assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); + assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.nodeCount, is(0)); + assertThat(warmStats.indexCount, is(2)); + assertThat(warmStats.totalShardCount, is(4)); + assertThat(warmStats.docCount, is(4 * docCount)); + assertThat(warmStats.totalByteCount, is(4 * byteSize)); + assertThat(warmStats.primaryShardCount, is(2)); + assertThat(warmStats.primaryByteCount, is(2 * byteSize)); + assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); + assertThat(coldStats, is(notNullValue())); + assertThat(coldStats.nodeCount, is(0)); + assertThat(coldStats.indexCount, is(3)); + assertThat(coldStats.totalShardCount, is(3)); + assertThat(coldStats.docCount, is(3 * docCount)); + assertThat(coldStats.totalByteCount, is(3 * byteSize)); + assertThat(coldStats.primaryShardCount, is(3)); + assertThat(coldStats.primaryByteCount, is(3 * byteSize)); + assertThat(coldStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size + } + + public void testCalculateStatsReasonableCase() { + // Nodes: 3 Hot, 5 Warm, 1 Cold + int nodeId = 0; + DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); + DiscoveryNode hotNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode3 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode warmNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); 
+ + public void testCalculateStatsReasonableCase() { + // Nodes: 3 Hot, 5 Warm, 1 Cold + int nodeId = 0; + DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); + DiscoveryNode hotNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode3 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode warmNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode warmNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode warmNode3 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode warmNode4 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode warmNode5 = newNode(nodeId++, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode coldNode1 = newNode(nodeId, DiscoveryNodeRole.DATA_COLD_NODE_ROLE); + + // Indices: + // 1 Hot index: 3 primaries and 3 replicas, with 1 primary and 1 replica on each hot node + // 2 Warm indices: each with 1 primary and 1 replica, spread across 4 of the warm nodes + // 3 Cold indices: 1 primary each on the cold node + String hotIndex1 = "hot_index_1"; + String warmIndex1 = "warm_index_1"; + String warmIndex2 = "warm_index_2"; + String coldIndex1 = "cold_index_1"; + String coldIndex2 = "cold_index_2"; + String coldIndex3 = "cold_index_3"; + + List<NodeDataTiersUsage> nodeDataTiersUsages = List.of( + new NodeDataTiersUsage(leader, Map.of()), + new NodeDataTiersUsage(hotNode1, Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize))), + new NodeDataTiersUsage(hotNode2, Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize))), + new NodeDataTiersUsage(hotNode3, Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize))), + new NodeDataTiersUsage(warmNode1, Map.of(DataTier.DATA_WARM, createStats(1, 1, docCount, byteSize))), + new NodeDataTiersUsage(warmNode2, Map.of(DataTier.DATA_WARM, createStats(0, 1, docCount, byteSize))), + new NodeDataTiersUsage(warmNode3, Map.of(DataTier.DATA_WARM, createStats(1, 1, docCount, byteSize))), + new NodeDataTiersUsage(warmNode4, Map.of(DataTier.DATA_WARM, createStats(0, 1, docCount, byteSize))), + new NodeDataTiersUsage(warmNode5, Map.of()), + new NodeDataTiersUsage(coldNode1, Map.of(DataTier.DATA_COLD, createStats(3, 3, docCount, byteSize))) + + ); + // Calculate usage + Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = DataTiersUsageTransportAction.aggregateStats( + nodeDataTiersUsages, + Map.of( + DataTier.DATA_HOT, + Set.of(hotIndex1), + DataTier.DATA_WARM, + Set.of(warmIndex1, warmIndex2), + DataTier.DATA_COLD, + Set.of(coldIndex1, coldIndex2, coldIndex3) + ) + ); + + // Verify - Node and Index stats are both collected + assertThat(tierSpecificStats.size(), is(3)); + + DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.nodeCount, is(3)); + assertThat(hotStats.indexCount, is(1)); + assertThat(hotStats.totalShardCount, is(6)); + assertThat(hotStats.docCount, is(6 * docCount)); + assertThat(hotStats.totalByteCount, is(6 * byteSize)); + assertThat(hotStats.primaryShardCount, is(3)); + assertThat(hotStats.primaryByteCount, is(3 * byteSize)); + assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); + assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.nodeCount, is(5)); + assertThat(warmStats.indexCount, is(2)); + assertThat(warmStats.totalShardCount, is(4)); + assertThat(warmStats.docCount, is(4 * docCount)); + assertThat(warmStats.totalByteCount, is(4 * byteSize)); + assertThat(warmStats.primaryShardCount, is(2)); + assertThat(warmStats.primaryByteCount, is(2 * byteSize)); + assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats coldStats = tierSpecificStats.get(DataTier.DATA_COLD); + assertThat(coldStats, is(notNullValue())); + assertThat(coldStats.nodeCount, is(1)); + assertThat(coldStats.indexCount, is(3)); + assertThat(coldStats.totalShardCount, is(3)); + assertThat(coldStats.docCount, is(3 * docCount)); + assertThat(coldStats.totalByteCount, is(3 * byteSize)); + assertThat(coldStats.primaryShardCount, is(3)); + assertThat(coldStats.primaryByteCount, is(3 * byteSize)); + assertThat(coldStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(coldStats.primaryShardBytesMAD, is(0L)); // All same size + }
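A detail worth calling out before the next test: for nodeCount, a node is counted toward every data tier whose role it carries, which is how three hot+warm nodes can yield nodeCount == 3 for both tiers below. A tiny stand-alone model of that counting (the TierNode record is made up for this sketch, not an Elasticsearch type):

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Invented types, illustrative only: count nodes per data tier based on the roles they carry.
record TierNode(String id, Set<String> roles) {}

final class TierNodeCountSketch {
    static Map<String, Long> nodeCountPerTier(List<TierNode> nodes, List<String> tiers) {
        return tiers.stream()
            .collect(Collectors.toMap(tier -> tier, tier -> nodes.stream().filter(node -> node.roles().contains(tier)).count()));
    }

    public static void main(String[] args) {
        List<TierNode> mixed = List.of(
            new TierNode("node-1", Set.of("data_hot", "data_warm")),
            new TierNode("node-2", Set.of("data_hot", "data_warm")),
            new TierNode("node-3", Set.of("data_hot", "data_warm"))
        );
        // Each node counts once per tier it belongs to: {data_hot=3, data_warm=3}
        System.out.println(nodeCountPerTier(mixed, List.of("data_hot", "data_warm")));
    }
}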
+ + public void testCalculateStatsMixedTiers() { + // Nodes: 3 Hot+Warm - Nodes that are marked as part of multiple tiers + int nodeId = 0; + DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); + + DiscoveryNode mixedNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode mixedNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + DiscoveryNode mixedNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_HOT_NODE_ROLE, DiscoveryNodeRole.DATA_WARM_NODE_ROLE); + + String hotIndex1 = "hot_index_1"; + String warmIndex1 = "warm_index_1"; + String warmIndex2 = "warm_index_2"; + + // Indices: 1 Hot index, 2 Warm indices + List<NodeDataTiersUsage> nodeDataTiersUsages = List.of( + new NodeDataTiersUsage(leader, Map.of()), + new NodeDataTiersUsage( + mixedNode1, + Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize), DataTier.DATA_WARM, createStats(1, 2, docCount, byteSize)) + ), + new NodeDataTiersUsage( + mixedNode2, + Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize), DataTier.DATA_WARM, createStats(0, 1, docCount, byteSize)) + ), + new NodeDataTiersUsage( + mixedNode3, + Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize), DataTier.DATA_WARM, createStats(1, 1, docCount, byteSize)) + ) + ); + + // Calculate usage + Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = DataTiersUsageTransportAction.aggregateStats( + nodeDataTiersUsages, + Map.of(DataTier.DATA_HOT, Set.of(hotIndex1), DataTier.DATA_WARM, Set.of(warmIndex1, warmIndex2)) + ); + + // Verify - Index stats are attributed to their preferred tier instead of being counted + // toward multiple tiers based on their current routing. Nodes are counted once for each tier they are part of. + assertThat(tierSpecificStats.size(), is(2)); + + DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.nodeCount, is(3)); + assertThat(hotStats.indexCount, is(1)); + assertThat(hotStats.totalShardCount, is(6)); + assertThat(hotStats.docCount, is(6 * docCount)); + assertThat(hotStats.totalByteCount, is(6 * byteSize)); + assertThat(hotStats.primaryShardCount, is(3)); + assertThat(hotStats.primaryByteCount, is(3 * byteSize)); + assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM); + assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.nodeCount, is(3)); + assertThat(warmStats.indexCount, is(2)); + assertThat(warmStats.totalShardCount, is(4)); + assertThat(warmStats.docCount, is(4 * docCount)); + assertThat(warmStats.totalByteCount, is(4 * byteSize)); + assertThat(warmStats.primaryShardCount, is(2)); + assertThat(warmStats.primaryByteCount, is(2 * byteSize)); + assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size + }
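The mixed-tier test above and the stuck-in-wrong-tier test below both depend on the same attribution rule: an index is counted under the first tier of its index.routing.allocation.include._tier_preference setting, regardless of where its shards are currently allocated. A simplified stand-alone sketch of that rule (helper names invented; this is not the Elasticsearch API):

import java.util.Map;

// Illustrative only: resolve the tier an index is attributed to from its tier preference setting.
final class TierPreferenceSketch {
    static String preferredTier(Map<String, String> indexSettings) {
        String preference = indexSettings.getOrDefault("index.routing.allocation.include._tier_preference", "");
        // The preference is an ordered list, e.g. "data_warm,data_hot"; the first entry wins.
        int comma = preference.indexOf(',');
        return comma == -1 ? preference : preference.substring(0, comma);
    }

    public static void main(String[] args) {
        Map<String, String> warmIndexSettings = Map.of("index.routing.allocation.include._tier_preference", "data_warm,data_hot");
        // Attributed to data_warm even if every shard currently sits on a hot node.
        System.out.println(preferredTier(warmIndexSettings)); // data_warm
    }
}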
+ + public void testCalculateStatsStuckInWrongTier() { + // Nodes: 3 Hot, 0 Warm - Emulating indices stuck on non-preferred tiers + int nodeId = 0; + DiscoveryNode leader = newNode(nodeId++, DiscoveryNodeRole.MASTER_ROLE); + DiscoveryNode hotNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode2 = newNode(nodeId++, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + DiscoveryNode hotNode3 = newNode(nodeId, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + + String hotIndex1 = "hot_index_1"; + String warmIndex1 = "warm_index_1"; + + List<NodeDataTiersUsage> nodeDataTiersUsages = List.of( + new NodeDataTiersUsage(leader, Map.of()), + new NodeDataTiersUsage( + hotNode1, + Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize), DataTier.DATA_WARM, createStats(1, 1, docCount, byteSize)) + ), + new NodeDataTiersUsage( + hotNode2, + Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize), DataTier.DATA_WARM, createStats(0, 1, docCount, byteSize)) + ), + new NodeDataTiersUsage(hotNode3, Map.of(DataTier.DATA_HOT, createStats(1, 2, docCount, byteSize))) + ); + + // Calculate usage + Map<String, DataTiersFeatureSetUsage.TierSpecificStats> tierSpecificStats = DataTiersUsageTransportAction.aggregateStats( + nodeDataTiersUsages, + Map.of(DataTier.DATA_HOT, Set.of(hotIndex1), DataTier.DATA_WARM, Set.of(warmIndex1)) + ); + + // Verify - Warm indices are still calculated separately from Hot ones, despite the absence of Warm nodes + assertThat(tierSpecificStats.size(), is(2)); + + DataTiersFeatureSetUsage.TierSpecificStats hotStats = tierSpecificStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.nodeCount, is(3)); + assertThat(hotStats.indexCount, is(1)); + assertThat(hotStats.totalShardCount, is(6)); + assertThat(hotStats.docCount, is(6 * docCount)); + assertThat(hotStats.totalByteCount, is(6 * byteSize)); + assertThat(hotStats.primaryShardCount, is(3)); + assertThat(hotStats.primaryByteCount, is(3 * byteSize)); + assertThat(hotStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(hotStats.primaryShardBytesMAD, is(0L)); // All same size + + DataTiersFeatureSetUsage.TierSpecificStats warmStats = tierSpecificStats.get(DataTier.DATA_WARM);
+ assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.nodeCount, is(0)); + assertThat(warmStats.indexCount, is(1)); + assertThat(warmStats.totalShardCount, is(2)); + assertThat(warmStats.docCount, is(2 * docCount)); + assertThat(warmStats.totalByteCount, is(2 * byteSize)); + assertThat(warmStats.primaryShardCount, is(1)); + assertThat(warmStats.primaryByteCount, is(byteSize)); + assertThat(warmStats.primaryByteCountMedian, is(byteSize)); // All same size + assertThat(warmStats.primaryShardBytesMAD, is(0L)); // All same size + } + + private NodeDataTiersUsage.UsageStats createStats(int primaryShardCount, int totalNumberOfShards, long docCount, long byteSize) { + return new NodeDataTiersUsage.UsageStats( + primaryShardCount > 0 ? IntStream.range(0, primaryShardCount).mapToObj(i -> byteSize).toList() : List.of(), + totalNumberOfShards, + totalNumberOfShards * docCount, + totalNumberOfShards * byteSize + ); + } + + private IndexRoutingTable.Builder getIndexRoutingTable(IndexMetadata indexMetadata, DiscoveryNode node) { + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetadata.getIndex()); + routeTestShardToNodes(indexMetadata, 0, indexRoutingTableBuilder, node); + return indexRoutingTableBuilder; + } +}
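That closes the cluster-level aggregation tests; the new file that follows covers the node-local half of the feature. Conceptually there are two phases: each data node reports per-tier usage, and the cluster-level transport action merges the per-node reports into one summary per tier. A rough sketch of such a merge, using a stand-in record rather than the real NodeDataTiersUsage.UsageStats:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Invented record standing in for a per-node, per-tier usage report.
record TierUsageSketch(List<Long> primaryShardSizes, int totalShardCount, long docCount, long totalSize) {}

final class TierUsageMergeSketch {
    // Merge per-node reports into per-tier totals: {shards, docs, bytes}.
    static Map<String, long[]> merge(List<Map<String, TierUsageSketch>> perNodeUsage) {
        Map<String, long[]> perTier = new HashMap<>();
        for (Map<String, TierUsageSketch> nodeReport : perNodeUsage) {
            nodeReport.forEach((tier, usage) -> {
                long[] totals = perTier.computeIfAbsent(tier, t -> new long[3]);
                totals[0] += usage.totalShardCount();
                totals[1] += usage.docCount();
                totals[2] += usage.totalSize();
            });
        }
        return perTier;
    }

    public static void main(String[] args) {
        Map<String, TierUsageSketch> node1 = Map.of("data_hot", new TierUsageSketch(List.of(1024L), 2, 200L, 2048L));
        Map<String, TierUsageSketch> node2 = Map.of("data_hot", new TierUsageSketch(List.of(1024L), 2, 200L, 2048L));
        System.out.println(Arrays.toString(merge(List.of(node1, node2)).get("data_hot"))); // [4, 400, 4096]
    }
}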
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportActionTests.java new file mode 100644 index 0000000000000..fb4291530d037 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportActionTests.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.datatiers; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.indices.NodeIndicesStats; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.buildNodeIndicesStats; +import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.indexMetadata; +import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.newNode; +import static org.elasticsearch.xpack.core.datatiers.DataTierUsageFixtures.routeTestShardToNodes; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class NodesDataTiersUsageTransportActionTests extends ESTestCase { + + private long byteSize; + private long docCount; + + @Before + public void setup() { + byteSize = randomLongBetween(1024L, 1024L * 1024L * 1024L * 30L); // 1 KB to 30 GB + docCount = randomLongBetween(100L, 100000000L); // one hundred to one hundred million + } + + public void testCalculateStatsNoTiers() { + // Nodes: 0 Tiered Nodes, 1 Data Node + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_ROLE); + discoBuilder.add(dataNode1); + discoBuilder.localNodeId(dataNode1.getId()); + + // Indices: 1 Regular index + Metadata.Builder metadataBuilder = Metadata.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + + IndexMetadata index1 = indexMetadata("index_1", 3, 1); + metadataBuilder.put(index1, false).generateClusterUuidIfNeeded(); + + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index1.getIndex()); + routeTestShardToNodes(index1, 0, indexRoutingTableBuilder, dataNode1); + routeTestShardToNodes(index1, 1, indexRoutingTableBuilder, dataNode1); + routeTestShardToNodes(index1, 2, indexRoutingTableBuilder, dataNode1); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + + // Cluster State and create stats responses + ClusterState clusterState = ClusterState.builder(new ClusterName("test")) + .metadata(metadataBuilder) + .nodes(discoBuilder) + .routingTable(routingTableBuilder.build()) + .build(); + NodeIndicesStats nodeIndicesStats = buildNodeIndicesStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + byteSize, + docCount + ); + + // Calculate usage + Map<String, NodeDataTiersUsage.UsageStats> usageStats = NodesDataTiersUsageTransportAction.aggregateStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + clusterState.metadata(), + nodeIndicesStats + ); + + // Verify - No results when no tiers present + assertThat(usageStats.size(), is(0)); + }
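The node-local calculation exercised by these tests walks the shards allocated to the local node and buckets their stats under the hosting index's preferred tier; an index without a tier preference contributes nothing, which is why the test above expects an empty map. A simplified sketch with invented types:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Invented record standing in for a shard allocated to the local node.
record LocalShardSketch(String index, boolean primary, long sizeInBytes, long docCount) {}

final class NodeLocalTierUsageSketch {
    static Map<String, Long> bytesPerTier(List<LocalShardSketch> localShards, Map<String, String> preferredTierByIndex) {
        Map<String, Long> bytes = new HashMap<>();
        for (LocalShardSketch shard : localShards) {
            String tier = preferredTierByIndex.get(shard.index());
            if (tier == null || tier.isEmpty()) {
                continue; // no tier preference: the shard is not attributed to any tier
            }
            bytes.merge(tier, shard.sizeInBytes(), Long::sum);
        }
        return bytes;
    }

    public static void main(String[] args) {
        List<LocalShardSketch> shards = List.of(new LocalShardSketch("index_1", true, 1024L, 100L));
        System.out.println(bytesPerTier(shards, Map.of()));                       // {} - no tiers, no stats
        System.out.println(bytesPerTier(shards, Map.of("index_1", "data_hot"))); // {data_hot=1024}
    }
}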
+ + public void testCalculateStatsNoIndices() { + // Nodes: 1 Hot + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + DiscoveryNode dataNode1 = newNode(1, DiscoveryNodeRole.DATA_HOT_NODE_ROLE); + discoBuilder.add(dataNode1); + discoBuilder.localNodeId(dataNode1.getId()); + + // Indices: none + Metadata.Builder metadataBuilder = Metadata.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + + // Cluster State and create stats responses + ClusterState clusterState = ClusterState.builder(new ClusterName("test")) + .metadata(metadataBuilder) + .nodes(discoBuilder) + .routingTable(routingTableBuilder.build()) + .build(); + NodeIndicesStats nodeIndicesStats = buildNodeIndicesStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + byteSize, + docCount + ); + + // Calculate usage + Map<String, NodeDataTiersUsage.UsageStats> usageStats = NodesDataTiersUsageTransportAction.aggregateStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + clusterState.metadata(), + nodeIndicesStats + ); + + // Verify - No results when no indices are present + assertThat(usageStats.size(), is(0)); + } + + public void testCalculateStatsTieredIndicesOnly() { + // Nodes: 2 Data, 0 Tiered - Only hosting indices on generic data nodes + int nodeId = 0; + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + + DiscoveryNode dataNode1 = newNode(nodeId++, DiscoveryNodeRole.DATA_ROLE); + discoBuilder.add(dataNode1); + DiscoveryNode dataNode2 = newNode(nodeId, DiscoveryNodeRole.DATA_ROLE); + discoBuilder.add(dataNode2); + + discoBuilder.localNodeId(dataNode1.getId()); + + // Indices: 1 Hot index, 2 Warm indices, 1 Cold index + Metadata.Builder metadataBuilder = Metadata.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + + IndexMetadata hotIndex1 = indexMetadata("hot_index_1", 3, 1, DataTier.DATA_HOT); + metadataBuilder.put(hotIndex1, false).generateClusterUuidIfNeeded(); + { + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(hotIndex1.getIndex()); + routeTestShardToNodes(hotIndex1, 0, indexRoutingTableBuilder, dataNode1, dataNode2); + routeTestShardToNodes(hotIndex1, 1, indexRoutingTableBuilder, dataNode2, dataNode1); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + } + + IndexMetadata warmIndex1 = indexMetadata("warm_index_1", 1, 1, DataTier.DATA_WARM); + metadataBuilder.put(warmIndex1, false).generateClusterUuidIfNeeded(); + { + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex1.getIndex()); + routeTestShardToNodes(warmIndex1, 0, indexRoutingTableBuilder, dataNode1, dataNode2); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + } + IndexMetadata warmIndex2 = indexMetadata("warm_index_2", 1, 1, DataTier.DATA_WARM); + metadataBuilder.put(warmIndex2, false).generateClusterUuidIfNeeded(); + { + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(warmIndex2.getIndex()); + routeTestShardToNodes(warmIndex2, 0, indexRoutingTableBuilder, dataNode2, dataNode1); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + } + + IndexMetadata coldIndex1 = indexMetadata("cold_index_1", 1, 0, DataTier.DATA_COLD); + metadataBuilder.put(coldIndex1, false).generateClusterUuidIfNeeded(); + { + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(coldIndex1.getIndex()); + routeTestShardToNodes(coldIndex1, 0, indexRoutingTableBuilder, dataNode1); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + } + + // Cluster State and create stats responses + ClusterState clusterState = ClusterState.builder(new ClusterName("test")) + .nodes(discoBuilder) + .metadata(metadataBuilder) + .routingTable(routingTableBuilder.build()) + 
.build(); + NodeIndicesStats nodeIndicesStats = buildNodeIndicesStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + byteSize, + docCount + ); + + // Calculate usage + Map<String, NodeDataTiersUsage.UsageStats> usageStats = NodesDataTiersUsageTransportAction.aggregateStats( + clusterState.getRoutingNodes().node(dataNode1.getId()), + clusterState.metadata(), + nodeIndicesStats + ); + + // Verify - Index stats exist for the tiers, but no tiered nodes are found + assertThat(usageStats.size(), is(3)); + + NodeDataTiersUsage.UsageStats hotStats = usageStats.get(DataTier.DATA_HOT); + assertThat(hotStats, is(notNullValue())); + assertThat(hotStats.getPrimaryShardSizes(), equalTo(List.of(byteSize))); + assertThat(hotStats.getTotalShardCount(), is(2)); + assertThat(hotStats.getDocCount(), is(hotStats.getTotalShardCount() * docCount)); + assertThat(hotStats.getTotalSize(), is(hotStats.getTotalShardCount() * byteSize)); + + NodeDataTiersUsage.UsageStats warmStats = usageStats.get(DataTier.DATA_WARM); + assertThat(warmStats, is(notNullValue())); + assertThat(warmStats.getPrimaryShardSizes(), equalTo(List.of(byteSize))); + assertThat(warmStats.getTotalShardCount(), is(2)); + assertThat(warmStats.getDocCount(), is(warmStats.getTotalShardCount() * docCount)); + assertThat(warmStats.getTotalSize(), is(warmStats.getTotalShardCount() * byteSize)); + + NodeDataTiersUsage.UsageStats coldStats = usageStats.get(DataTier.DATA_COLD); + assertThat(coldStats, is(notNullValue())); + assertThat(coldStats.getPrimaryShardSizes(), equalTo(List.of(byteSize))); + assertThat(coldStats.getTotalShardCount(), is(1)); + assertThat(coldStats.getDocCount(), is(coldStats.getTotalShardCount() * docCount)); + assertThat(coldStats.getTotalSize(), is(coldStats.getTotalShardCount() * byteSize)); + } +}
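The next file tests the downsample ILM action, whose first step is a BranchingStep: a conditional that either continues into the downsampling steps or forwards straight to the next action's step key. A generic, illustrative version of that branching pattern follows; it is not the Elasticsearch BranchingStep API, just a sketch of the idea:

import java.util.function.BiPredicate;

// Illustrative only: a conditional step that picks its successor based on a predicate.
final class ConditionalStepSketch<C> {
    private final String onTrueStep;
    private final String onFalseStep;
    private final BiPredicate<String, C> predicate;

    ConditionalStepSketch(String onTrueStep, String onFalseStep, BiPredicate<String, C> predicate) {
        this.onTrueStep = onTrueStep;
        this.onFalseStep = onFalseStep;
        this.predicate = predicate;
    }

    String nextStepFor(String indexName, C context) {
        return predicate.test(indexName, context) ? onTrueStep : onFalseStep;
    }

    public static void main(String[] args) {
        // Stand-in for the time-series check: proceed only for time-series indices.
        ConditionalStepSketch<Boolean> timeSeriesCheck = new ConditionalStepSketch<>(
            "check-not-write-index",
            "next-action",
            (indexName, isTimeSeries) -> isTimeSeries
        );
        System.out.println(timeSeriesCheck.nextStepFor("metrics-index", true)); // check-not-write-index
        System.out.println(timeSeriesCheck.nextStepFor("plain-index", false));  // next-action
    }
}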
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DownsampleActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DownsampleActionTests.java index 109e8f87627ad..7cb93803de4ee 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DownsampleActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DownsampleActionTests.java @@ -6,8 +6,16 @@ */ package org.elasticsearch.xpack.core.ilm; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.xcontent.XContentParser; @@ -19,7 +27,9 @@ import static org.elasticsearch.xpack.core.ilm.DownsampleAction.CONDITIONAL_DATASTREAM_CHECK_KEY; import static org.elasticsearch.xpack.core.ilm.DownsampleAction.CONDITIONAL_TIME_SERIES_CHECK_KEY; +import static org.elasticsearch.xpack.core.ilm.DownsampleAction.DOWNSAMPLED_INDEX_PREFIX; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class DownsampleActionTests extends AbstractActionTestCase<DownsampleAction> { @@ -132,6 +142,92 @@ public void testToSteps() { assertThat(steps.get(14).getNextStepKey(), equalTo(nextStepKey)); } + public void testDownsamplingPrerequisitesStep() { + DateHistogramInterval fixedInterval = ConfigTestHelpers.randomInterval(); + DownsampleAction action = new DownsampleAction(fixedInterval, WAIT_TIMEOUT); + String phase = randomAlphaOfLengthBetween(1, 10); + StepKey nextStepKey = new StepKey( + randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10) + ); + { + // non time series indices skip the action + BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey); + IndexMetadata indexMetadata = newIndexMeta("test", Settings.EMPTY); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().put(indexMetadata, true).build()) + .build(); + + branchingStep.performAction(indexMetadata.getIndex(), state); + assertThat(branchingStep.getNextStepKey(), is(nextStepKey)); + } + { + // time series indices execute the action + BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey); + Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) + .put("index.routing_path", "uid") + .build(); + IndexMetadata indexMetadata = newIndexMeta("test", settings); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().put(indexMetadata, true).build()) + .build(); + + branchingStep.performAction(indexMetadata.getIndex(), state); + assertThat(branchingStep.getNextStepKey().name(), is(CheckNotDataStreamWriteIndexStep.NAME)); + } + { + // already downsampled indices for the interval skip the action + BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey); + Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) + .put("index.routing_path", "uid") + .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY, IndexMetadata.DownsampleTaskStatus.SUCCESS) + .put(IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME.getKey(), "test") + .build(); + String indexName = DOWNSAMPLED_INDEX_PREFIX + fixedInterval + "-test"; + IndexMetadata indexMetadata = newIndexMeta(indexName, settings); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().put(indexMetadata, true).build()) + .build(); + + branchingStep.performAction(indexMetadata.getIndex(), state); + assertThat(branchingStep.getNextStepKey(), is(nextStepKey)); + } + { + // indices with the same name as the target downsample index that are NOT downsample indices skip the action + BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey); + String indexName = DOWNSAMPLED_INDEX_PREFIX + fixedInterval + "-test"; + IndexMetadata indexMetadata = newIndexMeta(indexName, Settings.EMPTY); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().put(indexMetadata, true).build()) + .build(); + + branchingStep.performAction(indexMetadata.getIndex(), state); + assertThat(branchingStep.getNextStepKey(), is(nextStepKey)); + } + } + + private static BranchingStep getFirstBranchingStep(DownsampleAction action, String phase, StepKey nextStepKey) { + List<Step> steps = action.toSteps(null, phase, nextStepKey); + assertNotNull(steps); + assertEquals(15, steps.size()); + + assertTrue(steps.get(0) instanceof BranchingStep); + assertThat(steps.get(0).getKey().name(), equalTo(CONDITIONAL_TIME_SERIES_CHECK_KEY)); + + return (BranchingStep) steps.get(0); + } + + public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { + return
IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 1).put(indexSettings)).build(); + } + public void testEqualsAndHashCode() { EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy, this::notCopy); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java index eecca03e55406..3efe2dc04ea19 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java @@ -111,19 +111,17 @@ public void testCalculateUsage() { new ComposableIndexTemplateMetadata( Collections.singletonMap( "mytemplate", - new ComposableIndexTemplate( - Collections.singletonList("myds"), - new Template( - Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), - null, - null - ), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false) - ) + ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("myds")) + .template( + new Template( + Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), + null, + null + ) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ) ) ) @@ -163,15 +161,13 @@ public void testCalculateUsage() { new ComposableIndexTemplateMetadata( Collections.singletonMap( "mytemplate", - new ComposableIndexTemplate( - Collections.singletonList("myds"), - new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), null, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false) - ) + ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("myds")) + .template( + new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), null, null) + ) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() ) ) ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlTasksTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlTasksTests.java index ae4f451a49d2f..c11cefed137e9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlTasksTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlTasksTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import java.net.InetAddress; +import java.time.Instant; import java.util.Set; import java.util.stream.Collectors; @@ -50,7 +51,10 @@ public void testGetJobState() { ); assertEquals(JobState.OPENING, MlTasks.getJobState("foo", tasksBuilder.build())); - tasksBuilder.updateTaskState(MlTasks.jobTaskId("foo"), new JobTaskState(JobState.OPENED, tasksBuilder.getLastAllocationId(), null)); + tasksBuilder.updateTaskState( + MlTasks.jobTaskId("foo"), + new JobTaskState(JobState.OPENED, tasksBuilder.getLastAllocationId(), null, Instant.now()) + ); assertEquals(JobState.OPENED, MlTasks.getJobState("foo", tasksBuilder.build())); } @@ -327,7 +331,7 @@ public void testNonFailedJobTasksOnNode() { new OpenJobAction.JobParams("foo-1"), new PersistentTasksCustomMetadata.Assignment("node-1", "test assignment") ); - tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-1"), new 
JobTaskState(JobState.FAILED, 1, "testing")); + tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-1"), new JobTaskState(JobState.FAILED, 1, "testing", Instant.now())); tasksBuilder.addTask( MlTasks.jobTaskId("job-2"), MlTasks.JOB_TASK_NAME, @@ -335,7 +339,7 @@ new PersistentTasksCustomMetadata.Assignment("node-1", "test assignment") ); if (randomBoolean()) { - tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-2"), new JobTaskState(JobState.OPENED, 2, "testing")); + tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-2"), new JobTaskState(JobState.OPENED, 2, "testing", Instant.now())); } tasksBuilder.addTask( MlTasks.jobTaskId("job-3"), @@ -344,7 +348,7 @@ new PersistentTasksCustomMetadata.Assignment("node-2", "test assignment") ); if (randomBoolean()) { - tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-3"), new JobTaskState(JobState.FAILED, 3, "testing")); + tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-3"), new JobTaskState(JobState.FAILED, 3, "testing", Instant.now())); } assertThat(MlTasks.nonFailedJobTasksOnNode(tasksBuilder.build(), "node-1"), contains(hasProperty("id", equalTo("job-job-2")))); @@ -514,7 +518,7 @@ private static PersistentTasksCustomMetadata.PersistentTask<?> createDataFrameAnalyticsTask( if (state != null) { builder.updateTaskState( MlTasks.dataFrameAnalyticsTaskId(jobId), - new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId() - (isStale ? 1 : 0), null) + new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId() - (isStale ? 1 : 0), null, Instant.now()) ); } PersistentTasksCustomMetadata tasks = builder.build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index 69c1b23a5ff85..fcfc396313016 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.action.InferModelAction.Request; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfigUpdate; @@ -66,6 +67,9 @@ protected Request createTestInstance() { ); request.setHighPriority(randomBoolean()); + if (randomBoolean()) { + request.setPrefixType(randomFrom(TrainedModelPrefixStrings.PrefixType.values())); + } return request; } @@ -79,8 +83,9 @@ protected Request mutateInstance(Request instance) { var update = instance.getUpdate(); var previouslyLicensed = instance.isPreviouslyLicensed(); var timeout = instance.getInferenceTimeout(); + var prefixType = instance.getPrefixType(); - int change = randomIntBetween(0, 6); + int change = randomIntBetween(0, 7); switch (change) { case 0: modelId = modelId + "foo"; break; @@ -111,12 +116,17 @@ case 6: timeout = TimeValue.timeValueSeconds(timeout.getSeconds() - 1); break; + case 7: + 
prefixType = TrainedModelPrefixStrings.PrefixType.values()[(prefixType.ordinal() + 1) % TrainedModelPrefixStrings.PrefixType + .values().length]; + break; default: throw new IllegalStateException(); } var r = new Request(modelId, update, objectsToInfer, textInput, timeout, previouslyLicensed); r.setHighPriority(highPriority); + r.setPrefixType(prefixType); return r; } @@ -211,6 +221,18 @@ protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { ); r.setHighPriority(false); return r; + } else if (version.before(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + var r = new Request( + instance.getId(), + adjustedUpdate, + instance.getObjectsToInfer(), + instance.getTextInput(), + instance.getInferenceTimeout(), + instance.isPreviouslyLicensed() + ); + r.setHighPriority(instance.isHighPriority()); + r.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); + return r; + } return instance; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java index e417235e4a094..e7d7a7e0926d1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdateTests; @@ -36,9 +37,10 @@ protected Writeable.Reader<InferTrainedModelDeploymentAction.Request> instanceReader() { @Override protected InferTrainedModelDeploymentAction.Request createTestInstance() { boolean createQueryStringRequest = randomBoolean(); + InferTrainedModelDeploymentAction.Request request; if (createQueryStringRequest) { - return InferTrainedModelDeploymentAction.Request.forTextInput( + request = InferTrainedModelDeploymentAction.Request.forTextInput( randomAlphaOfLength(4), randomBoolean() ? null : randomInferenceConfigUpdate(), Arrays.asList(generateRandomStringArray(4, 7, false)), @@ -50,13 +52,16 @@ () -> randomMap(1, 3, () -> Tuple.tuple(randomAlphaOfLength(7), randomAlphaOfLength(7))) ); - return InferTrainedModelDeploymentAction.Request.forDocs( + request = InferTrainedModelDeploymentAction.Request.forDocs( randomAlphaOfLength(4), randomBoolean() ? null : randomInferenceConfigUpdate(), docs, randomBoolean() ? 
null : TimeValue.parseTimeValue(randomTimeValue(), "timeout") + ); + } + request.setHighPriority(randomBoolean()); + request.setPrefixType(randomFrom(TrainedModelPrefixStrings.PrefixType.values())); + return request; } @Override @@ -66,8 +71,7 @@ protected InferTrainedModelDeploymentAction.Request mutateInstance(InferTrainedModelDeploymentAction.Request instance) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(new MlInferenceNamedXContentProvider().getNamedWriteables()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index e08e61a6554a7..f6c859830119b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -8,13 +8,11 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.aggregations.AggregationsPlugin; import org.elasticsearch.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -68,7 +66,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class DatafeedUpdateTests extends AbstractXContentSerializingTestCase<DatafeedUpdate> { @@ -77,9 +74,6 @@ public class DatafeedUpdateTests extends AbstractXContentSerializingTestCase<DatafeedUpdate> { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelPrefixStringsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelPrefixStringsTests.java new file mode 100644 +public class TrainedModelPrefixStringsTests extends AbstractXContentSerializingTestCase<TrainedModelPrefixStrings> { + + private boolean lenient = randomBoolean(); + + public static TrainedModelPrefixStrings randomInstance() { + boolean noNullMembers = randomBoolean(); + if (noNullMembers) { + return new TrainedModelPrefixStrings(randomAlphaOfLength(5), randomAlphaOfLength(5)); + } else { + boolean firstIsNull = randomBoolean(); + return new TrainedModelPrefixStrings(firstIsNull ? null : randomAlphaOfLength(5), firstIsNull ? 
randomAlphaOfLength(5) : null); + } + } + + @Override + protected Writeable.Reader<TrainedModelPrefixStrings> instanceReader() { + return TrainedModelPrefixStrings::new; + } + + @Override + protected TrainedModelPrefixStrings createTestInstance() { + return randomInstance(); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected TrainedModelPrefixStrings mutateInstance(TrainedModelPrefixStrings instance) throws IOException { + return null; + } + + @Override + protected TrainedModelPrefixStrings doParseInstance(XContentParser parser) throws IOException { + return TrainedModelPrefixStrings.fromXContent(parser, lenient); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java index b5e82a5da75b2..bf1d74f044c1e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ModelPackageConfigTests.java @@ -18,6 +18,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStringsTests; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import java.io.IOException; @@ -45,12 +47,14 @@ public static ModelPackageConfig randomModulePackageConfig() { randomFrom(TrainedModelType.values()).toString(), randomBoolean() ? Arrays.asList(generateRandomStringArray(randomIntBetween(0, 5), 15, false)) : null, randomBoolean() ? randomAlphaOfLength(10) : null, - randomBoolean() ? randomAlphaOfLength(10) : null + randomBoolean() ? randomAlphaOfLength(10) : null, + TrainedModelPrefixStringsTests.randomInstance() + // randomBoolean() ? TrainedModelPrefixStringsTests.randomInstance() : null ); } public static ModelPackageConfig mutateModelPackageConfig(ModelPackageConfig instance) { - switch (between(0, 12)) { + switch (between(0, 13)) { case 0: return new ModelPackageConfig.Builder(instance).setPackedModelId(randomAlphaOfLength(15)).build(); case 1: @@ -87,6 +91,12 @@ public static ModelPackageConfig mutateModelPackageConfig(ModelPackageConfig instance) { return new ModelPackageConfig.Builder(instance).setVocabularyFile(randomAlphaOfLength(15)).build(); case 12: return new ModelPackageConfig.Builder(instance).setPlatformArchitecture(randomAlphaOfLength(15)).build(); + case 13: { + TrainedModelPrefixStrings mutatedPrefixes = instance.getPrefixStrings() == null + ? 
TrainedModelPrefixStringsTests.randomInstance() + : null; + return new ModelPackageConfig.Builder(instance).setPrefixStrings(mutatedPrefixes).build(); + } default: throw new AssertionError("Illegal randomisation branch"); } @@ -114,10 +124,14 @@ protected ModelPackageConfig mutateInstance(ModelPackageConfig instance) { @Override protected ModelPackageConfig mutateInstanceForVersion(ModelPackageConfig instance, TransportVersion version) { + var builder = new ModelPackageConfig.Builder(instance); if (version.before(TransportVersions.ML_PACKAGE_LOADER_PLATFORM_ADDED)) { - return new ModelPackageConfig.Builder(instance).setPlatformArchitecture(null).build(); + builder.setPlatformArchitecture(null); + } + if (version.before(TransportVersions.ML_TRAINED_MODEL_PREFIX_STRINGS_ADDED)) { + builder.setPrefixStrings(null); } - return instance; + return builder.build(); } private static Map<String, Object> randomInferenceConfigAsMap() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java index 92c3a65ffeaa3..9e26444040b03 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java @@ -110,15 +110,10 @@ public class TestRestrictedIndices { ".fleet-actions-results", "fleet actions results", SystemDataStreamDescriptor.Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(".fleet-actions-results"), - null, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(".fleet-actions-results")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(), Map.of(), List.of("fleet", "kibana"), null diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java index ff0e99e4a1e17..db9cf91681199 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java @@ -344,15 +344,15 @@ public void testAutomaticRollover() throws Exception { Metadata.builder(Objects.requireNonNull(state).metadata()) .put( entry.getKey(), - new ComposableIndexTemplate( - template.indexPatterns(), - template.template(), - template.composedOf(), - template.priority(), - 2L, - template.metadata(), - template.getDataStreamTemplate() - ) + ComposableIndexTemplate.builder() + .indexPatterns(template.indexPatterns()) + .template(template.template()) + .componentTemplates(template.composedOf()) + .priority(template.priority()) + .version(2L) + .metadata(template.metadata()) + .dataStreamTemplate(template.getDataStreamTemplate()) + .build() ) ) .build(); @@ -831,38 +831,23 @@ public void testFindRolloverTargetDataStreams() { ) .build(); - ComposableIndexTemplate it1 = new ComposableIndexTemplate( - List.of("ds1*", "ds2*", "ds3*"), - null, - null, - 100L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate it1 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("ds1*", "ds2*", "ds3*")) + .priority(100L) + .dataStreamTemplate(new 
ComposableIndexTemplate.DataStreamTemplate()) + .build(); - ComposableIndexTemplate it2 = new ComposableIndexTemplate( - List.of("ds2*"), - null, - null, - 200L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate it2 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("ds2*")) + .priority(200L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); - ComposableIndexTemplate it5 = new ComposableIndexTemplate( - List.of("ds5*"), - null, - null, - 200L, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate it5 = ComposableIndexTemplate.builder() + .indexPatterns(List.of("ds5*")) + .priority(200L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); state = ClusterState.builder(state) .metadata(Metadata.builder(state.metadata()).put("it1", it1).put("it2", it2).put("it5", it5)) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json index 7929d4cb5594c..0fcaddb9a02ce 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json @@ -33,5 +33,6 @@ "_meta": { "description": "built-in ILM policy using the hot, warm, and cold phases with a retention of 180 days", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json index 6d5a12b39762d..5764b75299ced 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json @@ -29,5 +29,6 @@ "_meta": { "description": "built-in ILM policy using the hot and warm phases with a retention of 30 days", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json index 3d2340245f117..4398b14387dec 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json @@ -33,5 +33,6 @@ "_meta": { "description": "built-in ILM policy using the hot, warm, and cold phases with a retention of 365 days", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json index 2c5778e5af1db..1a1f74beac516 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json @@ -29,5 +29,6 @@ "_meta": { "description": "built-in ILM policy using the hot and warm phases with a retention of 7 days", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json index 
cae4e7c83a064..e0d2487c8961a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json @@ -33,5 +33,6 @@ "_meta": { "description": "built-in ILM policy using the hot, warm, and cold phases with a retention of 90 days", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json index f87c0e79b7c45..96bbeca8f7ac8 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json @@ -63,5 +63,6 @@ "description": "general mapping conventions for data streams", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index fc29fc98dca96..f1d03531e4b6b 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -190,5 +190,6 @@ "description": "dynamic mappings based on ECS, installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json index 709ce5d3abbd0..651e1c84da73a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json @@ -9,7 +9,7 @@ "pipeline": { "default_name": "ent-search-generic-ingestion", "default_extract_binary_content": true, - "default_run_ml_inference": false, + "default_run_ml_inference": true, "default_reduce_whitespace": true }, "version": ${xpack.application.connector.template.version} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index a4388d671eb0d..b92942ff010d6 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -173,5 +173,6 @@ "description": "default kibana reporting template installed by elasticsearch", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json index 518ff3cece752..d8dc9cca5ea7c 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json +++ 
b/x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json @@ -20,5 +20,6 @@ "description": "default pipeline for the logs index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json index cebeccd344324..e3b0f85642a46 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json @@ -44,5 +44,6 @@ "description": "automatic parsing of JSON log messages", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json index 6bce19aaaab49..5b58c138d785f 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json @@ -12,5 +12,6 @@ "_meta": { "description": "default policy for the logs index template installed by x-pack", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json index 7417d4809559d..82cbf7e478a27 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json @@ -23,5 +23,6 @@ "description": "default mappings for the logs index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json index cc61f195402fe..908b01027e833 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json @@ -20,5 +20,6 @@ "description": "default settings for the logs index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json index b41b2d0453c89..f9b945d75f4f8 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json @@ -14,5 +14,6 @@ "description": "default logs template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json 
b/x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json index 3c37e8db4a7da..daa07659e559e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json @@ -12,5 +12,6 @@ "_meta": { "description": "default policy for the metrics index template installed by x-pack", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json index 5741b441256f9..4e48f6b7adaed 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json @@ -53,5 +53,6 @@ "description": "default mappings for the metrics index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json index 1a13139bb18a4..3a0e6feeaede4 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json @@ -16,5 +16,6 @@ "description": "default settings for the metrics index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json index a596314bc9e8c..464df09ffe2ce 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json @@ -12,5 +12,6 @@ "description": "default metrics template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json index cbcad39ef78d0..6a64ff9be5473 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json @@ -15,5 +15,6 @@ "description": "default settings for the metrics index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json index 77634546e0e6e..2dbc4bac8bd00 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json @@ -149,6 +149,9 @@ }, "vocab": { 
"enabled": false + }, + "prefix_strings": { + "enabled": false } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json index 1e4220725177d..aa2cf5489b45f 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json @@ -12,5 +12,6 @@ "_meta": { "description": "default policy for the synthetics index template installed by x-pack", "managed": true - } + }, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json index 9e3e56e3261d0..81b85285450c7 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json @@ -17,5 +17,6 @@ "description": "default mappings for the synthetics index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json index 27ced96be36e3..04d68d083bf9f 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json @@ -13,5 +13,6 @@ "description": "default settings for the synthetics index template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json index 6369bd5a82c15..344426541b8c5 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json @@ -12,5 +12,6 @@ "description": "default synthetics template installed by x-pack", "managed": true }, - "version": ${xpack.stack.template.version} + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} } diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java index d704f3bf93c54..db6ab6d01613d 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java @@ -138,16 +138,12 @@ private static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? 
null : mappings, null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? null : mappings, null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client.execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 9bf580673df2e..c0abab1234133 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -1434,16 +1434,11 @@ private String createDataStream() throws Exception { null ); - ComposableIndexTemplate template = new ComposableIndexTemplate( - List.of(dataStreamName + "*"), - indexTemplate, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(false, false), - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName + "*")) + .template(indexTemplate) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build(); PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(dataStreamName + "_template") .indexTemplate(template); assertAcked(client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet()); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java index 33abf925085a5..95640f4625849 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java @@ -206,7 +206,11 @@ private void putComposableIndexTemplate(final String id, final List patt null ); request.indexTemplate( - new ComposableIndexTemplate(patterns, template, null, null, null, null, new ComposableIndexTemplate.DataStreamTemplate(), null) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(template) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index a2ad6a59e54fe..4e001d38bf279 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -279,6 +279,7 @@ public void onResponse(DeleteResponse deleteResponse) { public void onFailure(Exception e) { if (e instanceof IndexNotFoundException) { listener.onFailure(new ResourceNotFoundException(resourceName)); + return; } listener.onFailure(e); } diff 
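The downsample test changes above migrate three call sites from the positional ComposableIndexTemplate constructor, whose run of trailing nulls is easy to misorder, to the named builder that the diff itself introduces. The shape of the migration, as a sketch (the arguments are whatever the surrounding test already has in scope):

    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
    import org.elasticsearch.cluster.metadata.Template;

    final class BuilderMigrationSketch {
        static ComposableIndexTemplate migrated(List<String> patterns, Template template, Map<String, Object> metadata) {
            // Before: new ComposableIndexTemplate(patterns, template, null, null, null, metadata, dataStreamTemplate, null)
            return ComposableIndexTemplate.builder()
                .indexPatterns(patterns)
                .template(template)
                .metadata(metadata)
                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
                .build(); // unset properties simply default; no explicit nulls
        }
    }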
--git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index 731408d3c6ef8..ebd78119ab7d5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -8,10 +8,13 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.ParsingException; @@ -19,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.HeaderWarning; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -209,18 +213,35 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws queryRewriteContext.registerAsyncAction((client, listener) -> { Client clientWithOrigin = new OriginSettingClient(client, ENT_SEARCH_ORIGIN); - clientWithOrigin.get(getRequest, listener.delegateFailureAndWrap((l, getResponse) -> { - if (getResponse.isExists() == false) { - throw new ResourceNotFoundException("query ruleset " + rulesetId + " not found"); + clientWithOrigin.get(getRequest, new ActionListener<>() { + @Override + public void onResponse(GetResponse getResponse) { + if (getResponse.isExists() == false) { + throw new ResourceNotFoundException("query ruleset " + rulesetId + " not found"); + } + QueryRuleset queryRuleset = QueryRuleset.fromXContentBytes( + rulesetId, + getResponse.getSourceAsBytesRef(), + XContentType.JSON + ); + for (QueryRule rule : queryRuleset.rules()) { + rule.applyRule(appliedRules, matchCriteria); + } + pinnedIdsSetOnce.set(appliedRules.pinnedIds().stream().distinct().toList()); + pinnedDocsSetOnce.set(appliedRules.pinnedDocs().stream().distinct().toList()); + listener.onResponse(null); } - QueryRuleset queryRuleset = QueryRuleset.fromXContentBytes(rulesetId, getResponse.getSourceAsBytesRef(), XContentType.JSON); - for (QueryRule rule : queryRuleset.rules()) { - rule.applyRule(appliedRules, matchCriteria); + + @Override + public void onFailure(Exception e) { + Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof IndexNotFoundException) { + listener.onFailure(new ResourceNotFoundException("query ruleset " + rulesetId + " not found")); + } else { + listener.onFailure(e); + } } - pinnedIdsSetOnce.set(appliedRules.pinnedIds().stream().distinct().toList()); - pinnedDocsSetOnce.set(appliedRules.pinnedDocs().stream().distinct().toList()); - listener.onResponse(null); - })); + }); }); QueryBuilder newOrganicQuery = organicQuery.rewrite(queryRewriteContext); diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index 0da3095cc7a95..fc11e04c4ede2 100644 --- 
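Both ent-search fixes above are about ActionListener discipline: QueryRulesIndexService fell through after reporting a ResourceNotFoundException and notified its listener a second time, and RuleQueryBuilder now unwraps the failure before deciding whether a missing rules index should surface as a 404. Reduced to a sketch (class and method names here are illustrative):

    import org.elasticsearch.ExceptionsHelper;
    import org.elasticsearch.ResourceNotFoundException;
    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.index.IndexNotFoundException;

    final class ListenerFailureSketch {
        static void onFailure(Exception e, ActionListener<Void> listener, String resourceName) {
            // Unwrap first: the IndexNotFoundException may arrive wrapped by the transport layer.
            if (ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) {
                listener.onFailure(new ResourceNotFoundException(resourceName));
                return; // without the early return the listener would be completed twice
            }
            listener.onFailure(e);
        }
    }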
a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -19,7 +19,6 @@ dependencies { testImplementation project(':test:framework') testImplementation(testArtifact(project(xpackModule('core')))) - testImplementation(testArtifact(project(xpackModule('security')))) testImplementation(testArtifact(project(xpackModule('ql')))) testImplementation project(path: ':modules:reindex') testImplementation project(path: ':modules:parent-join') diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java index 2f57bc021a1c0..194c2c7fde459 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java @@ -65,6 +65,7 @@ import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.text; import static org.elasticsearch.xpack.ql.tree.Source.synthetic; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; public abstract class LogicalPlanBuilder extends ExpressionBuilder { @@ -102,7 +103,7 @@ public Object visitStatement(StatementContext ctx) { if (ctx.pipe().size() > 0) { throw new ParsingException(source(ctx.pipe().get(0)), "Samples do not support pipes yet"); } - return new LimitWithOffset(plan.source(), new Literal(Source.EMPTY, params.size(), DataTypes.INTEGER), 0, plan); + return new LimitWithOffset(plan.source(), new Literal(Source.EMPTY, params.size(), INTEGER), 0, plan); } // // Add implicit blocks @@ -125,7 +126,7 @@ public Object visitStatement(StatementContext ctx) { plan = new OrderBy(defaultOrderSource, plan, orders); // add the default limit only if specified - Literal defaultSize = new Literal(synthetic(""), params.size(), DataTypes.INTEGER); + Literal defaultSize = new Literal(synthetic(""), params.size(), INTEGER); Source defaultLimitSource = synthetic(""); LogicalPlan previous = plan; @@ -521,8 +522,16 @@ private Expression onlyOnePipeArgument(Source source, String pipeName, List exps) { Expression expression = onlyOnePipeArgument(source, pipeName, exps); + boolean foldableInt = expression.foldable() && expression.dataType().isInteger(); + Number value = null; - if (expression.dataType().isInteger() == false || expression.foldable() == false || (int) expression.fold() < 0) { + if (foldableInt) { + try { + value = (Number) expression.fold(); + } catch (ArithmeticException ae) {} + } + + if (foldableInt == false || value == null || value.intValue() != value.longValue() || value.intValue() < 0) { throw new ParsingException( expression.source(), "Pipe [{}] expects a positive integer but found [{}]", @@ -531,6 +540,8 @@ private Expression pipeIntArgument(Source source, String pipeName, List { buffers.put(seqNo, output); onSeqNoCompleted(seqNo); }, e -> { + input.releaseBlocks(); onFailure(e); onSeqNoCompleted(seqNo); })); @@ -81,6 +86,8 @@ public void addInput(Page input) { */ protected abstract void performAsync(Page inputPage, ActionListener listener); + protected abstract void doClose(); + private void onFailure(Exception e) { failure.getAndUpdate(first -> { if (first == null) { @@ -105,6 +112,9 @@ private void onSeqNoCompleted(long seqNo) { if (checkpoint.getPersistedCheckpoint() < checkpoint.getProcessedCheckpoint()) { notifyIfBlocked(); } + if (failure.get() != null) { + discardPages(); + } } private void notifyIfBlocked() { 
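The AsyncOperator changes above, together with the discardPages/close additions in the next hunk, exist because ESQL pages hold circuit-breaker-accounted blocks: if performAsync fails, the buffered input page must still be handed back to the breaker or the memory is leaked. The contract in miniature; names in this sketch are illustrative, not the real operator API:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.compute.data.Page;

    final class ReleaseOnFailureSketch {
        interface AsyncCall {
            void perform(Page input, ActionListener<Page> listener);
        }

        static void addInput(Page input, AsyncCall call, ActionListener<Page> downstream) {
            call.perform(input, ActionListener.wrap(downstream::onResponse, e -> {
                input.releaseBlocks(); // return breaker-tracked memory before surfacing the failure
                downstream.onFailure(e);
            }));
        }
    }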
@@ -123,18 +133,39 @@ private void notifyIfBlocked() { private void checkFailure() { Exception e = failure.get(); if (e != null) { + discardPages(); throw ExceptionsHelper.convertToElastic(e); } } + private void discardPages() { + long nextCheckpoint; + while ((nextCheckpoint = checkpoint.getPersistedCheckpoint() + 1) <= checkpoint.getProcessedCheckpoint()) { + Page page = buffers.remove(nextCheckpoint); + checkpoint.markSeqNoAsPersisted(nextCheckpoint); + if (page != null) { + Releasables.closeExpectNoException(page::releaseBlocks); + } + } + } + + @Override + public final void close() { + finish(); + discardPages(); + doClose(); + } + @Override public void finish() { finished = true; + if (failure.get() != null) { + discardPages(); + } } @Override public boolean isFinished() { - checkFailure(); return finished && checkpoint.getPersistedCheckpoint() == checkpoint.getMaxSeqNo(); } @@ -154,6 +185,7 @@ public Page getOutput() { @Override public SubscribableListener isBlocked() { + // TODO: Add an exchange service between async operation instead? if (finished) { return Operator.NOT_BLOCKED; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 4c808907cda91..b8a66410167ab 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -8,22 +8,27 @@ package org.elasticsearch.compute.operator; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; @@ -36,8 +41,11 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.LongStream; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class AsyncOperatorTests extends ESTestCase { @@ -80,16 +88,17 @@ protected int remaining() { @Override protected Page createPage(int positionOffset, int length) { - LongVector.Builder 
builder = LongVector.newVectorBuilder(length); - for (int i = 0; i < length; i++) { - builder.appendLong(ids.get(currentPosition++)); + try (LongVector.Builder builder = blockFactory.newLongVectorBuilder(length)) { + for (int i = 0; i < length; i++) { + builder.appendLong(ids.get(currentPosition++)); + } + return new Page(builder.build().asBlock()); } - return new Page(builder.build().asBlock()); } }; int maxConcurrentRequests = randomIntBetween(1, 10); AsyncOperator asyncOperator = new AsyncOperator(maxConcurrentRequests) { - final LookupService lookupService = new LookupService(threadPool, dict, maxConcurrentRequests); + final LookupService lookupService = new LookupService(threadPool, driverContext.blockFactory(), dict, maxConcurrentRequests); @Override protected void performAsync(Page inputPage, ActionListener listener) { @@ -97,25 +106,27 @@ protected void performAsync(Page inputPage, ActionListener listener) { } @Override - public void close() { + public void doClose() { } }; Iterator it = ids.iterator(); SinkOperator outputOperator = new PageConsumerOperator(page -> { - assertThat(page.getBlockCount(), equalTo(2)); - LongBlock b1 = page.getBlock(0); - BytesRefBlock b2 = page.getBlock(1); - BytesRef scratch = new BytesRef(); - for (int i = 0; i < page.getPositionCount(); i++) { - assertTrue(it.hasNext()); - long key = b1.getLong(i); - assertThat(key, equalTo(it.next())); - String v = dict.get(key); - if (v == null) { - assertTrue(b2.isNull(i)); - } else { - assertThat(b2.getBytesRef(i, scratch), equalTo(new BytesRef(v))); + try (Releasable ignored = page::releaseBlocks) { + assertThat(page.getBlockCount(), equalTo(2)); + LongBlock b1 = page.getBlock(0); + BytesRefBlock b2 = page.getBlock(1); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < page.getPositionCount(); i++) { + assertTrue(it.hasNext()); + long key = b1.getLong(i); + assertThat(key, equalTo(it.next())); + String v = dict.get(key); + if (v == null) { + assertTrue(b2.isNull(i)); + } else { + assertThat(b2.getBytesRef(i, scratch), equalTo(new BytesRef(v))); + } } } }); @@ -126,6 +137,7 @@ public void close() { } public void testStatus() { + DriverContext driverContext = driverContext(); Map> handlers = new HashMap<>(); AsyncOperator operator = new AsyncOperator(2) { @Override @@ -134,47 +146,108 @@ protected void performAsync(Page inputPage, ActionListener listener) { } @Override - public void close() { + protected void doClose() { } }; assertTrue(operator.isBlocked().isDone()); assertTrue(operator.needsInput()); - Page page1 = new Page(Block.constantNullBlock(1)); + Page page1 = new Page(driverContext.blockFactory().newConstantNullBlock(1)); operator.addInput(page1); assertFalse(operator.isBlocked().isDone()); SubscribableListener blocked1 = operator.isBlocked(); assertTrue(operator.needsInput()); - Page page2 = new Page(Block.constantNullBlock(2)); + Page page2 = new Page(driverContext.blockFactory().newConstantNullBlock(2)); operator.addInput(page2); assertFalse(operator.needsInput()); // reached the max outstanding requests assertFalse(operator.isBlocked().isDone()); assertThat(operator.isBlocked(), equalTo(blocked1)); - Page page3 = new Page(Block.constantNullBlock(3)); + Page page3 = new Page(driverContext.blockFactory().newConstantNullBlock(3)); handlers.remove(page1).onResponse(page3); + page1.releaseBlocks(); assertFalse(operator.needsInput()); // still have 2 outstanding requests assertTrue(operator.isBlocked().isDone()); assertTrue(blocked1.isDone()); - assertThat(operator.getOutput(), equalTo(page3)); + 
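The test rewrite above follows the same accounting rules the production code now obeys: builders come from the driver's BlockFactory and are closed via try-with-resources, and every Page is released exactly once by whoever consumes it. A sketch of breaker-safe page construction, under those assumptions:

    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.LongVector;
    import org.elasticsearch.compute.data.Page;

    final class TrackedPageSketch {
        static Page longsPage(BlockFactory blockFactory, long[] values) {
            // Closing the builder releases its breaker-tracked memory even if an exception escapes mid-loop;
            // once build() succeeds, ownership moves to the block and then to the returned page.
            try (LongVector.Builder builder = blockFactory.newLongVectorBuilder(values.length)) {
                for (long v : values) {
                    builder.appendLong(v);
                }
                return new Page(builder.build().asBlock());
            }
        }
    }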
page3.releaseBlocks(); + assertTrue(operator.needsInput()); assertFalse(operator.isBlocked().isDone()); + Page page4 = new Page(driverContext.blockFactory().newConstantNullBlock(3)); + handlers.remove(page2).onResponse(page4); + page2.releaseBlocks(); + assertThat(operator.getOutput(), equalTo(page4)); + page4.releaseBlocks(); operator.close(); } + public void testFailure() throws Exception { + DriverContext driverContext = driverContext(); + final SequenceLongBlockSourceOperator sourceOperator = new SequenceLongBlockSourceOperator( + driverContext.blockFactory(), + LongStream.range(0, 100 * 1024) + ); + int maxConcurrentRequests = randomIntBetween(1, 10); + AtomicBoolean failed = new AtomicBoolean(); + AsyncOperator asyncOperator = new AsyncOperator(maxConcurrentRequests) { + @Override + protected void performAsync(Page inputPage, ActionListener listener) { + ActionRunnable command = new ActionRunnable<>(listener) { + @Override + protected void doRun() { + if (randomInt(100) < 10) { + failed.set(true); + throw new ElasticsearchException("simulated"); + } + int positionCount = inputPage.getBlock(0).getPositionCount(); + IntBlock block = driverContext.blockFactory().newConstantIntBlockWith(between(1, 100), positionCount); + listener.onResponse(inputPage.appendPage(new Page(block))); + } + }; + if (randomBoolean()) { + command.run(); + } else { + TimeValue delay = TimeValue.timeValueMillis(randomIntBetween(0, 50)); + threadPool.schedule(command, delay, threadPool.executor(ESQL_TEST_EXECUTOR)); + } + } + + @Override + protected void doClose() { + + } + }; + SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); + PlainActionFuture future = new PlainActionFuture<>(); + Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, () -> {}); + Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); + assertBusy(() -> { + assertTrue(asyncOperator.isFinished()); + assertTrue(future.isDone()); + }); + if (failed.get()) { + ElasticsearchException error = expectThrows(ElasticsearchException.class, future::actionGet); + assertThat(error.getMessage(), containsString("simulated")); + } else { + future.actionGet(); + } + } + static class LookupService { private final ThreadPool threadPool; private final Map dict; private final int maxConcurrentRequests; private final AtomicInteger pendingRequests = new AtomicInteger(); + private final BlockFactory blockFactory; - LookupService(ThreadPool threadPool, Map dict, int maxConcurrentRequests) { + LookupService(ThreadPool threadPool, BlockFactory blockFactory, Map dict, int maxConcurrentRequests) { this.threadPool = threadPool; this.dict = dict; + this.blockFactory = blockFactory; this.maxConcurrentRequests = maxConcurrentRequests; } @@ -184,20 +257,21 @@ public void lookupAsync(Page input, ActionListener listener) { ActionRunnable command = new ActionRunnable<>(listener) { @Override protected void doRun() { + int current = pendingRequests.decrementAndGet(); + assert current >= 0 : "pending requests must be non-negative"; LongBlock ids = input.getBlock(0); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(ids.getPositionCount()); - for (int i = 0; i < ids.getPositionCount(); i++) { - String v = dict.get(ids.getLong(i)); - if (v != null) { - builder.appendBytesRef(new BytesRef(v)); - } else { - builder.appendNull(); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(ids.getPositionCount())) { + for (int 
i = 0; i < ids.getPositionCount(); i++) { + String v = dict.get(ids.getLong(i)); + if (v != null) { + builder.appendBytesRef(new BytesRef(v)); + } else { + builder.appendNull(); + } } + Page result = input.appendPage(new Page(builder.build())); + listener.onResponse(result); } - int current = pendingRequests.decrementAndGet(); - assert current >= 0 : "pending requests must be non-negative"; - Page result = input.appendBlock(builder.build()); - listener.onResponse(result); } }; TimeValue delay = TimeValue.timeValueMillis(randomIntBetween(0, 50)); @@ -205,13 +279,30 @@ protected void doRun() { } } - /** - * A {@link DriverContext} with a nonBreakingBigArrays. - */ - DriverContext driverContext() { - return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() - ); + protected DriverContext driverContext() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + BlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return new DriverContext(bigArrays, factory); + } + + private final List breakers = new ArrayList<>(); + private final List blockFactories = new ArrayList<>(); + + @After + public void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + + for (CircuitBreaker breaker : breakers) { + for (var factory : blockFactories) { + if (factory instanceof MockBlockFactory mockBlockFactory) { + mockBlockFactory.ensureAllBlocksAreReleased(); + } + } + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index 38ba64f78523e..cf29bf6c6a9d6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -127,7 +127,7 @@ protected void performAsync(Page page, ActionListener listener) { } @Override - public void close() { + protected void doClose() { } } diff --git a/x-pack/plugin/esql/qa/server/build.gradle b/x-pack/plugin/esql/qa/server/build.gradle index f8a43c52f5ca7..12c3a9d951383 100644 --- a/x-pack/plugin/esql/qa/server/build.gradle +++ b/x-pack/plugin/esql/qa/server/build.gradle @@ -19,7 +19,7 @@ subprojects { } - if (project.name != 'security') { + if (project.name != 'security' && project.name != 'mixed-cluster' ) { // The security project just configures its subprojects apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle new file mode 100644 index 0000000000000..10f993124652d --- /dev/null +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -0,0 +1,50 @@ + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 
'elasticsearch.internal-testclusters' +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.bwc-test' +apply plugin: 'elasticsearch.rest-resources' + +dependencies { + testImplementation project(xpackModule('esql:qa:testFixtures')) + testImplementation project(xpackModule('esql:qa:server')) +} + +restResources { + restApi { + include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich' + } +} + +BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> + + if (bwcVersion != VersionProperties.getElasticsearchVersion() && bwcVersion.onOrAfter(Version.fromString("8.11.0"))) { + /* This project runs the ESQL spec tests against a 4 node cluster where two of the nodes have a different minor. */ + def baseCluster = testClusters.register(baseName) { + versions = [bwcVersion.toString(), bwcVersion.toString(), project.version, project.version] + numberOfNodes = 4 + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'false' + } + + tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { + useCluster baseCluster + mustRunAfter("precommit") + nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) + nonInputProperties.systemProperty('tests.clustername', baseName) + systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') + systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') + onlyIf("BWC tests disabled") { project.bwc_tests_enabled } + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn "${baseName}#mixedClusterTest" + } + } +} + diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/test/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/test/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java new file mode 100644 index 0000000000000..b8dab3641c2a0 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/test/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; + +import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; + +public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { + + static final Version bwcVersion = Version.fromString(System.getProperty("tests.bwc_nodes_version")); + static final Version newVersion = Version.fromString(System.getProperty("tests.new_nodes_version")); + + public MixedClusterEsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber, testCase); + } + + @Override + protected void shouldSkipTest(String testName) { + assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, bwcVersion)); + assumeTrue("Test " + testName + " is skipped on " + newVersion, isEnabled(testName, newVersion)); + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 776a2e732e5e9..5397681e231fd 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.http.HttpEntity; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.XContentHelper; @@ -90,13 +91,17 @@ public boolean logResults() { public final void test() throws Throwable { try { - assumeTrue("Test " + testName + " is not enabled", isEnabled(testName)); + shouldSkipTest(testName); doTest(); } catch (Exception e) { throw reworkException(e); } } + protected void shouldSkipTest(String testName) { + assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 988d77a11beef..3ccf61b3a15ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -9,6 +9,7 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.compute.data.Block; @@ -22,9 +23,9 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.ql.util.StringUtils; -import org.elasticsearch.xpack.versionfield.Version; 
import org.supercsv.io.CsvListReader; import org.supercsv.prefs.CsvPreference; @@ -42,6 +43,8 @@ import java.util.Locale; import java.util.Map; import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static org.elasticsearch.common.Strings.delimitedListToStringArray; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -57,8 +60,51 @@ public final class CsvTestUtils { private CsvTestUtils() {} - public static boolean isEnabled(String testName) { - return testName.endsWith("-Ignore") == false; + public static boolean isEnabled(String testName, Version version) { + if (testName.endsWith("-Ignore")) { + return false; + } + Tuple skipRange = skipVersionRange(testName); + if (skipRange != null && version.onOrAfter(skipRange.v1()) && version.onOrBefore(skipRange.v2())) { + return false; + } + return true; + } + + private static final Pattern INSTRUCTION_PATTERN = Pattern.compile("#\\[(.*?)]"); + + public static Map extractInstructions(String testName) { + Matcher matcher = INSTRUCTION_PATTERN.matcher(testName); + Map pairs = new HashMap<>(); + if (matcher.find()) { + String[] groups = matcher.group(1).split(","); + for (String group : groups) { + String[] kv = group.split(":"); + if (kv.length != 2) { + throw new IllegalArgumentException("expected instruction in [k1:v1,k2:v2] format; got " + matcher.group(1)); + } + pairs.put(kv[0].trim(), kv[1].trim()); + } + } + return pairs; + } + + public static Tuple skipVersionRange(String testName) { + Map pairs = extractInstructions(testName); + String versionRange = pairs.get("skip"); + if (versionRange != null) { + String[] skipVersions = versionRange.split("-"); + if (skipVersions.length != 2) { + throw new IllegalArgumentException("malformed version range : " + versionRange); + } + String lower = skipVersions[0].trim(); + String upper = skipVersions[1].trim(); + return Tuple.tuple( + lower.isEmpty() ? VersionUtils.getFirstVersion() : Version.fromString(lower), + upper.isEmpty() ? Version.CURRENT : Version.fromString(upper) + ); + } + return null; } public static Tuple> loadPageFromCsv(URL source) throws Exception { @@ -333,7 +379,7 @@ public enum Type { : ((BytesRef) l).compareTo((BytesRef) r), BytesRef.class ), - VERSION(v -> new Version(v).toBytesRef(), BytesRef.class), + VERSION(v -> new org.elasticsearch.xpack.versionfield.Version(v).toBytesRef(), BytesRef.class), NULL(s -> null, Void.class), DATETIME( x -> x == null ? null : DateFormatters.from(UTC_DATE_TIME_FORMATTER.parse(x)).toInstant().toEpochMilli(), diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index f85dbeda7f6bc..8b94c022aaf6a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -182,7 +182,7 @@ string:keyword |datetime:date // end::to_datetime-str-result[] ; -convertFromUnsignedLong +convertFromUnsignedLong#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. 
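The csv-spec annotations that follow rely on the parsing above: a test name can carry a #[k1:v1,k2:v2] instruction block, and skip names an inclusive version range whose endpoints may be left empty for an open bound. A standalone re-implementation of the instruction parsing, for illustration only:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class SkipInstructionSketch {
        private static final Pattern INSTRUCTION_PATTERN = Pattern.compile("#\\[(.*?)]");

        static Map<String, String> extractInstructions(String testName) {
            Matcher matcher = INSTRUCTION_PATTERN.matcher(testName);
            Map<String, String> pairs = new HashMap<>();
            if (matcher.find()) {
                for (String group : matcher.group(1).split(",")) {
                    String[] kv = group.split(":"); // well-formed input assumed; the real code validates kv.length == 2
                    pairs.put(kv[0].trim(), kv[1].trim());
                }
            }
            return pairs;
        }

        public static void main(String[] args) {
            var pairs = extractInstructions("convertDoubleToUL#[skip:-8.11.99, reason:ql exceptions updated in 8.12]");
            // skip = "-8.11.99": the empty lower bound means "from the first known version", and the upper
            // bound is inclusive, so the test is disabled on 8.11.x and older nodes and enabled from 8.12.0 on.
            System.out.println(pairs.get("skip") + " / " + pairs.get("reason"));
        }
    }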
warning:Line 1:58: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 9485bf800dd18..b8dd2c6e3cee1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -63,7 +63,7 @@ long:long |ul:ul [501379200000, 520128000000] |[501379200000, 520128000000] ; -convertDoubleToUL +convertDoubleToUL#[skip:-8.11.99, reason:ql exceptions updated in 8.12] row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warning:Line 1:48: evaluation of [to_ul(1e20)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:48: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E20] out of [unsigned_long] range @@ -120,7 +120,7 @@ int:integer |long:long [5013792, 520128] |[5013792, 520128] ; -convertULToLong +convertULToLong#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warning:Line 1:67: evaluation of [to_long(ul)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:67: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range @@ -161,7 +161,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long // end::to_long-str-result[] ; -convertDoubleToLong +convertDoubleToLong#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warning:Line 1:51: evaluation of [to_long(1e19)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:51: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range @@ -179,7 +179,7 @@ int:integer |ii:integer [5013792, 520128] |[5013792, 520128] ; -convertLongToInt +convertLongToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] | EVAL int = TO_INTEGER(long) @@ -194,7 +194,7 @@ long:long |int:integer // end::to_int-long-result[] ; -convertULToInt +convertULToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warning:Line 1:57: evaluation of [to_int(ul)] failed, treating result as null. Only first 20 failures recorded. // UL conversion to int dips into long; not the most efficient, but it's how SQL does it too. @@ -229,7 +229,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer |overflow:in 2147483647 |2147483647.2 |2147483647 |2147483647 |null |null ; -convertDoubleToInt +convertDoubleToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warning:Line 1:54: evaluation of [to_integer(1e19)] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range @@ -473,7 +473,7 @@ ROW deg = [90, 180, 270] [90, 180, 270] | [1.5707963267948966, 3.141592653589793, 4.71238898038469] ; -warningWithFromSource +warningWithFromSource#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] from employees | sort emp_no | limit 1 | eval x = to_long(emp_no) * 10000000 | eval y = to_int(x) > 1 | keep y; warning:Line 1:89: evaluation of [to_int(x)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:89: org.elasticsearch.xpack.ql.InvalidArgumentException: [100010000000] out of [integer] range diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 0b45f9ac5aea4..68bf4108ffcd1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -5,7 +5,7 @@ v:long 1 ; -showFunctions +showFunctions#[skip:-8.11.99] show functions; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean @@ -94,7 +94,7 @@ trim |? trim(arg1:?) ; -showFunctionsSynopsis +showFunctionsSynopsis#[skip:-8.11.99] show functions | keep synopsis; synopsis:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index acf42d908ed66..884cadaeceb16 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -682,14 +682,14 @@ c:l | job_positions:s 4 |Tech Lead ; -duplicateAggregationsWithoutGrouping +duplicateAggregationsWithoutGrouping#[skip:-8.11.99] from employees | eval x = salary | stats c = count(), m = min(x), m1 = min(salary), c1 = count(1); c:l | m:i | m1:i | c1:l 100 | 25324 | 25324 | 100 ; -duplicateAggregationsWithGrouping +duplicateAggregationsWithGrouping#[skip:-8.11.99] from employees | eval x = salary | stats c = count(), m = min(x), m1 = min(salary), c1 = count(1) by gender | sort gender; c:l| m:i | m1:i | c1:l| gender:s diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 5134e05b4cc3d..768353a1c8d35 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -12,11 +12,13 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import 
org.elasticsearch.test.junit.annotations.TestLogging; @@ -29,6 +31,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @TestLogging(value = "org.elasticsearch.xpack.esql.session:DEBUG", reason = "to better understand planning") @@ -76,6 +79,24 @@ protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), EsqlPlugin.class); } + protected void setRequestCircuitBreakerLimit(ByteSizeValue limit) { + if (limit != null) { + assertAcked( + clusterAdmin().prepareUpdateSettings() + .setPersistentSettings( + Settings.builder().put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), limit).build() + ) + ); + } else { + assertAcked( + clusterAdmin().prepareUpdateSettings() + .setPersistentSettings( + Settings.builder().putNull(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey()).build() + ) + ); + } + } + protected EsqlQueryResponse run(String esqlCommands) { return run(esqlCommands, randomPragmas()); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java new file mode 100644 index 0000000000000..b855fbd15be12 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -0,0 +1,351 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
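Hoisting setRequestCircuitBreakerLimit into the shared base class above lets any ESQL integration test pin the request breaker low and restore the default afterwards; passing null clears the persistent setting rather than writing one. Typical call pattern, mirroring the breaker tests in this diff (request, client and between come from the surrounding test infrastructure):

    // Inside a test subclass of AbstractEsqlIntegTestCase:
    setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 2048)));
    try {
        return client.execute(EsqlQueryAction.INSTANCE, request).actionGet(2, TimeUnit.MINUTES);
    } finally {
        setRequestCircuitBreakerLimit(null); // back to the default so later tests are unaffected
    }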
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.EnrichPlugin; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.junit.After; +import org.junit.Before; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.transport.AbstractSimpleTransportTestCase.IGNORE_DESERIALIZATION_ERRORS_SETTING; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/102184") +public class EnrichIT extends AbstractEsqlIntegTestCase { + + @Override + protected Collection> nodePlugins() { + List> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(EsqlPlugin.class); + plugins.add(InternalExchangePlugin.class); + plugins.add(LocalStateEnrich.class); + plugins.add(IngestCommonPlugin.class); + plugins.add(ReindexPlugin.class); + plugins.add(InternalTransportSettingPlugin.class); + return plugins; + } + + public static class InternalTransportSettingPlugin extends Plugin { + @Override + public List> getSettings() { + return List.of(IGNORE_DESERIALIZATION_ERRORS_SETTING); + } + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + 
.put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "128mb") + /* + * Force standard settings for the request breaker or we may not break at all. + * Without this we can randomly decide to use the `noop` breaker for request + * and it won't break..... + */ + .put( + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(Settings.EMPTY) + ) + .put( + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) + ) + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + // allow reading pages from network can trip the circuit breaker + .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) + .build(); + } + + @Override + protected EsqlQueryResponse run(EsqlQueryRequest request) { + final Client client; + if (randomBoolean()) { + client = client(randomFrom(clusterService().state().nodes().getCoordinatingOnlyNodes().values()).getName()); + } else { + client = client(); + } + if (randomBoolean()) { + setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 4096))); + try { + return client.execute(EsqlQueryAction.INSTANCE, request).actionGet(2, TimeUnit.MINUTES); + } catch (Exception e) { + logger.info("request failed", e); + ensureBlocksReleased(); + } finally { + setRequestCircuitBreakerLimit(null); + } + } + return client.execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); + } + + @Before + public void setupEnrichPolicies() { + client().admin() + .indices() + .prepareCreate("songs") + .setMapping("song_id", "type=keyword", "title", "type=keyword", "artist", "type=keyword", "length", "type=double") + .get(); + record Song(String id, String title, String artist, double length) { + + } + var songs = List.of( + new Song("s1", "Hotel California", "Eagles", 7.12), + new Song("s2", "In The End", "Linkin Park", 3.36), + new Song("s3", "Numb", "Linkin Park", 3.05), + new Song("s4", "The Sound Of Silence", "Disturbed", 4.08) + ); + for (var s : songs) { + client().prepareIndex("songs").setSource("song_id", s.id, "title", s.title, "artist", s.artist, "length", s.length).get(); + } + client().admin().indices().prepareRefresh("songs").get(); + EnrichPolicy policy = new EnrichPolicy("match", null, List.of("songs"), "song_id", List.of("title", "artist", "length")); + client().execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("songs", policy)).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request("songs")).actionGet(); + assertAcked(client().admin().indices().prepareDelete("songs")); + } + + @After + public void cleanEnrichPolicies() { + cluster().wipe(Set.of()); + client().execute(DeleteEnrichPolicyAction.INSTANCE, new DeleteEnrichPolicyAction.Request("songs")); + } + + @Before + public void setupMainIndex() { + var localListens = List.of( + new Listen(1, "s3", 1.5), + new Listen(2, "s2", 2.0), + new Listen(3, "s1", 0.5), + new Listen(4, "s3", 1.0), + new Listen(5, "s1", 2.5), + new Listen(6, "s1", 0.25), + new Listen(7, "s2", 3.0) + ); + client().admin() + .indices() + .prepareCreate("listens") + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) + .setMapping("timestamp", "type=long", "song_id", "type=keyword", 
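setupEnrichPolicies() above walks the whole enrich lifecycle and deliberately deletes the source index at the end: once ExecuteEnrichPolicyAction has run, lookups are served from the internal enrich index, not from songs. The essential sequence, reduced to its four steps as the test performs them:

    // 1. the source index "songs" exists and is populated (see the setup above)
    EnrichPolicy policy = new EnrichPolicy("match", null, List.of("songs"), "song_id", List.of("title", "artist", "length"));
    // 2. register the policy, 3. execute it (this snapshots "songs" into an internal enrich index)
    client().execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("songs", policy)).actionGet();
    client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request("songs")).actionGet();
    // 4. the source index is now disposable
    assertAcked(client().admin().indices().prepareDelete("songs"));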
"duration", "type=double") + .get(); + for (Listen listen : localListens) { + client().prepareIndex("listens") + .setSource("timestamp", listen.timestamp, "song_id", listen.songId, "duration", listen.duration) + .get(); + } + client().admin().indices().prepareRefresh("listens").get(); + } + + @Before + public void ensureAtLeastOneCoordinatingNodeOnly() { + if (clusterService().state().nodes().getCoordinatingOnlyNodes().isEmpty()) { + internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + } + } + + record Listen(long timestamp, String songId, double duration) { + + } + + private static String enrichSongCommand() { + String command = " ENRICH songs "; + if (randomBoolean()) { + command += " ON song_id "; + } + if (randomBoolean()) { + command += " WITH artist, title, length "; + } + return command; + } + + public void testSumDurationByArtist() { + Function> extractStats = resp -> { + List columns = resp.columns(); + assertThat(columns, hasSize(2)); + assertThat(columns.get(0).name(), equalTo("sum(duration)")); + assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(1).name(), equalTo("artist")); + assertThat(columns.get(1).type(), equalTo("keyword")); + Iterator> rows = resp.values(); + Map actualValues = new HashMap<>(); + while (rows.hasNext()) { + Iterator row = rows.next(); + Object v = row.next(); + Object k = row.next(); + actualValues.put((String) k, (Double) v); + } + return actualValues; + }; + + var statsCommands = List.of( + enrichSongCommand() + " | STATS sum(duration) by artist", + "STATS duration = sum(duration) by song_id | " + enrichSongCommand() + " | STATS sum(duration) by artist" + ); + for (String statsCommand : statsCommands) { + try (var resp = run("from listens* | " + statsCommand)) { + assertThat(extractStats.apply(resp), equalTo(Map.of("Eagles", 3.25, "Linkin Park", 7.5))); + } + } + } + + public void testAvgDurationByArtist() { + Function> extractStats = resp -> { + List columns = resp.columns(); + assertThat(columns, hasSize(2)); + assertThat(columns.get(0).name(), equalTo("avg(duration)")); + assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(1).name(), equalTo("artist")); + assertThat(columns.get(1).type(), equalTo("keyword")); + Iterator> rows = resp.values(); + Map actualValues = new HashMap<>(); + while (rows.hasNext()) { + Iterator row = rows.next(); + Object v = row.next(); + Object k = row.next(); + actualValues.put((String) k, (Double) v); + } + return actualValues; + }; + try (var resp = run("from listens* | " + enrichSongCommand() + " | STATS avg(duration) by artist")) { + Map stats = extractStats.apply(resp); + assertThat(stats.keySet(), containsInAnyOrder("Eagles", "Linkin Park")); + assertThat(stats.get("Eagles"), closeTo(1.08333, 0.1)); + assertThat(stats.get("Linkin Park"), closeTo(1.875, 0.1)); + } + } + + public void testListeningRatio() { + Function> extractStats = resp -> { + List columns = resp.columns(); + assertThat(columns, hasSize(2)); + assertThat(columns.get(0).name(), equalTo("ratio")); + assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(1).name(), equalTo("artist")); + assertThat(columns.get(1).type(), equalTo("keyword")); + Iterator> rows = resp.values(); + Map actualValues = new HashMap<>(); + while (rows.hasNext()) { + Iterator row = rows.next(); + Object v = row.next(); + Object k = row.next(); + actualValues.put((String) k, (Double) v); + } + return actualValues; + }; + + var statsCommand = "STATS d = sum(duration), l = sum(length) by artist | 
EVAL ratio=d /l | KEEP ratio, artist"; + try (var resp = run("from listens* | " + enrichSongCommand() + "|" + statsCommand)) { + Map stats = extractStats.apply(resp); + assertThat(stats.keySet(), containsInAnyOrder("Eagles", "Linkin Park")); + assertThat(stats.get("Eagles"), closeTo(0.1521, 0.05)); + assertThat(stats.get("Linkin Park"), closeTo(0.585, 0.05)); + } + } + + public void testFilterAfterEnrich() { + try (var resp = run("from listens* | " + enrichSongCommand() + " | WHERE length < 3.2 | limit 10 | KEEP artist,title")) { + Iterator row = resp.values().next(); + assertThat(row.next(), equalTo("Linkin Park")); + assertThat(row.next(), equalTo("Numb")); + } + } + + public void testTopN() { + try (var resp = run("from listens* | sort timestamp DESC | limit 1 |" + enrichSongCommand() + " | KEEP timestamp, artist")) { + Iterator row = resp.values().next(); + assertThat(row.next(), equalTo(7L)); + assertThat(row.next(), equalTo("Linkin Park")); + } + try (var resp = run("from listens* | " + enrichSongCommand() + " | sort timestamp DESC | limit 1 | KEEP timestamp, artist")) { + Iterator row = resp.values().next(); + assertThat(row.next(), equalTo(7L)); + assertThat(row.next(), equalTo("Linkin Park")); + } + } + + public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { + + public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + + plugins.add(new EnrichPlugin(settings) { + @Override + protected XPackLicenseState getLicenseState() { + return this.getLicenseState(); + } + }); + } + + public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { + @Inject + public EnrichTransportXPackInfoAction( + TransportService transportService, + ActionFilters actionFilters, + LicenseService licenseService, + NodeClient client + ) { + super(transportService, actionFilters, licenseService, client); + } + + @Override + protected List infoActions() { + return Collections.singletonList(XPackInfoFeatureAction.ENRICH); + } + } + + @Override + protected Class> getInfoAction() { + return EnrichTransportXPackInfoAction.class; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 342df5209ec95..55a21cd7e4403 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -26,7 +26,6 @@ import java.util.List; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.transport.AbstractSimpleTransportTestCase.IGNORE_DESERIALIZATION_ERRORS_SETTING; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -74,24 +73,6 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .build(); } - private void setRequestCircuitBreakerLimit(ByteSizeValue limit) { - if (limit != null) { - assertAcked( - clusterAdmin().prepareUpdateSettings() - .setPersistentSettings( - Settings.builder().put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), limit).build() - ) - ); - } else { - assertAcked( - clusterAdmin().prepareUpdateSettings() - .setPersistentSettings( - 
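enrichSongCommand() above randomises the optional clauses because ENRICH defaults them: without ON it joins on the policy's match field, and without WITH it adds every enrich field, so all of the generated queries are equivalent. For example (field and policy names from the test fixtures):

    // Two of the forms the tests can generate; both resolve to the same join.
    String explicit = "from listens* | ENRICH songs ON song_id WITH artist, title, length | STATS sum(duration) by artist";
    String implicit = "from listens* | ENRICH songs | STATS sum(duration) by artist";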
Settings.builder().putNull(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey()).build() - ) - ); - } - } - @Override protected EsqlQueryResponse run(EsqlQueryRequest request) { setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 2048))); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java deleted file mode 100644 index 56ea27e360c1d..0000000000000 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.lookup; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.Driver; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.DriverRunner; -import org.elasticsearch.compute.operator.OutputOperator; -import org.elasticsearch.compute.operator.SourceOperator; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.tasks.CancellableTask; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; -import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.NamedExpression; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.EsField; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Executor; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Function; - -import static org.hamcrest.Matchers.equalTo; - -public class EnrichLookupIT extends AbstractEsqlIntegTestCase { - - public void testSimple() { - ElasticsearchAssertions.assertAcked( - client().admin() - .indices() - .prepareCreate("users") - .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) - .setMapping( - "uid", - "type=keyword,doc_values=false", - "name", - 
"type=keyword,index=false", - "city", - "type=keyword,index=false", - "joined", - "type=date,index=false,format=yyyy-MM-dd" - ) - ); - List> users = List.of( - Map.of("uid", "j1", "name", "John", "city", "New York/NY", "joined", "2020-03-01"), - Map.of("uid", "m4", "name", "Mike", "city", "Boston/MA", "joined", "2010-06-20"), - Map.of("uid", "j2", "name", "Jack", "city", "Austin/TX", "joined", "1999-11-03") - ); - for (Map user : users) { - client().prepareIndex("users").setSource(user).get(); - if (randomBoolean()) { - client().admin().indices().prepareRefresh("users").get(); - } - } - if (randomBoolean()) { - client().admin().indices().prepareForceMerge("users").setMaxNumSegments(1).get(); - } - client().admin().indices().prepareRefresh("users").get(); - List enrichAttributes = List.of( - new FieldAttribute(Source.EMPTY, "name", new EsField("name", DataTypes.KEYWORD, Map.of(), true)), - new FieldAttribute(Source.EMPTY, "city", new EsField("city", DataTypes.KEYWORD, Map.of(), true)), - new FieldAttribute(Source.EMPTY, "joined", new EsField("joined", DataTypes.DATETIME, Map.of(), true)) - ); - - DiscoveryNode clientNode = randomFrom(clusterService().state().nodes().stream().toList()); - TransportEsqlQueryAction queryAction = internalCluster().getInstance(TransportEsqlQueryAction.class, clientNode.getName()); - TransportService transportService = internalCluster().getInstance(TransportService.class, clientNode.getName()); - - EsqlQueryRequest parentRequest = new EsqlQueryRequest(); - parentRequest.query("FROM index"); - CancellableTask parentTask = (CancellableTask) transportService.getTaskManager().register("test", "test-action", parentRequest); - EnrichLookupOperator enrichOperator = new EnrichLookupOperator( - "test-session", - parentTask, - randomIntBetween(1, 3), - 0, - queryAction.enrichLookupService(), - "users", - "match", - "uid", - enrichAttributes - ); - BytesRefBlock userBlock = BytesRefBlock.newBlockBuilder(5) - .appendBytesRef(new BytesRef("j1")) - .appendNull() - .appendBytesRef(new BytesRef("j2")) - .appendBytesRef(new BytesRef("j1")) - .appendBytesRef(new BytesRef("m3")) - .build(); - SourceOperator sourceOperator = sourceOperator(userBlock); - - AtomicReference outputPage = new AtomicReference<>(); - OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), page -> { - outputPage.getAndUpdate(current -> { - if (current == null) { - return page; - } - Block.Builder[] builders = new Block.Builder[current.getBlockCount()]; - for (int i = 0; i < current.getBlockCount(); i++) { - ElementType elementType = current.getBlock(i).elementType(); - if (elementType == ElementType.NULL) { - elementType = page.getBlock(i).elementType(); - } - builders[i] = elementType.newBlockBuilder(1); - builders[i].copyFrom(current.getBlock(i), 0, current.getPositionCount()); - builders[i].copyFrom(page.getBlock(i), 0, page.getPositionCount()); - } - return new Page(Arrays.stream(builders).map(Block.Builder::build).toArray(Block[]::new)); - }); - }); - - DateFormatter dateFmt = DateFormatter.forPattern("yyyy-MM-dd"); - - var runner = new DriverRunner(transportService.getThreadPool().getThreadContext()) { - final Executor executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); - - @Override - protected void start(Driver driver, ActionListener listener) { - Driver.start(transportService.getThreadPool().getThreadContext(), executor, driver, between(1, 1000), listener); - } - }; - Driver driver = new Driver(driverContext(), sourceOperator, 
List.of(enrichOperator), outputOperator, () -> {}); - PlainActionFuture future = new PlainActionFuture<>(); - runner.runToCompletion(List.of(driver), future); - future.actionGet(TimeValue.timeValueSeconds(30)); - transportService.getTaskManager().unregister(parentTask); - Page output = outputPage.get(); - assertThat(output.getBlockCount(), equalTo(4)); - assertThat(output.getPositionCount(), equalTo(5)); - BytesRef scratch = new BytesRef(); - BytesRefBlock names = output.getBlock(1); - BytesRefBlock cities = output.getBlock(2); - LongBlock dates = output.getBlock(3); - - assertThat(names.getBytesRef(0, scratch), equalTo(new BytesRef("John"))); - assertThat(cities.getBytesRef(0, scratch), equalTo(new BytesRef("New York/NY"))); - assertThat(dateFmt.formatMillis(dates.getLong(0)), equalTo("2020-03-01")); - - assertTrue(names.isNull(1)); - assertTrue(cities.isNull(1)); - assertTrue(dates.isNull(1)); - - assertThat(names.getBytesRef(2, scratch), equalTo(new BytesRef("Jack"))); - assertThat(cities.getBytesRef(2, scratch), equalTo(new BytesRef("Austin/TX"))); - assertThat(dateFmt.formatMillis(dates.getLong(2)), equalTo("1999-11-03")); - - assertThat(names.getBytesRef(3, scratch), equalTo(new BytesRef("John"))); - assertThat(cities.getBytesRef(3, scratch), equalTo(new BytesRef("New York/NY"))); - assertThat(dateFmt.formatMillis(dates.getLong(3)), equalTo("2020-03-01")); - - assertTrue(names.isNull(4)); - assertTrue(cities.isNull(4)); - assertTrue(dates.isNull(4)); - } - - private static SourceOperator sourceOperator(BytesRefBlock input) { - return new SourceOperator() { - int position = 0; - - @Override - public void finish() { - - } - - @Override - public boolean isFinished() { - return position >= input.getPositionCount(); - } - - @Override - public Page getOutput() { - if (isFinished()) { - return null; - } - int remaining = input.getPositionCount() - position; - int size = between(1, remaining); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(size); - builder.copyFrom(input, position, position + size); - position += size; - Block block = builder.build(); - if (block.areAllValuesNull() && randomBoolean()) { - block = Block.constantNullBlock(block.getPositionCount()); - } - return new Page(block); - } - - @Override - public void close() { - - } - }; - } - - public void testRandom() { - - } - - public void testMultipleMatches() { - - } - - static DriverContext driverContext() { - return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() - ); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index 5ac551103f338..d2b18b1b45a60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -106,7 +106,7 @@ protected void performAsync(Page inputPage, ActionListener listener) { } @Override - public void close() { + protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates early (e.g., have enough result). 
} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 98c1397d97860..7dd9f01a9d6c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -38,6 +38,9 @@ import org.elasticsearch.compute.operator.OutputOperator; import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -125,7 +128,12 @@ public EnrichLookupService( this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); this.bigArrays = bigArrays; this.blockFactory = blockFactory; - transportService.registerRequestHandler(LOOKUP_ACTION_NAME, this.executor, LookupRequest::new, new TransportHandler()); + transportService.registerRequestHandler( + LOOKUP_ACTION_NAME, + this.executor, + in -> new LookupRequest(in, blockFactory), + new TransportHandler() + ); } public void lookupAsync( @@ -164,7 +172,11 @@ public void lookupAsync( lookupRequest, parentTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener.map(r -> r.page), LookupResponse::new, executor) + new ActionListenerResponseHandler<>( + listener.map(LookupResponse::takePage), + in -> new LookupResponse(in, blockFactory), + executor + ) ); } }, listener::onFailure)); @@ -226,11 +238,11 @@ private void doLookup( ActionListener listener ) { Block inputBlock = inputPage.getBlock(0); - if (inputBlock.areAllValuesNull()) { - listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); - return; - } try { + if (inputBlock.areAllValuesNull()) { + listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); + return; + } ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); listener = ActionListener.runBefore(listener, searchContext::close); @@ -255,9 +267,7 @@ private void doLookup( extractField instanceof Alias a ? 
((NamedExpression) a.child()).name() : extractField.name(), EsqlDataTypes.isUnsupported(extractField.dataType()) ); - intermediateOperators.add( - new ValuesSourceReaderOperator(BlockFactory.getNonBreakingInstance(), sources, 0, extractField.name()) - ); + intermediateOperators.add(new ValuesSourceReaderOperator(blockFactory, sources, 0, extractField.name())); } // drop docs block intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); @@ -297,12 +307,18 @@ private void doLookup( } } - private static Page createNullResponse(int positionCount, List extractFields) { + private Page createNullResponse(int positionCount, List extractFields) { final Block[] blocks = new Block[extractFields.size()]; - for (int i = 0; i < extractFields.size(); i++) { - blocks[i] = Block.constantNullBlock(positionCount); + try { + for (int i = 0; i < extractFields.size(); i++) { + blocks[i] = blockFactory.newConstantNullBlock(positionCount); + } + return new Page(blocks); + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.close(blocks); + } } - return new Page(blocks); } private static Operator droppingBlockOperator(int totalBlocks, int droppingPosition) { @@ -340,6 +356,9 @@ private static class LookupRequest extends TransportRequest implements IndicesRe private final String matchField; private final Page inputPage; private final List extractFields; + // TODO: Remove this workaround once we have Block RefCount + private final Page toRelease; + private final RefCounted refs = AbstractRefCounted.of(this::releasePage); LookupRequest( String sessionId, @@ -354,17 +373,18 @@ private static class LookupRequest extends TransportRequest implements IndicesRe this.matchType = matchType; this.matchField = matchField; this.inputPage = inputPage; + this.toRelease = null; this.extractFields = extractFields; } - LookupRequest(StreamInput in) throws IOException { + LookupRequest(StreamInput in, BlockFactory blockFactory) throws IOException { super(in); this.sessionId = in.readString(); this.shardId = new ShardId(in); this.matchType = in.readString(); this.matchField = in.readString(); - // TODO real BlockFactory - this.inputPage = new Page(new BlockStreamInput(in, BlockFactory.getNonBreakingInstance())); + this.inputPage = new Page(new BlockStreamInput(in, blockFactory)); + this.toRelease = inputPage; PlanStreamInput planIn = new PlanStreamInput(in, PlanNameRegistry.INSTANCE, in.namedWriteableRegistry(), null); this.extractFields = planIn.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readNamedExpression)); } @@ -400,6 +420,32 @@ public String getDescription() { } }; } + + private void releasePage() { + if (toRelease != null) { + Releasables.closeExpectNoException(toRelease::releaseBlocks); + } + } + + @Override + public void incRef() { + refs.incRef(); + } + + @Override + public boolean tryIncRef() { + return refs.tryIncRef(); + } + + @Override + public boolean decRef() { + return refs.decRef(); + } + + @Override + public boolean hasReferences() { + return refs.hasReferences(); + } } private static String lookupDescription( @@ -427,20 +473,52 @@ private static String lookupDescription( } private static class LookupResponse extends TransportResponse { - private final Page page; + private Page page; + private final RefCounted refs = AbstractRefCounted.of(this::releasePage); LookupResponse(Page page) { this.page = page; } - LookupResponse(StreamInput in) throws IOException { - // TODO real BlockFactory - this.page = new Page(new BlockStreamInput(in, 
BlockFactory.getNonBreakingInstance())); + LookupResponse(StreamInput in, BlockFactory blockFactory) throws IOException { + this.page = new Page(new BlockStreamInput(in, blockFactory)); } @Override public void writeTo(StreamOutput out) throws IOException { page.writeTo(out); } + + Page takePage() { + var p = page; + page = null; + return p; + } + + private void releasePage() { + if (page != null) { + Releasables.closeExpectNoException(page::releaseBlocks); + } + } + + @Override + public void incRef() { + refs.incRef(); + } + + @Override + public boolean tryIncRef() { + return refs.tryIncRef(); + } + + @Override + public boolean decRef() { + return refs.decRef(); + } + + @Override + public boolean hasReferences() { + return refs.hasReferences(); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index e9a9512fe23c9..d1eb9e8d28d78 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -81,23 +81,27 @@ public boolean needsInput() { @Override public void addInput(Page page) { - final IntBlock positions = page.getBlock(positionChannel); - final int currentPosition = positions.getInt(0); - if (singleMode) { - fillNullUpToPosition(currentPosition); - for (int i = 0; i < mergingChannels.length; i++) { - int channel = mergingChannels[i]; - outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); - } - filledPositions++; - } else { - if (positionBuilder != null && positionBuilder.position != currentPosition) { - flushPositionBuilder(); - } - if (positionBuilder == null) { - positionBuilder = new PositionBuilder(currentPosition, mergingTypes); + try { + final IntBlock positions = page.getBlock(positionChannel); + final int currentPosition = positions.getInt(0); + if (singleMode) { + fillNullUpToPosition(currentPosition); + for (int i = 0; i < mergingChannels.length; i++) { + int channel = mergingChannels[i]; + outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); + } + filledPositions++; + } else { + if (positionBuilder != null && positionBuilder.position != currentPosition) { + flushPositionBuilder(); + } + if (positionBuilder == null) { + positionBuilder = new PositionBuilder(currentPosition, mergingTypes); + } + positionBuilder.combine(page, mergingChannels); } - positionBuilder.combine(page, mergingChannels); + } finally { + Releasables.closeExpectNoException(page::releaseBlocks); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 13581710f7c53..00000f7755107 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Randomness; @@ -215,7 +216,7 @@ public CsvTests(String fileName, String groupName, String testName, Integer line public final void test() throws Throwable { try { - assumeTrue("Test " + testName + " is not enabled", 
isEnabled(testName));
+            assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT));
             doTest();
         } catch (Throwable th) {
             throw reworkException(th);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 094ecc9bfe569..c79e77915ac01 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -52,6 +52,10 @@
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
 
 import java.io.IOException;
 import java.nio.file.Files;
@@ -117,11 +121,11 @@ public static Literal randomLiteral(DataType type) {
 
     protected TestCaseSupplier.TestCase testCase;
 
-    protected static Iterable<Object[]> parameterSuppliersFromTypedData(List<TestCaseSupplier> cases) {
+    protected static Iterable<Object[]> parameterSuppliersFromTypedData(List<TestCaseSupplier> suppliers) {
         // TODO rename this method to something more descriptive. Javadoc. And make sure all parameters are "representable" types.
-        List<Object[]> parameters = new ArrayList<>(cases.size());
-        for (TestCaseSupplier element : cases) {
-            parameters.add(new Object[] { element });
+        List<Object[]> parameters = new ArrayList<>(suppliers.size());
+        for (TestCaseSupplier supplier : suppliers) {
+            parameters.add(new Object[] { supplier });
         }
         return parameters;
     }
@@ -493,13 +497,34 @@ public void testSerializationOfSimple() {
         assertSerialization(buildFieldExpression(testCase));
     }
 
+    private static boolean ranAllTests = false;
+
+    @ClassRule
+    public static TestRule rule = new TestRule() {
+        @Override
+        public Statement apply(Statement base, Description description) {
+            for (Description d : description.getChildren()) {
+                if (d.getChildren().size() > 1) {
+                    ranAllTests = true;
+                    return base;
+                }
+            }
+            return base;
+        }
+    };
+
     @AfterClass
     public static void testFunctionInfo() {
+        if (ranAllTests == false) {
+            LogManager.getLogger(getTestClass()).info("Skipping function info checks because we're running a portion of the tests");
+            return;
+        }
         FunctionDefinition definition = definition();
         if (definition == null) {
             LogManager.getLogger(getTestClass()).info("Skipping function info checks because the function isn't registered");
             return;
         }
+        LogManager.getLogger(getTestClass()).info("Running function info checks");
         EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition);
         List<EsqlFunctionRegistry.ArgSignature> args = description.args();
@@ -762,6 +787,10 @@ public static void renderSignature() throws IOException {
         if (System.getProperty("generateDocs") == null) {
             return;
         }
+        if (ranAllTests == false) {
+            LogManager.getLogger(getTestClass()).info("Skipping rendering signature because we're running a portion of the tests");
+            return;
+        }
         FunctionDefinition definition = definition();
         if (definition == null) {
             LogManager.getLogger(getTestClass()).info("Skipping rendering signature because the function isn't registered");
diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java
index
3ca8c7302d6dd..4f1efbbca387c 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -490,6 +490,111 @@ public void testDownsampleTwice() throws Exception { } } + public void testDownsampleTwiceSameInterval() throws Exception { + // Create the ILM policy + Request request = new Request("PUT", "_ilm/policy/" + policy); + request.setJsonEntity(""" + { + "policy": { + "phases": { + "warm": { + "actions": { + "downsample": { + "fixed_interval" : "5m" + } + } + }, + "cold": { + "min_age": "365d", + "actions": {} + } + } + } + } + """); + assertOK(client().performRequest(request)); + + // Create a template + Request createIndexTemplateRequest = new Request("POST", "/_index_template/" + dataStream); + createIndexTemplateRequest.setJsonEntity( + Strings.format(TEMPLATE, dataStream, "2006-01-08T23:40:53.384Z", "2021-01-08T23:40:53.384Z", policy) + ); + assertOK(client().performRequest(createIndexTemplateRequest)); + + index(client(), dataStream, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); + + String firstBackingIndex = getBackingIndices(client(), dataStream).get(0); + logger.info("--> firstBackingIndex: {}", firstBackingIndex); + assertBusy( + () -> assertThat( + "index must wait in the " + CheckNotDataStreamWriteIndexStep.NAME + " until it is not the write index anymore", + explainIndex(client(), firstBackingIndex).get("step"), + is(CheckNotDataStreamWriteIndexStep.NAME) + ), + 30, + TimeUnit.SECONDS + ); + + // before we rollover, update template to not contain time boundaries anymore (rollover is blocked otherwise due to index time + // boundaries overlapping after rollover) + Request updateIndexTemplateRequest = new Request("POST", "/_index_template/" + dataStream); + updateIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE_NO_TIME_BOUNDARIES, dataStream, policy)); + assertOK(client().performRequest(updateIndexTemplateRequest)); + + // Manual rollover the original index such that it's not the write index in the data stream anymore + rolloverMaxOneDocCondition(client(), dataStream); + + String downsampleIndexName = "downsample-5m-" + firstBackingIndex; + // wait for the downsample index to get to the end of the warm phase + assertBusy(() -> { + assertThat(indexExists(downsampleIndexName), is(true)); + assertThat(indexExists(firstBackingIndex), is(false)); + + assertThat(explainIndex(client(), downsampleIndexName).get("step"), is(PhaseCompleteStep.NAME)); + assertThat(explainIndex(client(), downsampleIndexName).get("phase"), is("warm")); + + Map settings = getOnlyIndexSettings(client(), downsampleIndexName); + assertEquals(firstBackingIndex, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME.getKey())); + assertEquals(firstBackingIndex, settings.get(IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME.getKey())); + assertEquals(DownsampleTaskStatus.SUCCESS.toString(), settings.get(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey())); + assertEquals(policy, settings.get(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey())); + }, 60, TimeUnit.SECONDS); + + // update the policy to now contain the downsample action in cold, whilst not existing in warm anymore (this will have our already + // downsampled index attempt to go through the downsample action again when in cold) + + Request updatePolicyRequest = new Request("PUT", "_ilm/policy/" + policy); + 
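+        // Reusing the 5m fixed_interval here targets an index that the warm phase already
+        // downsampled to 5m; the assertions below verify the downsample index survives this
+        // second pass and reaches PhaseCompleteStep in the cold phase.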
updatePolicyRequest.setJsonEntity(""" + { + "policy": { + "phases": { + "warm": { + "actions": { + } + }, + "cold": { + "min_age": "0ms", + "actions": { + "downsample": { + "fixed_interval" : "5m" + } + } + } + } + } + } + """); + assertOK(client().performRequest(updatePolicyRequest)); + + // the downsample index (already part of the data stream as we created it in the warm phase previously) should continue to exist and + // reach the cold/complete/complete step + assertBusy(() -> { + assertThat(indexExists(downsampleIndexName), is(true)); + assertThat(explainIndex(client(), downsampleIndexName).get("step"), is(PhaseCompleteStep.NAME)); + assertThat(explainIndex(client(), downsampleIndexName).get("phase"), is("cold")); + }, 60, TimeUnit.SECONDS); + } + /** * Gets the generated rollup index name for a given index by looking at newly created indices that match the rollup index name pattern * diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index 668cc4121b7b5..b40528664275d 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -960,16 +960,12 @@ static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(name); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? 
null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java index 795cc104e96b7..ecbe94fa75210 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java @@ -87,16 +87,11 @@ public void testShrinkOnTiers() throws Exception { null ); - ComposableIndexTemplate template = new ComposableIndexTemplate( - Collections.singletonList(index), - t, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(index)) + .template(t) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("template").indexTemplate(template) diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java index d8cdcc27bc038..1919bd86b5083 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java @@ -87,16 +87,11 @@ public void testShrinkOnTiers() throws Exception { null ); - ComposableIndexTemplate template = new ComposableIndexTemplate( - Collections.singletonList(index), - t, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(index)) + .template(t) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("template").indexTemplate(template) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java index 6aa46dee54829..63c029316536f 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java @@ -1395,77 +1395,82 @@ public void testMigrateComposableIndexTemplates() { String includeRoutingSetting = INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + nodeAttrName; String excludeRoutingSetting = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + nodeAttrName; - ComposableIndexTemplate templateWithRequireRouting = new ComposableIndexTemplate( - List.of("test-*"), - new Template( - Settings.builder().put(requireRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, 
"testLifecycle").build(), - null, - null - ), - List.of(), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate templateWithRequireRouting = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template( + new Template( + Settings.builder().put(requireRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle").build(), + null, + null + ) + ) + .componentTemplates(List.of()) + .priority(randomLong()) + .version(randomLong()) + .build(); - ComposableIndexTemplate templateWithIncludeRouting = new ComposableIndexTemplate( - List.of("test-*"), - new Template( - Settings.builder().put(includeRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle").build(), - null, - null - ), - List.of(), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate templateWithIncludeRouting = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template( + new Template( + Settings.builder().put(includeRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle").build(), + null, + null + ) + ) + .componentTemplates(List.of()) + .priority(randomLong()) + .version(randomLong()) + .build(); - ComposableIndexTemplate templateWithExcludeRouting = new ComposableIndexTemplate( - List.of("test-*"), - new Template( - Settings.builder().put(excludeRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle").build(), - null, - null - ), - List.of(), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate templateWithExcludeRouting = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template( + new Template( + Settings.builder().put(excludeRoutingSetting, "hot").put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle").build(), + null, + null + ) + ) + .componentTemplates(List.of()) + .priority(randomLong()) + .version(randomLong()) + .build(); - ComposableIndexTemplate templateWithRequireAndIncludeRoutings = new ComposableIndexTemplate( - List.of("test-*"), - new Template( - Settings.builder() - .put(requireRoutingSetting, "hot") - .put(includeRoutingSetting, "rack1") - .put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle") - .build(), - null, - null - ), - List.of(), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate templateWithRequireAndIncludeRoutings = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template( + new Template( + Settings.builder() + .put(requireRoutingSetting, "hot") + .put(includeRoutingSetting, "rack1") + .put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle") + .build(), + null, + null + ) + ) + .componentTemplates(List.of()) + .priority(randomLong()) + .version(randomLong()) + .build(); - ComposableIndexTemplate templateWithoutCustomRoutings = new ComposableIndexTemplate( - List.of("test-*"), - new Template( - Settings.builder() - .put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle") - .put(IndexSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true) - .build(), - null, - null - ), - List.of(), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate templateWithoutCustomRoutings = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template( + new Template( + Settings.builder() + .put(LifecycleSettings.LIFECYCLE_NAME, "testLifecycle") + .put(IndexSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true) + .build(), + null, + null + ) + ) + .componentTemplates(List.of()) + .priority(randomLong()) + .version(randomLong()) + .build(); ClusterState clusterState = 
ClusterState.builder(ClusterName.DEFAULT) .metadata( @@ -1635,14 +1640,13 @@ public void testMigrateIndexAndComponentTemplates() { null ); - ComposableIndexTemplate composableTemplateWithRequireRouting = new ComposableIndexTemplate( - List.of("test-*"), - new Template(Settings.builder().put(requireRoutingSetting, "hot").build(), null, null), - List.of("component-template-without-custom-routing"), - randomLong(), - randomLong(), - null - ); + ComposableIndexTemplate composableTemplateWithRequireRouting = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template(new Template(Settings.builder().put(requireRoutingSetting, "hot").build(), null, null)) + .componentTemplates(List.of("component-template-without-custom-routing")) + .priority(randomLong()) + .version(randomLong()) + .build(); ComponentTemplate compTemplateWithRequireAndIncludeRoutings = new ComponentTemplate( new Template( diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java index 0da0340084cba..47f279b6b6910 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -80,17 +81,21 @@ public void testMockService() { assertModelsAreEqual(putModel, readModel); // The response is randomly generated, the input can be anything - inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, randomAlphaOfLength(10)); + inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10))); } - public void testMockInClusterService() { - String modelId = "test-mock-in-cluster"; - ModelConfigurations putModel = putMockService(modelId, "test_service_in_cluster_service", TaskType.SPARSE_EMBEDDING); + public void testMockServiceWithMultipleInputs() { + String modelId = "test-mock-with-multi-inputs"; + ModelConfigurations putModel = putMockService(modelId, "test_service", TaskType.SPARSE_EMBEDDING); ModelConfigurations readModel = getModel(modelId, TaskType.SPARSE_EMBEDDING); assertModelsAreEqual(putModel, readModel); // The response is randomly generated, the input can be anything - inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, randomAlphaOfLength(10)); + inferOnMockService( + modelId, + TaskType.SPARSE_EMBEDDING, + List.of(randomAlphaOfLength(5), randomAlphaOfLength(10), randomAlphaOfLength(15)) + ); } public void testMockService_DoesNotReturnSecretsInGetResponse() throws IOException { @@ -164,16 +169,21 @@ public ModelConfigurations getModel(String modelId, TaskType taskType) { return response.getModel(); } - private void inferOnMockService(String modelId, TaskType taskType, String input) { + private List inferOnMockService(String modelId, TaskType taskType, List input) { var response = client().execute(InferenceAction.INSTANCE, new InferenceAction.Request(taskType, modelId, input, Map.of())) .actionGet(); if (taskType == 
TaskType.SPARSE_EMBEDDING) {
-            assertThat(response.getResult(), instanceOf(TextExpansionResults.class));
-            var teResult = (TextExpansionResults) response.getResult();
-            assertThat(teResult.getWeightedTokens(), not(empty()));
+            response.getResults().forEach(result -> {
+                assertThat(result, instanceOf(TextExpansionResults.class));
+                var teResult = (TextExpansionResults) result;
+                assertThat(teResult.getWeightedTokens(), not(empty()));
+            });
+
         } else {
             fail("test with task type [" + taskType + "] are not supported yet");
         }
+
+        return response.getResults();
     }
 
     private void assertModelsAreEqual(ModelConfigurations model1, ModelConfigurations model2) {
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
index f9e6eef5ffcc7..c06910432e37d 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.ValidationException;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -27,10 +28,12 @@
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;
 import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResultsTests;
 import org.elasticsearch.xpack.inference.services.MapParsingUtils;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -154,9 +157,21 @@ public TestServiceModel parsePersistedConfig(
         }
 
         @Override
-        public void infer(Model model, String input, Map<String, Object> taskSettings, ActionListener<InferenceResults> listener) {
+        public void infer(
+            Model model,
+            List<String> input,
+            Map<String, Object> taskSettings,
+            ActionListener<List<InferenceResults>> listener
+        ) {
             switch (model.getConfigurations().getTaskType()) {
-                case SPARSE_EMBEDDING -> listener.onResponse(TextExpansionResultsTests.createRandomResults(1, 10));
+                case SPARSE_EMBEDDING -> {
+                    var results = new ArrayList<InferenceResults>();
+                    input.forEach(i -> {
+                        int numTokensInResult = Strings.tokenizeToStringArray(i, " ").length;
+                        results.add(TextExpansionResultsTests.createRandomResults(numTokensInResult, numTokensInResult));
+                    });
+                    listener.onResponse(results);
+                }
                 default -> listener.onFailure(
                     new ElasticsearchStatusException(
                         TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()),
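The mock service above pins down the new contract: infer now takes the whole list of inputs and must answer with exactly one result per input, in input order. A minimal self-contained sketch of that list-in/list-out shape (the Result record, the infer signature, and the token counting here are illustrative stand-ins, not the real InferenceService API):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    final class BatchInferSketch {
        // Stand-in for InferenceResults; one instance is produced per input string.
        record Result(String input, int tokenCount) {}

        static void infer(List<String> inputs, Consumer<List<Result>> onResponse) {
            List<Result> results = new ArrayList<>(inputs.size());
            for (String input : inputs) {
                // Mirrors the mock service: the result tracks the input's token count.
                results.add(new Result(input, input.split(" ").length));
            }
            onResponse.accept(results); // exactly inputs.size() results, in order
        }

        public static void main(String[] args) {
            infer(List.of("hello world", "one two three"), rs -> rs.forEach(System.out::println));
        }
    }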
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
index 393cbd0413e5f..2817276631f95 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
@@ -40,6 +40,7 @@ import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction;
 import org.elasticsearch.xpack.inference.external.http.HttpClientManager;
 import org.elasticsearch.xpack.inference.external.http.HttpSettings;
+import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 import org.elasticsearch.xpack.inference.registry.ModelRegistry;
@@ -47,6 +48,7 @@
 import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction;
 import org.elasticsearch.xpack.inference.rest.RestInferenceAction;
 import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction;
+import org.elasticsearch.xpack.inference.services.ServiceComponents;
 import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService;
 import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService;
 
@@ -61,10 +63,10 @@ public class InferencePlugin extends Plugin implements ActionPlugin, InferenceServicePlugin {
     public static final String NAME = "inference";
     public static final String UTILITY_THREAD_POOL_NAME = "inference_utility";
     private final Settings settings;
-    private final SetOnce<HttpRequestSenderFactory> httpRequestSenderFactory = new SetOnce<>();
     // We'll keep a reference to the http manager just in case the inference services don't get closed individually
     private final SetOnce<HttpClientManager> httpManager = new SetOnce<>();
-    private final SetOnce<ThrottlerManager> throttlerManager = new SetOnce<>();
+    private final SetOnce<HttpRequestSenderFactory> httpFactory = new SetOnce<>();
+    private final SetOnce<ServiceComponents> serviceComponents = new SetOnce<>();
 
     public InferencePlugin(Settings settings) {
         this.settings = settings;
@@ -100,12 +102,19 @@ public List<RestHandler> getRestHandlers(
 
     @Override
     public Collection<?> createComponents(PluginServices services) {
-        throttlerManager.set(new ThrottlerManager(settings, services.threadPool(), services.clusterService()));
+        var throttlerManager = new ThrottlerManager(settings, services.threadPool(), services.clusterService());
+        serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings));
 
-        httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager.get()));
-        httpRequestSenderFactory.set(
-            new HttpRequestSenderFactory(services.threadPool(), httpManager.get(), services.clusterService(), settings)
+        httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager));
+
+        var httpRequestSenderFactory = new HttpRequestSenderFactory(
+            services.threadPool(),
+            httpManager.get(),
+            services.clusterService(),
+            settings
        );
+        httpFactory.set(httpRequestSenderFactory);
+
         ModelRegistry modelRegistry = new ModelRegistry(services.client());
         return List.of(modelRegistry);
     }
@@ -157,7 +166,8 @@ public List<Setting<?>> getSettings() {
             HttpSettings.getSettings(),
             HttpClientManager.getSettings(),
             HttpRequestSenderFactory.HttpRequestSender.getSettings(),
-            ThrottlerManager.getSettings()
+            ThrottlerManager.getSettings(),
+            RetrySettings.getSettingsDefinitions()
         ).flatMap(Collection::stream).collect(Collectors.toList());
     }
 
@@ -173,7 +183,7 @@ public String getFeatureDescription() {
 
     @Override
     public List<Factory> getInferenceServiceFactories() {
-        return List.of(ElserMlNodeService::new, context -> new HuggingFaceElserService(httpRequestSenderFactory, throttlerManager));
+        return List.of(ElserMlNodeService::new, context -> new HuggingFaceElserService(httpFactory, serviceComponents));
     }
 
     @Override
@@ -183,6 +193,9 @@ public List<NamedWriteableRegistry.Entry> getInferenceServiceNamedWriteables() {
 
     @Override
     public void close() {
-        IOUtils.closeWhileHandlingException(httpManager.get(), throttlerManager.get());
+        var serviceComponentsRef = serviceComponents.get();
+        var throttlerToClose = serviceComponentsRef != null ? serviceComponentsRef.throttlerManager() : null;
+
+        IOUtils.closeWhileHandlingException(httpManager.get(), throttlerToClose);
     }
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java
index 7938c2abd8d99..5fc11464bfa2e 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.inference.action;
 
 import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
@@ -24,6 +25,7 @@
 import org.elasticsearch.xcontent.XContentParser;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
@@ -44,7 +46,7 @@ public static class Request extends ActionRequest {
 
         static final ObjectParser<Request.Builder, Void> PARSER = new ObjectParser<>(NAME, Request.Builder::new);
         static {
             // TODO timeout
-            PARSER.declareString(Request.Builder::setInput, INPUT);
+            PARSER.declareStringArray(Request.Builder::setInput, INPUT);
             PARSER.declareObject(Request.Builder::setTaskSettings, (p, c) -> p.mapOrdered(), TASK_SETTINGS);
         }
 
@@ -57,10 +59,10 @@ public static Request parseRequest(String modelId, String taskType, XContentParser parser) {
 
         private final TaskType taskType;
         private final String modelId;
-        private final String input;
+        private final List<String> input;
         private final Map<String, Object> taskSettings;
 
-        public Request(TaskType taskType, String modelId, String input, Map<String, Object> taskSettings) {
+        public Request(TaskType taskType, String modelId, List<String> input, Map<String, Object> taskSettings) {
             this.taskType = taskType;
             this.modelId = modelId;
             this.input = input;
@@ -71,7 +73,11 @@ public Request(StreamInput in) throws IOException {
             super(in);
             this.taskType = TaskType.fromStream(in);
             this.modelId = in.readString();
-            this.input = in.readString();
+            if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) {
+                this.input = in.readStringCollectionAsList();
+            } else {
+                this.input = List.of(in.readString());
+            }
             this.taskSettings = in.readMap();
         }
 
@@ -83,7 +89,7 @@ public String getModelId() {
             return modelId;
         }
 
-        public String getInput() {
+        public List<String> getInput() {
             return input;
         }
 
@@ -98,6 +104,11 @@ public ActionRequestValidationException validate() {
                 e.addValidationError("missing input");
                 return e;
             }
+            if (input.isEmpty()) {
+                var e = new ActionRequestValidationException();
+                e.addValidationError("input array is empty");
+                return e;
+            }
             return null;
         }
 
@@ -106,7 +117,11 @@ public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
             taskType.writeTo(out);
             out.writeString(modelId);
-            out.writeString(input);
+            if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) {
+                out.writeStringCollection(input);
+            } else {
+                out.writeString(input.get(0));
+            }
             out.writeGenericMap(taskSettings);
         }
 
@@ -130,7 +145,7 @@ public static class Builder {
 
            private TaskType taskType;
            private String modelId;
-            private String input;
+            private List<String> input;
             private Map<String, Object> taskSettings = Map.of();
 
             private Builder() {}
@@ -150,7 +165,7 @@ public Builder setTaskType(String taskTypeStr) {
                 return this;
             }
 
-            public Builder setInput(String input) {
+            public Builder setInput(List<String> input) {
                 this.input = input;
                 return this;
             }
@@ -168,30 +183,45 @@ public Request build() {
 
     public static class Response extends ActionResponse implements ToXContentObject {
 
-        private final InferenceResults result;
+        private final List<InferenceResults> results;
 
-        public Response(InferenceResults result) {
-            this.result = result;
+        public Response(List<InferenceResults> results) {
+            this.results = results;
         }
 
         public Response(StreamInput in) throws IOException {
             super(in);
-            result = in.readNamedWriteable(InferenceResults.class);
+            if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) {
+                results = in.readNamedWriteableCollectionAsList(InferenceResults.class);
+            } else {
+                results = List.of(in.readNamedWriteable(InferenceResults.class));
+            }
         }
 
-        public InferenceResults getResult() {
-            return result;
+        public List<InferenceResults> getResults() {
+            return results;
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeNamedWriteable(result);
+            if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) {
+                out.writeNamedWriteableCollection(results);
+            } else {
+                out.writeNamedWriteable(results.get(0));
+            }
         }
 
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
-            result.toXContent(builder, params);
+            builder.startArray("inference_results"); // TODO what is the name of this field?
+            for (var result : results) {
+                // inference results implement ToXContentFragment
+                builder.startObject();
+                result.toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endArray();
             builder.endObject();
             return builder;
         }
@@ -201,12 +231,12 @@ public boolean equals(Object o) {
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Response response = (Response) o;
-            return Objects.equals(result, response.result);
+            return Objects.equals(results, response.results);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(result);
+            return Objects.hash(results);
         }
     }
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
index 29909163d7b3b..7718739420cf1 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
@@ -83,8 +83,8 @@ private void inferOnService(
         InferenceService service,
         ActionListener<InferenceAction.Response> listener
     ) {
-        service.infer(model, request.getInput(), request.getTaskSettings(), ActionListener.wrap(inferenceResult -> {
-            listener.onResponse(new InferenceAction.Response(inferenceResult));
+        service.infer(model, request.getInput(), request.getTaskSettings(), ActionListener.wrap(inferenceResults -> {
+            listener.onResponse(new InferenceAction.Response(inferenceResults));
         }, listener::onFailure));
     }
 }
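Both Request and Response above use the same backwards-compatibility idiom: gate the new list encoding on the remote node's transport version and fall back to the old single-value encoding otherwise. A self-contained sketch of that idiom (plain DataOutputStream/DataInputStream and the version constant are stand-ins for StreamOutput/StreamInput and TransportVersions.INFERENCE_MULTIPLE_INPUTS):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    final class WireBwcSketch {
        static final int MULTIPLE_INPUTS_VERSION = 2; // hypothetical wire version id

        static void writeInput(DataOutputStream out, int remoteVersion, List<String> input) throws IOException {
            if (remoteVersion >= MULTIPLE_INPUTS_VERSION) {
                out.writeInt(input.size());             // new nodes receive the whole list
                for (String s : input) out.writeUTF(s);
            } else {
                out.writeUTF(input.get(0));             // old nodes only ever knew one string
            }
        }

        static List<String> readInput(DataInputStream in, int remoteVersion) throws IOException {
            if (remoteVersion >= MULTIPLE_INPUTS_VERSION) {
                int n = in.readInt();
                List<String> input = new ArrayList<>(n);
                for (int i = 0; i < n; i++) input.add(in.readUTF());
                return input;
            }
            return List.of(in.readUTF());               // wrap the legacy single value in a list
        }
    }

Note the asymmetry the real code shares: when talking to an old node only input.get(0) survives the round trip, which is one reason the request validator rejects empty input lists.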
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java
index bc52a04ab7209..a7da146ffd5df 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java
@@ -10,9 +10,11 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.inference.InferenceResults;
 
+import java.util.List;
+
 /**
  * Defines an inference request to a 3rd party service. The success or failure response is communicated through the provided listener.
  */
 public interface ExecutableAction {
-    void execute(String input, ActionListener<InferenceResults> listener);
+    void execute(List<String> input, ActionListener<List<InferenceResults>> listener);
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java
index acc3ab57ce9eb..3da9f92e0dece 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java
@@ -9,6 +9,7 @@
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.inference.InferenceResults;
 import org.elasticsearch.rest.RestStatus;
@@ -18,30 +19,54 @@
 import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceClient;
 import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest;
 import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestEntity;
-import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
+import org.elasticsearch.xpack.inference.services.ServiceComponents;
 import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel;
 
+import java.util.List;
+
+import static org.elasticsearch.core.Strings.format;
+
 public class HuggingFaceElserAction implements ExecutableAction {
 
     private final HuggingFaceAccount account;
     private final HuggingFaceClient client;
 
-    public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ThrottlerManager throttlerManager) {
-        this.client = new HuggingFaceClient(sender, throttlerManager);
+    public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ServiceComponents serviceComponents) {
+        this.client = new HuggingFaceClient(sender, serviceComponents);
         this.account = new HuggingFaceAccount(model.getServiceSettings().uri(), model.getSecretSettings().apiKey());
     }
 
-    public void execute(String input, ActionListener<InferenceResults> listener) {
+    @Override
+    public void execute(List<String> input, ActionListener<List<InferenceResults>> listener) {
         try {
             HuggingFaceElserRequest request = new HuggingFaceElserRequest(account, new HuggingFaceElserRequestEntity(input));
-            client.send(request, listener);
+            ActionListener<List<InferenceResults>> wrapFailuresInElasticsearchExceptionListener = ActionListener.wrap(
+                listener::onResponse,
+                e -> {
+                    var unwrappedException = ExceptionsHelper.unwrapCause(e);
+
+                    if (unwrappedException instanceof ElasticsearchException esException) {
+                        listener.onFailure(esException);
+                    } else {
+                        listener.onFailure(createInternalServerError(unwrappedException));
+                    }
+                }
+            );
+
+            client.send(request,
wrapFailuresInElasticsearchExceptionListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { - listener.onFailure( - new ElasticsearchStatusException("Failed to send request ELSER Hugging Face request", RestStatus.INTERNAL_SERVER_ERROR, e) - ); + listener.onFailure(createInternalServerError(e)); } } + + private ElasticsearchStatusException createInternalServerError(Throwable e) { + return new ElasticsearchStatusException( + format("Failed to send ELSER Hugging Face request to [%s]", account.url()), + RestStatus.INTERNAL_SERVER_ERROR, + e + ); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java index 1dac8153da4f1..73d1fa1c32568 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java @@ -30,6 +30,9 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; +/** + * Provides a wrapper around a {@link CloseableHttpAsyncClient} to move the responses to a separate thread for processing. + */ public class HttpClient implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClient.class); @@ -92,7 +95,7 @@ public void completed(HttpResponse response) { @Override public void failed(Exception ex) { - throttlerManager.getThrottler().warn(logger, format("Request [%s] failed", request.getRequestLine()), ex); + throttlerManager.warn(logger, format("Request [%s] failed", request.getRequestLine()), ex); failUsingUtilityThread(ex, listener); } @@ -108,7 +111,7 @@ private void respondUsingUtilityThread(HttpResponse response, HttpUriRequest req try { listener.onResponse(HttpResult.create(settings.getMaxResponseSize(), response)); } catch (Exception e) { - throttlerManager.getThrottler().warn(logger, format("Failed to create http result for [%s]", request.getRequestLine()), e); + throttlerManager.warn(logger, format("Failed to create http result for [%s]", request.getRequestLine()), e); listener.onFailure(e); } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index 494e0f7c60dff..7cc4a3cb24502 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -38,9 +38,8 @@ public class HttpClientManager implements Closeable { public static final Setting<Integer> MAX_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_connections", // TODO pick a reasonable values here - 20, - 1, - 1000, + 20, // default + 1, // min Setting.Property.NodeScope, Setting.Property.Dynamic ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java index 82256b51cf83e..6c79daa2dedc0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java @@ -43,4 +43,8 @@ private static byte[] limitBody(ByteSizeValue maxResponseSize, HttpResponse resp Objects.requireNonNull(response); Objects.requireNonNull(body); } + + public boolean isBodyEmpty() { + return body().length == 0; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java new file mode 100644 index 0000000000000..b6dbc6d6f2911 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import static org.elasticsearch.core.Strings.format; + +public class HttpUtils { + + public static void checkForFailureStatusCode( + ThrottlerManager throttlerManager, + Logger logger, + HttpRequestBase request, + HttpResult result + ) { + if (result.response().getStatusLine().getStatusCode() >= 300) { + String message = getStatusCodeErrorMessage(request, result); + + throttlerManager.warn(logger, message); + + throw new IllegalStateException(message); + } + } + + private static String getStatusCodeErrorMessage(HttpRequestBase request, HttpResult result) { + int statusCode = result.response().getStatusLine().getStatusCode(); + + if (statusCode >= 400) { + return format( + "Received a failure status code for request [%s] status [%s]", + request.getRequestLine(), + result.response().getStatusLine().getStatusCode() + ); + } else if (statusCode >= 300) { + return format( + "Unhandled redirection for request [%s] status [%s]", + request.getRequestLine(), + result.response().getStatusLine().getStatusCode() + ); + } else { + return ""; + } + } + + public static void checkForEmptyBody(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) { + if (result.isBodyEmpty()) { + String message = format("Response body was empty for request [%s]", request.getRequestLine()); + throttlerManager.warn(logger, message); + throw new IllegalStateException(message); + } + } + + private HttpUtils() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictor.java index 295c9b7b17946..e8f25db127922 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictor.java @@ -10,6 +10,7 @@ import org.apache.http.nio.conn.NHttpClientConnectionManager; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; @@ -45,7 
+46,7 @@ public IdleConnectionEvictor( ThreadPool threadPool, NHttpClientConnectionManager connectionManager, TimeValue sleepTime, - TimeValue maxIdleTime + @Nullable TimeValue maxIdleTime ) { this.threadPool = Objects.requireNonNull(threadPool); this.connectionManager = Objects.requireNonNull(connectionManager); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java new file mode 100644 index 0000000000000..5efb6e9003fba --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForFailureStatusCode; + +/** + * Provides a {@link ResponseHandler} which flags all errors as retryable. + */ +public class AlwaysRetryingResponseHandler implements ResponseHandler { + protected final String requestType; + private final CheckedFunction<HttpResult, List<InferenceResults>, IOException> parseFunction; + + public AlwaysRetryingResponseHandler( + String requestType, + CheckedFunction<HttpResult, List<InferenceResults>, IOException> parseFunction + ) { + this.requestType = Objects.requireNonNull(requestType); + this.parseFunction = Objects.requireNonNull(parseFunction); + } + + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException { + try { + checkForFailureStatusCode(throttlerManager, logger, request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } catch (Exception e) { + throw new RetryException(true, e); + } + } + + public String getRequestType() { + return requestType; + } + + @Override + public List<InferenceResults> parseResult(HttpResult result) throws RetryException { + try { + return parseFunction.apply(result); + } catch (Exception e) { + throw new RetryException(true, e); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java new file mode 100644 index 0000000000000..f7c98e2ecd2e5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.util.List; + +/** + * A contract for clients to specify behavior for handling http responses. Clients can pass this contract to the retry sender to parse + * the response and help with logging. + */ +public interface ResponseHandler { + + /** + * A method for checking the response from the 3rd party service. This could check the status code and that the response body + * is in the correct form. + * + * @param throttlerManager a throttler for the logs + * @param logger the logger to use for logging + * @param request the original request + * @param result the response from the server + * @throws RetryException if the response is invalid + */ + void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException; + + /** + * A method for parsing the response from the server. + * @param result The wrapped response from the server. + * @return the parsed inference results + * @throws RetryException if a parsing error occurs + */ + List<InferenceResults> parseResult(HttpResult result) throws RetryException; + + /** + * A string to uniquely identify the type of request that is being handled. This allows loggers to clarify which type of request + * might have failed. + * + * @return a {@link String} indicating the request type that was sent (e.g. elser, elser hugging face etc) + */ + String getRequestType(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java new file mode 100644 index 0000000000000..4688b15ab86f4 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; + +import java.util.List; + +public interface Retrier { + void send(HttpRequestBase request, ResponseHandler responseHandler, ActionListener<List<InferenceResults>> listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java new file mode 100644 index 0000000000000..3fe8225927f06 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchWrapperException; + +public class RetryException extends ElasticsearchException implements ElasticsearchWrapperException { + private final boolean shouldRetry; + + public RetryException(boolean shouldRetry, Throwable cause) { + super(cause); + this.shouldRetry = shouldRetry; + } + + public RetryException(boolean shouldRetry, String msg) { + super(msg); + this.shouldRetry = shouldRetry; + } + + public RetryException(boolean shouldRetry, String msg, Throwable cause) { + super(msg, cause); + this.shouldRetry = shouldRetry; + } + + public boolean shouldRetry() { + return shouldRetry; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettings.java new file mode 100644 index 0000000000000..040903a35ab08 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettings.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; + +import java.util.List; + +public class RetrySettings { + + public static final Setting<TimeValue> RETRY_INITIAL_DELAY_SETTING = Setting.timeSetting( + "xpack.inference.http.retry.initial_delay", + TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static final Setting<TimeValue> RETRY_MAX_DELAY_BOUND_SETTING = Setting.timeSetting( + "xpack.inference.http.retry.max_delay_bound", + TimeValue.timeValueSeconds(5), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static final Setting<TimeValue> RETRY_TIMEOUT_SETTING = Setting.timeSetting( + "xpack.inference.http.retry.timeout", + TimeValue.timeValueSeconds(30), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private final InternalSettings internalSettings; + + public RetrySettings(Settings settings) { + var initialDelay = RETRY_INITIAL_DELAY_SETTING.get(settings); + var maxDelayBound = RETRY_MAX_DELAY_BOUND_SETTING.get(settings); + var timeoutValue = RETRY_TIMEOUT_SETTING.get(settings); + this.internalSettings = new InternalSettings(initialDelay, maxDelayBound, timeoutValue); + } + + public record InternalSettings(TimeValue initialDelay, TimeValue maxDelayBound, TimeValue timeoutValue) {} + + public InternalSettings getSettings() { + return internalSettings; + } + + public static List<Setting<?>> getSettingsDefinitions() { + return List.of(RETRY_INITIAL_DELAY_SETTING, RETRY_MAX_DELAY_BOUND_SETTING, RETRY_TIMEOUT_SETTING); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java new file mode 100644 index 0000000000000..dabd78fe86885 --- 
/dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RetryableAction; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.Executor; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +public class RetryingHttpSender implements Retrier { + private final Sender sender; + private final ThrottlerManager throttlerManager; + private final Logger logger; + private final RetrySettings retrySettings; + private final ThreadPool threadPool; + private final Executor executor; + + public RetryingHttpSender( + Sender sender, + ThrottlerManager throttlerManager, + Logger logger, + RetrySettings retrySettings, + ThreadPool threadPool + ) { + this(sender, throttlerManager, logger, retrySettings, threadPool, threadPool.executor(UTILITY_THREAD_POOL_NAME)); + } + + // For testing only + RetryingHttpSender( + Sender sender, + ThrottlerManager throttlerManager, + Logger logger, + RetrySettings retrySettings, + ThreadPool threadPool, + Executor executor + ) { + this.sender = Objects.requireNonNull(sender); + this.throttlerManager = Objects.requireNonNull(throttlerManager); + this.logger = Objects.requireNonNull(logger); + this.retrySettings = Objects.requireNonNull(retrySettings); + this.threadPool = Objects.requireNonNull(threadPool); + this.executor = Objects.requireNonNull(executor); + } + + private class InternalRetrier extends RetryableAction<List<InferenceResults>> { + private final HttpRequestBase request; + private final ResponseHandler responseHandler; + + InternalRetrier( + HttpRequestBase request, + ResponseHandler responseHandler, + ActionListener<List<InferenceResults>> listener + ) { + super( + logger, + threadPool, + retrySettings.getSettings().initialDelay(), + retrySettings.getSettings().maxDelayBound(), + retrySettings.getSettings().timeoutValue(), + listener, + executor + ); + this.request = request; + this.responseHandler = responseHandler; + } + + @Override + public void tryAction(ActionListener<List<InferenceResults>> listener) { + ActionListener<HttpResult> responseListener = ActionListener.wrap(result -> { + try { + responseHandler.validateResponse(throttlerManager, logger, request, result); + List<InferenceResults> inferenceResults = responseHandler.parseResult(result); + + listener.onResponse(inferenceResults); + } catch (Exception e) { + logException(request, result, responseHandler.getRequestType(), e); + listener.onFailure(e); + } + }, e -> { + logException(request, responseHandler.getRequestType(), e); + listener.onFailure(transformIfRetryable(e)); + }); + + sender.send(request, 
responseListener); + } + + @Override + public boolean shouldRetry(Exception e) { + if (e instanceof RetryException) { + return ((RetryException) e).shouldRetry(); + } + + return false; + } + + /** + * If the connection gets closed by the server or because of the connections time to live is exceeded we'll likely get a + * {@link org.apache.http.ConnectionClosedException} exception which is a child of IOException. For now, + * we'll consider all IOExceptions retryable because something failed while we were trying to send the request + * @param e the Exception received while sending the request + * @return a {@link RetryException} if this exception can be retried + */ + private Exception transformIfRetryable(Exception e) { + var exceptionToReturn = e; + if (e instanceof IOException) { + exceptionToReturn = new RetryException(true, e); + } + + return exceptionToReturn; + } + } + + @Override + public void send(HttpRequestBase request, ResponseHandler responseHandler, ActionListener> listener) { + InternalRetrier retrier = new InternalRetrier(request, responseHandler, listener); + retrier.run(); + } + + private void logException(HttpRequestBase request, String requestType, Exception exception) { + var causeException = ExceptionsHelper.unwrapCause(exception); + + throttlerManager.warn( + logger, + format("Failed while sending request [%s] of type [%s]", request.getRequestLine(), requestType), + causeException + ); + } + + private void logException(HttpRequestBase request, HttpResult result, String requestType, Exception exception) { + var causeException = ExceptionsHelper.unwrapCause(exception); + + throttlerManager.warn( + logger, + format( + "Failed to process the response for request [%s] of type [%s] with status [%s] [%s]", + request.getRequestLine(), + requestType, + result.response().getStatusLine().getStatusCode(), + result.response().getStatusLine().getReasonPhrase() + ), + causeException + ); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java index 328afb264c4ab..1e066410506bc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java @@ -59,27 +59,41 @@ class HttpRequestExecutorService implements ExecutorService { private final HttpClientContext httpContext; private final HttpClient httpClient; private final ThreadPool threadPool; + private final CountDownLatch startupLatch; @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService(String serviceName, HttpClient httpClient, ThreadPool threadPool) { - this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>()); + HttpRequestExecutorService(String serviceName, HttpClient httpClient, ThreadPool threadPool, @Nullable CountDownLatch startupLatch) { + this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>(), startupLatch); } @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService(String serviceName, HttpClient httpClient, ThreadPool threadPool, int capacity) { - this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>(capacity)); + HttpRequestExecutorService( + String serviceName, + 
HttpClient httpClient, + ThreadPool threadPool, + int capacity, + @Nullable CountDownLatch startupLatch + ) { + this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>(capacity), startupLatch); } /** * This constructor should only be used directly for testing. */ @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService(String serviceName, HttpClient httpClient, ThreadPool threadPool, BlockingQueue<HttpTask> queue) { + HttpRequestExecutorService( + String serviceName, + HttpClient httpClient, + ThreadPool threadPool, + BlockingQueue<HttpTask> queue, + @Nullable CountDownLatch startupLatch + ) { this.serviceName = Objects.requireNonNull(serviceName); this.httpClient = Objects.requireNonNull(httpClient); this.threadPool = Objects.requireNonNull(threadPool); this.httpContext = HttpClientContext.create(); this.queue = queue; + this.startupLatch = startupLatch; } /** @@ -87,6 +101,8 @@ class HttpRequestExecutorService implements ExecutorService { */ public void start() { try { + signalStartInitiated(); + while (running.get()) { handleTasks(); } @@ -99,6 +115,12 @@ public void start() { } } + private void signalStartInitiated() { + if (startupLatch != null) { + startupLatch.countDown(); + } + } + /** * Protects the task retrieval logic from an unexpected exception. * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java index 40adc9c4a8bea..c94d82e234c0c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java @@ -24,6 +24,8 @@ import java.io.IOException; import java.util.List; import java.util.Objects; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -60,6 +62,7 @@ public HttpRequestSender createSender(String serviceName) { */ public static final class HttpRequestSender implements Sender { private static final Logger logger = LogManager.getLogger(HttpRequestSender.class); + private static final TimeValue START_COMPLETED_WAIT_TIME = TimeValue.timeValueSeconds(5); /** * The maximum time a request can take. 
The timer starts once a request is enqueued and continues until a response is @@ -78,6 +81,7 @@ public static final class HttpRequestSender implements Sender { private final HttpRequestExecutorService service; private final AtomicBoolean started = new AtomicBoolean(false); private volatile TimeValue maxRequestTimeout; + private final CountDownLatch startCompleted = new CountDownLatch(2); private HttpRequestSender( String serviceName, @@ -88,7 +92,7 @@ private HttpRequestSender( ) { this.threadPool = Objects.requireNonNull(threadPool); this.manager = Objects.requireNonNull(httpClientManager); - service = new HttpRequestExecutorService(serviceName, manager.getHttpClient(), threadPool); + service = new HttpRequestExecutorService(serviceName, manager.getHttpClient(), threadPool, startCompleted); this.maxRequestTimeout = MAX_REQUEST_TIMEOUT.get(settings); addSettingsUpdateConsumers(clusterService); @@ -109,8 +113,11 @@ void setMaxRequestTimeout(TimeValue maxRequestTimeout) { */ public void start() { if (started.compareAndSet(false, true)) { + // The manager must be started before the executor service. That way we guarantee that the http client + // is ready prior to the service attempting to use the http client to send a request manager.start(); threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(service::start); + startCompleted.countDown(); } } @@ -130,9 +137,20 @@ public void close() throws IOException { */ public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener<HttpResult> listener) { assert started.get() : "call start() before sending a request"; + waitForStartToComplete(); service.send(request, timeout, listener); } + private void waitForStartToComplete() { + try { + if (startCompleted.await(START_COMPLETED_WAIT_TIME.getSeconds(), TimeUnit.SECONDS) == false) { + throw new IllegalStateException("Http sender startup did not complete in time"); + } + } catch (InterruptedException e) { + throw new IllegalStateException("Http sender interrupted while waiting for startup to complete"); + } + } + /** * Send a request at some point in the future. The timeout used is retrieved from the settings. 
* @param request the http request to send @@ -140,6 +158,7 @@ public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionLis */ public void send(HttpRequestBase request, ActionListener<HttpResult> listener) { assert started.get() : "call start() before sending a request"; + waitForStartToComplete(); service.send(request, maxRequestTimeout, listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 82ef0bcc7bab3..5875126190e5d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -131,17 +131,22 @@ public boolean isCancelled() { }; } - private record Command(HttpClient httpClient, HttpUriRequest request, HttpClientContext context, ActionListener<HttpResult> listener) - implements - Runnable { + private record Command( + HttpClient httpClient, + HttpUriRequest requestToSend, + HttpClientContext context, + ActionListener<HttpResult> resultListener + ) implements Runnable { @Override public void run() { try { - httpClient.send(request, context, listener); + httpClient.send(requestToSend, context, resultListener); } catch (Exception e) { - logger.warn(format("Failed to send request [%s] via the http client", request.getRequestLine()), e); - listener.onFailure(new ElasticsearchException(format("Failed to send request [%s]", request.getRequestLine()), e)); + logger.warn(format("Failed to send request [%s] via the http client", requestToSend.getRequestLine()), e); + resultListener.onFailure( + new ElasticsearchException(format("Failed to send request [%s]", requestToSend.getRequestLine()), e) + ); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java index ed6e5c200b367..b2e3f35e593b8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java @@ -7,46 +7,43 @@ package org.elasticsearch.xpack.inference.external.huggingface; -import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; +import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import 
org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; - -import static org.elasticsearch.core.Strings.format; +import java.util.List; public class HuggingFaceClient { private static final Logger logger = LogManager.getLogger(HuggingFaceClient.class); + private static final ResponseHandler ELSER_RESPONSE_HANDLER = createElserHandler(); + + private final RetryingHttpSender sender; + + public HuggingFaceClient(Sender sender, ServiceComponents serviceComponents) { + this.sender = new RetryingHttpSender( + sender, + serviceComponents.throttlerManager(), + logger, + new RetrySettings(serviceComponents.settings()), + serviceComponents.threadPool() + ); + } - private final ThrottlerManager throttlerManager; - - private final Sender sender; - - public HuggingFaceClient(Sender sender, ThrottlerManager throttlerManager) { - this.sender = sender; - this.throttlerManager = throttlerManager; + public void send(HuggingFaceElserRequest request, ActionListener<List<InferenceResults>> listener) throws IOException { + this.sender.send(request.createRequest(), ELSER_RESPONSE_HANDLER, listener); } - public void send(HuggingFaceElserRequest request, ActionListener<InferenceResults> listener) throws IOException { - HttpRequestBase httpRequest = request.createRequest(); - ActionListener<HttpResult> responseListener = ActionListener.wrap(response -> { - try { - listener.onResponse(HuggingFaceElserResponseEntity.fromResponse(response)); - } catch (Exception e) { - String msg = format("Failed to parse the Hugging Face ELSER response for request [%s]", httpRequest.getRequestLine()); - throttlerManager.getThrottler().warn(logger, msg, e); - listener.onFailure(new ElasticsearchException(msg, e)); - } - }, listener::onFailure); - - sender.send(httpRequest, responseListener); + private static ResponseHandler createElserHandler() { + return new AlwaysRetryingResponseHandler("elser hugging face", HuggingFaceElserResponseEntity::fromResponse); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java index f21bee923ecab..10ba249f9da7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java @@ -11,9 +11,10 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.List; import java.util.Objects; -public record HuggingFaceElserRequestEntity(String inputs) implements ToXContentObject { +public record HuggingFaceElserRequestEntity(List<String> inputs) implements ToXContentObject { private static final String INPUTS_FIELD = "inputs"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 2ac9eb44ed7fb..390195d738043 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -56,7 
+56,7 @@ public class HuggingFaceElserResponseEntity { * * */ - public static TextExpansionResults fromResponse(HttpResult response) throws IOException { + public static List<InferenceResults> fromResponse(HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { @@ -70,11 +70,11 @@ public static TextExpansionResults fromResponse(HttpResult response) throws IOEx ); if (parsedResponse.isEmpty()) { - return new TextExpansionResults(DEFAULT_RESULTS_FIELD, Collections.emptyList(), false); + return List.of(new TextExpansionResults(DEFAULT_RESULTS_FIELD, Collections.emptyList(), false)); } // we only handle a single response right now so just grab the first one - return parsedResponse.get(0); + return parsedResponse; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java index b1dee15a93bd7..0cf0e65eaba37 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java @@ -87,25 +87,22 @@ public void setDurationToWait(TimeValue durationToWait) { this.durationToWait = Duration.ofMillis(durationToWait.millis()); } - public void warn(Logger logger, String message, Throwable e) { - Objects.requireNonNull(message); - Objects.requireNonNull(e); - - if (isRunning.get()) { - logHelper(message, msgToAppend -> logger.warn(message.concat(msgToAppend), e)); + public void execute(String message, Consumer<String> consumer) { + if (isRunning.get() == false) { + return; } - } - private void logHelper(String message, Consumer<String> executor) { LogExecutor logExecutor = logExecutors.compute(message, (key, value) -> { if (value == null) { - return new LogExecutor(clock, executor); + return new LogExecutor(clock, consumer); } - return value.compute(executor, durationToWait); + return value.compute(consumer, durationToWait); }); - logExecutor.log(); + // This executes an internal consumer that wraps the passed-in one; it will either log the message passed here + // unchanged, do nothing if it is in the throttled period, or log this message plus some text saying how many times it was repeated + logExecutor.log(message); } @Override @@ -119,41 +116,38 @@ private static class LogExecutor { private final long skippedLogCalls; private final Instant timeOfLastLogCall; private final Clock clock; - private final Runnable logRunner; - - LogExecutor(Clock clock, Consumer<String> logAppendedMessage) { - skippedLogCalls = 0; - timeOfLastLogCall = Instant.now(clock); - this.clock = clock; - // The first log message can log the original message without waiting - this.logRunner = () -> logAppendedMessage.accept(""); + private final Consumer<String> consumer; + + LogExecutor(Clock clock, Consumer<String> throttledConsumer) { + this(clock, 0, throttledConsumer); } - LogExecutor(Clock clock, long skippedLogCalls, Runnable logRunner) { + LogExecutor(Clock clock, long skippedLogCalls, Consumer<String> consumer) { this.skippedLogCalls = skippedLogCalls; - timeOfLastLogCall = Instant.now(clock); - this.clock = clock; - this.logRunner = logRunner; + this.clock = Objects.requireNonNull(clock); + timeOfLastLogCall = Instant.now(this.clock); + this.consumer = Objects.requireNonNull(consumer); } - 
void log() { - this.logRunner.run(); + void log(String message) { + this.consumer.accept(message); } LogExecutor compute(Consumer<String> executor, Duration durationToWait) { if (hasDurationExpired(durationToWait)) { - String msg = ""; + String messageToAppend = ""; if (this.skippedLogCalls == 1) { - msg = ", repeated 1 time"; + messageToAppend = ", repeated 1 time"; } else if (this.skippedLogCalls > 1) { - msg = format(", repeated %s times", this.skippedLogCalls); + messageToAppend = format(", repeated %s times", this.skippedLogCalls); } - String finalMsg = msg; - return new LogExecutor(this.clock, 0, () -> executor.accept(finalMsg)); + final String stringToAppend = messageToAppend; + return new LogExecutor(this.clock, 0, (message) -> executor.accept(message.concat(stringToAppend))); } - return new LogExecutor(this.clock, this.skippedLogCalls + 1, () -> {}); + // This creates a consumer that won't do anything because the original consumer is being throttled + return new LogExecutor(this.clock, this.skippedLogCalls + 1, (message) -> {}); } private boolean hasDurationExpired(Duration durationToWait) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java index 6c38c341a0401..2a84494d6af21 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.logging; +import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -78,10 +79,24 @@ void setResetInterval(TimeValue resetInterval) { throttler = new Throttler(loggerSettings.resetInterval(), loggerSettings.waitDuration(), threadPool); } - public Throttler getThrottler() { + // default for testing + Throttler getThrottler() { return throttler; } + public void warn(Logger logger, String message, Throwable e) { + Objects.requireNonNull(message); + Objects.requireNonNull(e); + + throttler.execute(message, messageToLog -> logger.warn(messageToLog, e)); + } + + public void warn(Logger logger, String message) { + Objects.requireNonNull(message); + + throttler.execute(message, logger::warn); + } + @Override public void close() { throttler.close(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java new file mode 100644 index 0000000000000..bff1ce70b6e13 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +/** + * A container for common components needed at various levels of the inference services to instantiate their internals + */ +public record ServiceComponents(ThreadPool threadPool, ThrottlerManager throttlerManager, Settings settings) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 57f5acbebd05b..0808bfc8a5e96 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; -import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import java.io.IOException; @@ -157,7 +156,12 @@ public void start(Model model, ActionListener<Boolean> listener) { } @Override - public void infer(Model model, String input, Map<String, Object> taskSettings, ActionListener<InferenceResults> listener) { + public void infer( + Model model, + List<String> input, + Map<String, Object> taskSettings, + ActionListener<List<InferenceResults>> listener + ) { // No task settings to override with requestTaskSettings if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { @@ -173,12 +177,11 @@ public void infer(Model model, String input, Map<String, Object> taskSettings, A var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getModelId(), TextExpansionConfigUpdate.EMPTY_UPDATE, - List.of(input), + input, TimeValue.timeValueSeconds(10) // TODO get timeout from request ); client.execute(InferTrainedModelDeploymentAction.INSTANCE, request, ActionListener.wrap(inferenceResult -> { - var textExpansionResult = (TextExpansionResults) inferenceResult.getResults().get(0); - listener.onResponse(textExpansionResult); + listener.onResponse(inferenceResult.getResults()); }, listener::onFailure)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index e25315b6bbaf0..9e4407945d775 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -24,9 +24,10 @@ import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceElserAction; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; +import java.util.List; import java.util.Map; 
import java.util.Objects; import java.util.Set; @@ -39,15 +40,15 @@ public class HuggingFaceElserService implements InferenceService { public static final String NAME = "hugging_face_elser"; private final SetOnce<HttpRequestSenderFactory> factory; - private final SetOnce<ThrottlerManager> throttlerManager; + private final SetOnce<ServiceComponents> serviceComponents; private final AtomicReference<Sender> sender = new AtomicReference<>(); // This is initialized once which assumes that the settings will not change. To change the service, it // should be deleted and then added again private final AtomicReference<HuggingFaceElserAction> action = new AtomicReference<>(); - public HuggingFaceElserService(SetOnce<HttpRequestSenderFactory> factory, SetOnce<ThrottlerManager> throttlerManager) { + public HuggingFaceElserService(SetOnce<HttpRequestSenderFactory> factory, SetOnce<ServiceComponents> serviceComponents) { this.factory = Objects.requireNonNull(factory); - this.throttlerManager = Objects.requireNonNull(throttlerManager); + this.serviceComponents = Objects.requireNonNull(serviceComponents); } @Override @@ -90,7 +91,12 @@ public HuggingFaceElserModel parsePersistedConfig( } @Override - public void infer(Model model, String input, Map<String, Object> taskSettings, ActionListener<InferenceResults> listener) { + public void infer( + Model model, + List<String> input, + Map<String, Object> taskSettings, + ActionListener<List<InferenceResults>> listener + ) { if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { listener.onFailure( new ElasticsearchStatusException( @@ -115,7 +121,6 @@ public void infer(Model model, String input, Map<String, Object> taskSettings, A public void start(Model model, ActionListener<Boolean> listener) { try { init(model); - sender.get().start(); listener.onResponse(true); } catch (Exception e) { listener.onFailure(new ElasticsearchException("Failed to start service", e)); @@ -133,12 +138,13 @@ private void init(Model model) { } sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); + sender.get().start(); HuggingFaceElserModel huggingFaceElserModel = (HuggingFaceElserModel) model; action.updateAndGet( current -> Objects.requireNonNullElseGet( current, - () -> new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, throttlerManager.get()) + () -> new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, serviceComponents.get()) ) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index 3e1bea0051656..d263cf8c776ea 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -11,10 +11,14 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; + public class InferenceActionRequestTests extends AbstractWireSerializingTestCase<InferenceAction.Request> { @Override @@ -27,11 +31,33 @@ protected InferenceAction.Request createTestInstance() { return new InferenceAction.Request( randomFrom(TaskType.values()), randomAlphaOfLength(6), - randomAlphaOfLength(8), + randomList(1, 5, () -> randomAlphaOfLength(8)), randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) ); } + public void testParsing() 
throws IOException { + String singleInputRequest = """ + { + "input": "single text input" + } + """; + try (var parser = createParser(JsonXContent.jsonXContent, singleInputRequest)) { + var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + assertThat(request.getInput(), contains("single text input")); + } + + String multiInputRequest = """ + { + "input": ["an array", "of", "inputs"] + } + """; + try (var parser = createParser(JsonXContent.jsonXContent, multiInputRequest)) { + var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + assertThat(request.getInput(), contains("an array", "of", "inputs")); + } + } + @Override protected InferenceAction.Request mutateInstance(InferenceAction.Request instance) throws IOException { int select = randomIntBetween(0, 3); @@ -46,12 +72,11 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc instance.getInput(), instance.getTaskSettings() ); - case 2 -> new InferenceAction.Request( - instance.getTaskType(), - instance.getModelId(), - instance.getInput() + "bar", - instance.getTaskSettings() - ); + case 2 -> { + var changedInputs = new ArrayList(instance.getInput()); + changedInputs.add("bar"); + yield new InferenceAction.Request(instance.getTaskType(), instance.getModelId(), changedInputs, instance.getTaskSettings()); + } case 3 -> { var taskSettings = new HashMap<>(instance.getTaskSettings()); if (taskSettings.isEmpty()) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java index 795923e56c6bb..fbe1f57c7e9f7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -35,7 +35,7 @@ protected Writeable.Reader instanceReader() { @Override protected InferenceAction.Response createTestInstance() { - return new InferenceAction.Response(TextExpansionResultsTests.createRandomResults()); + return new InferenceAction.Response(List.of(TextExpansionResultsTests.createRandomResults())); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java index 9809acf536c86..ca9e6a438d6d7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java @@ -9,6 +9,7 @@ import org.apache.http.HttpHeaders; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -21,9 +22,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.HttpResult; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; @@ -31,18 +34,23 @@ import org.junit.Before; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -66,6 +74,7 @@ public void shutdown() throws IOException { webServer.close(); } + @SuppressWarnings("unchecked") public void testExecute_ReturnsSuccessfulResponse() throws IOException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); @@ -83,10 +92,10 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var action = createAction(getUrl(webServer), sender); - PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>(); - action.execute("abc", listener); + PlainActionFuture<List<InferenceResults>> listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); - InferenceResults result = listener.actionGet(TIMEOUT); + var result = listener.actionGet(TIMEOUT).get(0); assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f)))); assertThat(webServer.requests(), hasSize(1)); @@ -99,7 +108,9 @@ ... var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), is("abc")); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List<String>) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); } } @@ -110,32 +121,53 @@ public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOExcept } } - public void testExecute_ThrowsElasticsearchException() { + public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { var sender = mock(Sender.class); doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); var action = createAction(getUrl(webServer), sender); - PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>(); - action.execute("abc", listener); + PlainActionFuture<List<InferenceResults>> listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); var 
thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is("failed")); } + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var action = createAction(getUrl(webServer), sender); + + PlainActionFuture> listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); + } + public void testExecute_ThrowsException() { var sender = mock(Sender.class); doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); var action = createAction(getUrl(webServer), sender); - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute("abc", listener); + PlainActionFuture> listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is("Failed to send request ELSER Hugging Face request")); + assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); } private HuggingFaceElserAction createAction(String url, Sender sender) { @@ -147,6 +179,6 @@ private HuggingFaceElserAction createAction(String url, Sender sender) { new HuggingFaceElserSecretSettings(new SecureString("secret".toCharArray())) ); - return new HuggingFaceElserAction(sender, model, mock(ThrottlerManager.class)); + return new HuggingFaceElserAction(sender, model, new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java new file mode 100644 index 0000000000000..affbd43958e29 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForFailureStatusCode; +import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HttpUtilsTests extends ESTestCase { + public void testCheckForFailureStatusCode_ThrowsWhenStatusCodeIs300() { + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(300); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var result = new HttpResult(httpResponse, new byte[0]); + + var thrownException = expectThrows( + IllegalStateException.class, + () -> checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result) + ); + + assertThat(thrownException.getMessage(), is("Unhandled redirection for request [null] status [300]")); + } + + public void testCheckForFailureStatusCode_DoesNotThrowWhenStatusCodeIs200() { + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(200); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var result = new HttpResult(httpResponse, new byte[0]); + + checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result); + } + + public void testCheckForEmptyBody_DoesNotThrowWhenTheBodyIsNotEmpty() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var result = new HttpResult(httpResponse, new byte[] { 'a' }); + + checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result); + } + + public void testCheckForEmptyBody_ThrowsWhenTheBodyIsEmpty() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var result = new HttpResult(httpResponse, new byte[0]); + + var thrownException = expectThrows( + IllegalStateException.class, + () -> checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(HttpRequestBase.class), result) + ); + + assertThat(thrownException.getMessage(), is("Response body was empty for request [null]")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java index 22c36fe38a25c..b433306ec8261 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -47,7 +48,8 @@ public static ClusterService mockClusterService(Settings settings) { HttpSettings.getSettings(), HttpClientManager.getSettings(), HttpRequestSenderFactory.HttpRequestSender.getSettings(), - ThrottlerManager.getSettings() + ThrottlerManager.getSettings(), + RetrySettings.getSettingsDefinitions() ).flatMap(Collection::stream).collect(Collectors.toSet()); var cSettings = new ClusterSettings(settings, registeredSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettingsTests.java new file mode 100644 index 0000000000000..940205a663337 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetrySettingsTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; + +public class RetrySettingsTests extends ESTestCase { + + /** + * Creates a {@link RetrySettings} object with initial delay of 1 millisecond, max delay bound of 1 millisecond, + * and timeout of 30 seconds + */ + public static RetrySettings createDefaultRetrySettings() { + return createRetrySettings(TimeValue.timeValueMillis(1), TimeValue.timeValueMillis(1), TimeValue.timeValueSeconds(30)); + } + + public static RetrySettings createRetrySettings(TimeValue initialDelay, TimeValue maxDelayBound, TimeValue timeout) { + var settings = buildSettingsWithRetryFields(initialDelay, maxDelayBound, timeout); + + return new RetrySettings(settings); + } + + public static Settings buildSettingsWithRetryFields(TimeValue initialDelay, TimeValue maxDelayBound, TimeValue timeout) { + return Settings.builder() + .put(RetrySettings.RETRY_INITIAL_DELAY_SETTING.getKey(), initialDelay) + .put(RetrySettings.RETRY_MAX_DELAY_BOUND_SETTING.getKey(), maxDelayBound) + .put(RetrySettings.RETRY_TIMEOUT_SETTING.getKey(), timeout) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java new file mode 100644 index 0000000000000..e837591edb2e9 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java @@ -0,0 +1,453 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.ConnectionClosedException; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.Before; +import org.mockito.stubbing.Answer; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.createDefaultRetrySettings; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RetryingHttpSenderTests extends ESTestCase { + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private DeterministicTaskQueue taskQueue; + + @Before + public void init() throws Exception { + taskQueue = new DeterministicTaskQueue(); + } + + public void testSend_CallsSenderAgain_AfterValidateResponseThrowsAnException() { + var sender = mock(Sender.class); + var httpResponse = mockHttpResponse(); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[0])); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + doThrow(new RetryException(true, "failed")).doNothing().when(handler).validateResponse(any(), any(), any(), any()); + // Mockito.thenReturn() does not compile when returning a + // bounded wild card list, thenAnswer must be used instead. 
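+ // For illustration (the exact parseResult signature is assumed here, e.g. List<? extends InferenceResults>):
+ // when(...) infers its type from the capture-converted call expression, so thenReturn() demands a value of
+ // the captured wildcard type that no concrete List can satisfy, while thenAnswer() takes an Answer<?> and
+ // is therefore not constrained by the capture.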
+ when(handler.parseResult(any())).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_CallsSenderAgain_WhenAFailureStatusCodeIsReturned() { + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(300).thenReturn(200); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' })); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + + var handler = new AlwaysRetryingResponseHandler("test", result -> inferenceResults); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_CallsSenderAgain_WhenParsingFailsOnce() { + var sender = mock(Sender.class); + var httpResponse = mockHttpResponse(); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' })); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + when(handler.parseResult(any())).thenThrow(new RetryException(true, "failed")).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_DoesNotCallSenderAgain_WhenParsingFailsWithNonRetryableException() { + var sender = mock(Sender.class); + var httpResponse = mockHttpResponse(); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' })); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + when(handler.parseResult(any())).thenThrow(new 
IllegalStateException("failed")).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 0); + + var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is("failed")); + + verify(sender, times(1)).send(any(), any()); + } + + public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new RetryException(true, "failed")); + + return Void.TYPE; + }).doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' })); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + when(handler.parseResult(any())).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnceWithConnectionClosedException() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new ConnectionClosedException("failed")); + + return Void.TYPE; + }).doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' })); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + when(handler.parseResult(any())).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterOneRetry() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var sender = mock(Sender.class); + + doAnswer(invocation -> { + 
@SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[0])); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + doThrow(new RetryException(true, "failed")).doThrow(new IllegalStateException("failed again")) + .when(handler) + .validateResponse(any(), any(), any(), any()); + when(handler.parseResult(any())).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is("failed again")); + assertThat(thrownException.getSuppressed().length, is(1)); + assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed")); + + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnElasticsearchException_AfterOneRetry() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new HttpResult(httpResponse, new byte[0])); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var inferenceResults = List.of(mock(InferenceResults.class)); + Answer> answer = (invocation) -> inferenceResults; + + var handler = mock(ResponseHandler.class); + doThrow(new RetryException(true, "failed")).doThrow(new RetryException(false, "failed again")) + .when(handler) + .validateResponse(any(), any(), any(), any()); + when(handler.parseResult(any())).thenAnswer(answer); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is("failed again")); + assertThat(thrownException.getSuppressed().length, is(1)); + assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed")); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_AfterOneRetry() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new RetryException(true, "failed")); + + return Void.TYPE; + }).doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new 
RetryException(false, "failed again")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var handler = mock(ResponseHandler.class); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + + var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is("failed again")); + assertThat(thrownException.getSuppressed().length, is(1)); + assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed")); + verify(sender, times(2)).send(any(), any()); + } + + public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_WithNonRetryableException() { + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class)); + + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var handler = mock(ResponseHandler.class); + + var retrier = new RetryingHttpSender( + sender, + mock(ThrottlerManager.class), + mock(Logger.class), + createDefaultRetrySettings(), + taskQueue.getThreadPool(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + var listener = new PlainActionFuture>(); + executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 0); + + var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is("failed")); + assertThat(thrownException.getSuppressed().length, is(0)); + verify(sender, times(1)).send(any(), any()); + } + + private static HttpResponse mockHttpResponse() { + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(200); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + return httpResponse; + } + + private void executeTasks(Runnable runnable, int retries) { + taskQueue.scheduleNow(runnable); + // Execute the task scheduled from the line above + taskQueue.runAllRunnableTasks(); + + for (int i = 0; i < retries; i++) { + // set the timing correctly to get ready to run the next task + taskQueue.advanceTime(); + taskQueue.runAllRunnableTasks(); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java index 2dd31144b3bc2..992f0d68bd920 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java @@ -54,20 +54,20 @@ public void shutdown() { } public void testQueueSize_IsEmpty() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); 
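+ // For context (inferred from the tests below, not from the production code): the new trailing constructor
+ // argument appears to be an optional CountDownLatch that the service counts down once its run loop
+ // terminates; testIsTerminated_IsTrue awaits it, and tests that do not need to synchronize on termination
+ // pass null.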
assertThat(service.queueSize(), is(0)); } public void testQueueSize_IsOne() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); service.send(mock(HttpRequestBase.class), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); } public void testExecute_ThrowsUnsupported() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); var noopTask = mock(RequestTask.class); var thrownException = expectThrows(UnsupportedOperationException.class, () -> service.execute(noopTask)); @@ -75,16 +75,18 @@ public void testExecute_ThrowsUnsupported() { } public void testIsTerminated_IsFalse() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); assertFalse(service.isTerminated()); } - public void testIsTerminated_IsTrue() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + public void testIsTerminated_IsTrue() throws InterruptedException { + var latch = new CountDownLatch(1); + var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, latch); service.shutdown(); service.start(); + latch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); assertTrue(service.isTerminated()); } @@ -98,7 +100,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { return Void.TYPE; }).when(mockHttpClient).send(any(), any(), any()); - var service = new HttpRequestExecutorService(getTestName(), mockHttpClient, threadPool); + var service = new HttpRequestExecutorService(getTestName(), mockHttpClient, threadPool, null); Future executorTermination = threadPool.generic().submit(() -> { try { @@ -127,7 +129,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { } public void testSend_AfterShutdown_Throws() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); service.shutdown(); @@ -143,7 +145,7 @@ public void testSend_AfterShutdown_Throws() { } public void testSend_Throws_WhenQueueIsFull() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, 1); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, 1, null); service.send(mock(HttpRequestBase.class), null, new PlainActionFuture<>()); var listener = new PlainActionFuture(); @@ -160,7 +162,7 @@ public void testSend_Throws_WhenQueueIsFull() { public void testTaskThrowsError_CallsOnFailure() throws Exception { var httpClient = mock(HttpClient.class); - var service = new HttpRequestExecutorService(getTestName(), httpClient, threadPool); + var service = new HttpRequestExecutorService(getTestName(), httpClient, threadPool, null); doAnswer(invocation -> { service.shutdown(); @@ -180,7 +182,7 @@ public void testTaskThrowsError_CallsOnFailure() throws Exception { } public void testShutdown_AllowsMultipleCalls() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool); + var service = new 
HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); service.shutdown(); service.shutdown(); @@ -192,7 +194,7 @@ public void testShutdown_AllowsMultipleCalls() { } public void testSend_CallsOnFailure_WhenRequestTimesOut() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); var listener = new PlainActionFuture(); service.send(mock(HttpRequestBase.class), TimeValue.timeValueNanos(1), listener); @@ -206,7 +208,7 @@ public void testSend_CallsOnFailure_WhenRequestTimesOut() { } public void testSend_NotifiesTasksOfShutdown() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); var listener = new PlainActionFuture(); service.send(mock(HttpRequestBase.class), null, listener); @@ -228,7 +230,7 @@ public void testQueueTake_Throwing_DoesNotCauseServiceToTerminate() throws Inter BlockingQueue queue = mock(LinkedBlockingQueue.class); when(queue.take()).thenThrow(new ElasticsearchException("failed")).thenReturn(new ShutdownTask()); - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue, null); service.start(); @@ -241,7 +243,7 @@ public void testQueueTake_ThrowingInterruptedException_TerminatesService() throw BlockingQueue queue = mock(LinkedBlockingQueue.class); when(queue.take()).thenThrow(new InterruptedException("failed")); - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue); + var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue, null); Future executorTermination = threadPool.generic().submit(() -> { try { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java index 0cc97ca38de80..49866f9071779 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java @@ -21,24 +21,28 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static 
org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestTests.createRequest; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; @@ -64,6 +68,7 @@ public void shutdown() throws IOException { webServer.close(); } + @SuppressWarnings("unchecked") public void testSend_SuccessfulResponse() throws IOException, URISyntaxException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); @@ -79,12 +84,15 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + HuggingFaceClient huggingFaceClient = new HuggingFaceClient( + sender, + new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) + ); - PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>(); + PlainActionFuture<List<InferenceResults>> listener = new PlainActionFuture<>(); huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); - InferenceResults result = listener.actionGet(TIMEOUT); + var result = listener.actionGet(TIMEOUT).get(0); assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f)))); @@ -98,10 +106,13 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), is("abc")); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List<String>) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); } } + @SuppressWarnings("unchecked") public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyntaxException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); @@ -125,15 +136,23 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + HuggingFaceClient huggingFaceClient = new HuggingFaceClient( + sender, + new ServiceComponents( + threadPool, + mockThrottlerManager(), + // timeout as zero for no retries + buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + ) + ); - PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>(); + PlainActionFuture<List<InferenceResults>> listener = new PlainActionFuture<>(); huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), - is(format("Failed to parse the Hugging Face ELSER 
response for request [POST %s HTTP/1.1]", getUrl(webServer))) + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_ARRAY]") ); assertThat(webServer.requests(), hasSize(1)); @@ -146,21 +165,25 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), is("abc")); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); } } - public void testSend_ThrowsException() { + public void testSend_ThrowsException() throws URISyntaxException, IOException { var sender = mock(Sender.class); doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); - PlainActionFuture listener = new PlainActionFuture<>(); - - var thrownException = expectThrows( - ElasticsearchException.class, - () -> huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener) + HuggingFaceClient huggingFaceClient = new HuggingFaceClient( + sender, + new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) ); + PlainActionFuture> listener = new PlainActionFuture<>(); + + huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is("failed")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java index b0977da234c18..06279e9c89da6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java @@ -14,21 +14,20 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.List; import static org.hamcrest.CoreMatchers.is; public class HuggingFaceElserRequestEntityTests extends ESTestCase { public void testXContent() throws IOException { - var entity = new HuggingFaceElserRequestEntity("abc"); + var entity = new HuggingFaceElserRequestEntity(List.of("abc")); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); assertThat(xContentResult, is(""" - { - "inputs" : "abc" - }""")); + {"inputs":["abc"]}""")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java index 717f5a7e2409d..2a8ce9a46e498 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java @@ -17,12 +17,15 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class HuggingFaceElserRequestTests extends ESTestCase { + @SuppressWarnings("unchecked") public void testCreateRequest() throws URISyntaxException, IOException { var huggingFaceRequest = createRequest("www.google.com", "secret", "abc"); var httpRequest = huggingFaceRequest.createRequest(); @@ -36,12 +39,14 @@ public void testCreateRequest() throws URISyntaxException, IOException { var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), is("abc")); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); } public static HuggingFaceElserRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); - var entity = new HuggingFaceElserRequestEntity(input); + var entity = new HuggingFaceElserRequestEntity(List.of(input)); return new HuggingFaceElserRequest(account, entity); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java index 8cfac1858ab50..716c4520c7ee4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java @@ -17,6 +17,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -34,10 +35,11 @@ public void testFromResponse_CreatesTextExpansionResults() throws IOException { } ]"""; - TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + List parsedResults = HuggingFaceElserResponseEntity.fromResponse( new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - Map tokenWeightMap = parsedResults.getWeightedTokens() + Map tokenWeightMap = parsedResults.get(0) + .getWeightedTokens() .stream() .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); @@ -45,7 +47,7 @@ public void testFromResponse_CreatesTextExpansionResults() throws IOException { assertThat(tokenWeightMap.size(), is(2)); assertThat(tokenWeightMap.get("."), is(0.13315596f)); assertThat(tokenWeightMap.get("the"), is(0.67472112f)); - assertFalse(parsedResults.isTruncated()); + assertFalse(parsedResults.get(0).isTruncated()); } public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IOException { @@ -61,18 +63,33 @@ public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IO } ]"""; - 
TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + List parsedResults = HuggingFaceElserResponseEntity.fromResponse( new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - Map tokenWeightMap = parsedResults.getWeightedTokens() - .stream() - .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); - - // the results get truncated because weighted token stores them as a float - assertThat(tokenWeightMap.size(), is(2)); - assertThat(tokenWeightMap.get("."), is(0.13315596f)); - assertThat(tokenWeightMap.get("the"), is(0.67472112f)); - assertFalse(parsedResults.isTruncated()); + { + var parsedResult = parsedResults.get(0); + Map tokenWeightMap = parsedResult.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("."), is(0.13315596f)); + assertThat(tokenWeightMap.get("the"), is(0.67472112f)); + assertFalse(parsedResult.isTruncated()); + } + { + var parsedResult = parsedResults.get(1); + Map tokenWeightMap = parsedResult.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("hi"), is(0.13315596f)); + assertThat(tokenWeightMap.get("super"), is(0.67472112f)); + assertFalse(parsedResult.isTruncated()); + } } public void testFails_NotAnArray() { @@ -128,7 +145,7 @@ public void testFails_ValueInt() throws IOException { TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) - ); + ).get(0); Map tokenWeightMap = parsedResults.getWeightedTokens() .stream() .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); @@ -149,7 +166,7 @@ public void testFails_ValueLong() throws IOException { TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) - ); + ).get(0); Map tokenWeightMap = parsedResults.getWeightedTokens() .stream() .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java index ba9e7851c9ad4..a9e85d0ffcb1d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.logging; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -22,6 +23,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import 
static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ThrottlerManagerTests extends ESTestCase { @@ -39,6 +41,31 @@ public void shutdown() { terminate(threadPool); } + public void testWarn_LogsOnlyOnce() { + var logger = mock(Logger.class); + + try (var throttler = new ThrottlerManager(Settings.EMPTY, threadPool, mockClusterServiceEmpty())) { + throttler.warn(logger, "test", new IllegalArgumentException("failed")); + + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttler.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } + } + + public void testWarn_AllowsDifferentMessagesToBeLogged() { + var logger = mock(Logger.class); + + try (var throttler = new ThrottlerManager(Settings.EMPTY, threadPool, mockClusterServiceEmpty())) { + throttler.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttler.warn(logger, "a different message", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("a different message"), any(Throwable.class)); + } + } + public void testStartsNewThrottler_WhenResetIntervalIsChanged() { var mockThreadPool = mock(ThreadPool.class); when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java index 27df66c54cd1c..d23f057a7a23e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java @@ -52,7 +52,7 @@ public void testWarn_LogsOnlyOnce() { var logger = mock(Logger.class); try ( - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), Clock.fixed(Instant.now(), ZoneId.systemDefault()), @@ -60,11 +60,11 @@ public void testWarn_LogsOnlyOnce() { new ConcurrentHashMap<>() ) ) { - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", logger::warn); - verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + verify(logger, times(1)).warn(eq("test")); - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", logger::warn); verifyNoMoreInteractions(logger); } } @@ -77,7 +77,7 @@ public void testWarn_LogsOnce_ThenOnceAfterDuration() { var clock = mock(Clock.class); try ( - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), clock, @@ -88,17 +88,17 @@ public void testWarn_LogsOnce_ThenOnceAfterDuration() { when(clock.instant()).thenReturn(now); // The first call is always logged - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // This call should be allowed because the clock thinks it's after the duration period - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> 
logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(2)).warn(eq("test"), any(Throwable.class)); when(clock.instant()).thenReturn(now); // This call should not be allowed because the clock doesn't think it's past the wait period - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verifyNoMoreInteractions(logger); } } @@ -109,7 +109,7 @@ public void testWarn_AllowsDifferentMessagesToBeLogged() { var clock = mock(Clock.class); try ( - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), clock, @@ -117,10 +117,10 @@ new ConcurrentHashMap<>() ) ) { - throttled.warn(logger, "test", new IllegalArgumentException("failed")); - verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + throttler.execute("test", logger::warn); + verify(logger, times(1)).warn(eq("test")); - throttled.warn(logger, "a different message", new IllegalArgumentException("failed")); + throttler.execute("a different message", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(1)).warn(eq("a different message"), any(Throwable.class)); } } @@ -133,7 +133,7 @@ public void testWarn_LogsRepeated1Time() { var clock = mock(Clock.class); try ( - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), clock, @@ -143,16 +143,16 @@ ) { when(clock.instant()).thenReturn(now); // first message is allowed - throttled.warn(logger, "test", new IllegalArgumentException("failed")); - verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + throttler.execute("test", logger::warn); + verify(logger, times(1)).warn(eq("test")); when(clock.instant()).thenReturn(now); // don't allow this message because duration hasn't expired - throttled.warn(logger, "test", new IllegalArgumentException("failed")); - verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + throttler.execute("test", logger::warn); + verify(logger, times(1)).warn(eq("test")); when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking expired duration - throttled.warn(logger, "test", new IllegalArgumentException("failed")); - verify(logger, times(1)).warn(eq("test, repeated 1 time"), any(Throwable.class)); + throttler.execute("test", logger::warn); + verify(logger, times(1)).warn(eq("test, repeated 1 time")); } } @@ -164,7 +164,7 @@ public void testWarn_LogsRepeated2Times() { var clock = mock(Clock.class); try ( - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), clock, @@ -174,16 +174,16 @@ ) { when(clock.instant()).thenReturn(now); // message allowed because it is the first one - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); when(clock.instant()).thenReturn(now); // don't allow these messages because duration hasn't expired - throttled.warn(logger, "test", new IllegalArgumentException("failed")); - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking the duration completion - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.execute("test", (message) -> logger.warn(message, new IllegalArgumentException("failed"))); verify(logger, times(1)).warn(eq("test, repeated 2 times"), any(Throwable.class)); } } @@ -214,7 +214,7 @@ public void testClose_DoesNotAllowLoggingAnyMore() { var clock = mock(Clock.class); - var throttled = new Throttler( + var throttler = new Throttler( TimeValue.timeValueDays(1), TimeValue.timeValueSeconds(10), clock, @@ -222,8 +222,8 @@ new ConcurrentHashMap<>() ); - throttled.close(); - throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttler.close(); + throttler.execute("test", logger::warn); verifyNoMoreInteractions(logger); } } diff --git a/x-pack/plugin/mapper-counted-keyword/build.gradle b/x-pack/plugin/mapper-counted-keyword/build.gradle new file mode 100644 index 0000000000000..f2e7152406962 --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/build.gradle @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +apply plugin: 'elasticsearch.internal-es-plugin' + +esplugin { + name 'counted-keyword' + description 'Module for the counted-keyword field type, which allows duplicates in an array of values of that type to be considered.' + classname 'org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin' + extendedPlugins = ['x-pack-core'] +} +base { + archivesName = 'x-pack-counted-keyword' +} + +dependencies { + compileOnly project(path: xpackModule('core')) +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java new file mode 100644 index 0000000000000..a7f8a47e42463 --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -0,0 +1,393 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.SortField; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; +import org.elasticsearch.index.fielddata.FieldData; +import org.elasticsearch.index.fielddata.FieldDataContext; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.LeafOrdinalsFieldData; +import org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData; +import org.elasticsearch.index.fielddata.plain.AbstractLeafOrdinalsFieldData; +import org.elasticsearch.index.mapper.BinaryFieldMapper; +import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceValueFetcher; +import org.elasticsearch.index.mapper.StringFieldType; +import org.elasticsearch.index.mapper.TextSearchInfo; +import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.script.field.KeywordDocValuesField; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.sort.BucketedSort; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; + +import static org.elasticsearch.common.lucene.Lucene.KEYWORD_ANALYZER; + +/** + *

A special field mapper for multi-valued keywords that may contain duplicate values. If the associated counted_terms
+ * aggregation is used, duplicates are considered in aggregation results. Consider the following values:
+ *
+ * <ul>
+ *     <li>["a", "a", "b"]</li>
+ *     <li>["a", "b", "b"]</li>
+ * </ul>
+ *
+ * <p>While a regular keyword and the corresponding terms aggregation deduplicate values and report a count of
+ * 2 for each key (one per document), a counted_terms aggregation on a counted_keyword field will consider
+ * the actual count and report a count of 3 for each key.</p>
+ *
+ * <p>Only regular source is supported; synthetic source won't work.</p>
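+ *
+ * <p>Illustrative sketch: the aggregation name "st" and field name "events" below are assumed, and the builder
+ * is package-private, so this mirrors how the same-package tests construct it:</p>
+ *
+ * <pre>
+ * // given the two documents above in a counted_keyword field "events", this aggregation
+ * // reports a doc_count of 3 for both "a" and "b"
+ * AggregationBuilder agg = new CountedTermsAggregationBuilder("st").field("events");
+ * </pre>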

+ */ +public class CountedKeywordFieldMapper extends FieldMapper { + public static final String CONTENT_TYPE = "counted_keyword"; + public static final String COUNT_FIELD_NAME_SUFFIX = "_count"; + + public static final FieldType FIELD_TYPE; + + static { + FieldType ft = new FieldType(); + ft.setDocValuesType(DocValuesType.SORTED_SET); + ft.setTokenized(false); + ft.setOmitNorms(true); + ft.setIndexOptions(IndexOptions.DOCS); + ft.freeze(); + FIELD_TYPE = freezeAndDeduplicateFieldType(ft); + } + + private static class CountedKeywordFieldType extends StringFieldType { + + private final MappedFieldType countFieldType; + + CountedKeywordFieldType( + String name, + boolean isIndexed, + boolean isStored, + boolean hasDocValues, + TextSearchInfo textSearchInfo, + Map meta, + MappedFieldType countFieldType + ) { + super(name, isIndexed, isStored, hasDocValues, textSearchInfo, meta); + this.countFieldType = countFieldType; + } + + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name(), context, format); + } + + @Override + public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { + failIfNoDocValues(); + + return (cache, breakerService) -> new AbstractIndexOrdinalsFieldData( + name(), + CoreValuesSourceType.KEYWORD, + cache, + breakerService, + (dv, n) -> new KeywordDocValuesField(FieldData.toString(dv), n) + ) { + + @Override + public LeafOrdinalsFieldData load(LeafReaderContext context) { + final SortedSetDocValues dvValues; + final BinaryDocValues dvCounts; + try { + dvValues = DocValues.getSortedSet(context.reader(), getFieldName()); + dvCounts = DocValues.getBinary(context.reader(), countFieldType.name()); + } catch (IOException e) { + throw new UncheckedIOException("Unable to load " + CONTENT_TYPE + " doc values", e); + } + + return new AbstractLeafOrdinalsFieldData(toScriptFieldFactory) { + + @Override + public SortedSetDocValues getOrdinalsValues() { + return new CountedKeywordSortedBinaryDocValues(dvValues, dvCounts); + } + + @Override + public long ramBytesUsed() { + return 0; // Unknown + } + + @Override + public void close() { + // nothing to close + } + }; + } + + @Override + public LeafOrdinalsFieldData loadDirect(LeafReaderContext context) { + return load(context); + } + + @Override + public SortField sortField( + Object missingValue, + MultiValueMode sortMode, + XFieldComparatorSource.Nested nested, + boolean reverse + ) { + throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field"); + } + + @Override + public BucketedSort newBucketedSort( + BigArrays bigArrays, + Object missingValue, + MultiValueMode sortMode, + XFieldComparatorSource.Nested nested, + SortOrder sortOrder, + DocValueFormat format, + int bucketSize, + BucketedSort.ExtraData extra + ) { + throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field"); + } + }; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + } + + static class CountedKeywordSortedBinaryDocValues extends AbstractSortedSetDocValues { + private final SortedSetDocValues dvValues; + private final BinaryDocValues dvCounts; + private int sumCount; + private Iterator ordsForThisDoc; + private final ByteArrayStreamInput scratch = new ByteArrayStreamInput(); + + CountedKeywordSortedBinaryDocValues(SortedSetDocValues dvValues, BinaryDocValues dvCounts) { + this.dvValues = dvValues; + this.dvCounts = dvCounts; + } + + @Override + public boolean advanceExact(int doc) throws 
IOException { + sumCount = 0; + if (dvValues.advanceExact(doc)) { + boolean exactMatch = dvCounts.advanceExact(doc); + assert exactMatch; + + BytesRef encodedValue = dvCounts.binaryValue(); + scratch.reset(encodedValue.bytes, encodedValue.offset, encodedValue.length); + int[] counts = scratch.readVIntArray(); + assert counts.length == dvValues.docValueCount(); + + List values = new ArrayList<>(); + for (int count : counts) { + this.sumCount += count; + long ord = dvValues.nextOrd(); + for (int j = 0; j < count; j++) { + values.add(ord); + } + } + this.ordsForThisDoc = values.iterator(); + return true; + } else { + ordsForThisDoc = null; + return false; + } + } + + @Override + public int docValueCount() { + return sumCount; + } + + @Override + public long nextOrd() { + if (ordsForThisDoc.hasNext()) { + return ordsForThisDoc.next(); + } else { + return NO_MORE_ORDS; + } + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + return dvValues.lookupOrd(ord); + } + + @Override + public long getValueCount() { + return dvValues.getValueCount(); + } + + @Override + public TermsEnum termsEnum() throws IOException { + return dvValues.termsEnum(); + } + } + + public static class Builder extends FieldMapper.Builder { + private final Parameter> meta = Parameter.metaParam(); + + protected Builder(String name) { + super(name); + } + + @Override + protected Parameter[] getParameters() { + return new Parameter[] { meta }; + } + + @Override + public FieldMapper build(MapperBuilderContext context) { + + BinaryFieldMapper countFieldMapper = new BinaryFieldMapper.Builder(name + COUNT_FIELD_NAME_SUFFIX, true).build(context); + return new CountedKeywordFieldMapper( + name, + FIELD_TYPE, + new CountedKeywordFieldType( + name, + true, + false, + true, + new TextSearchInfo(FIELD_TYPE, null, KEYWORD_ANALYZER, KEYWORD_ANALYZER), + meta.getValue(), + countFieldMapper.fieldType() + ), + multiFieldsBuilder.build(this, context), + copyTo, + countFieldMapper + ); + } + } + + public static TypeParser PARSER = new TypeParser((n, c) -> new CountedKeywordFieldMapper.Builder(n)); + + private final FieldType fieldType; + private final BinaryFieldMapper countFieldMapper; + + protected CountedKeywordFieldMapper( + String simpleName, + FieldType fieldType, + MappedFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + BinaryFieldMapper countFieldMapper + ) { + super(simpleName, mappedFieldType, multiFields, copyTo); + this.fieldType = fieldType; + this.countFieldMapper = countFieldMapper; + } + + @Override + public boolean parsesArrayValue() { + return true; + } + + @Override + protected void parseCreateField(DocumentParserContext context) throws IOException { + XContentParser parser = context.parser(); + SortedMap values = new TreeMap<>(); + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return; + } + if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + parseArray(context, values); + } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + parseValue(parser, values); + } else { + throw new IllegalArgumentException("Encountered unexpected token [" + parser.currentToken() + "]."); + } + int i = 0; + int[] counts = new int[values.size()]; + for (Map.Entry value : values.entrySet()) { + context.doc().add(new KeywordFieldMapper.KeywordField(name(), new BytesRef(value.getKey()), fieldType)); + counts[i++] = value.getValue(); + } + BytesStreamOutput streamOutput = new BytesStreamOutput(); + streamOutput.writeVIntArray(counts); + 
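// The TreeMap keeps the unique values sorted, so the n-th encoded count lines up with the
+        // n-th ordinal of this field's sorted-set doc values; CountedKeywordSortedBinaryDocValues
+        // relies on that alignment when it re-expands ordinals at read time.
+ 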
context.doc().add(new BinaryDocValuesField(countFieldMapper.name(), streamOutput.bytes().toBytesRef())); + } + + private void parseArray(DocumentParserContext context, SortedMap values) throws IOException { + XContentParser parser = context.parser(); + while (true) { + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.END_ARRAY) { + return; + } + if (token == XContentParser.Token.VALUE_STRING) { + parseValue(parser, values); + } else if (token == XContentParser.Token.VALUE_NULL) { + // ignore null values + } else { + throw new IllegalArgumentException("Encountered unexpected token [" + token + "]."); + } + } + } + + private static void parseValue(XContentParser parser, SortedMap values) throws IOException { + String value = parser.text(); + if (values.containsKey(value) == false) { + values.put(value, 1); + } else { + values.put(value, values.get(value) + 1); + } + } + + @Override + public Iterator iterator() { + List mappers = new ArrayList<>(); + Iterator m = super.iterator(); + while (m.hasNext()) { + mappers.add(m.next()); + } + mappers.add(countFieldMapper); + return mappers.iterator(); + } + + @Override + public FieldMapper.Builder getMergeBuilder() { + return new Builder(simpleName()).init(this); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java new file mode 100644 index 0000000000000..62fb10be05f9d --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + *

This plugin adds two associated features:
+ *
+ * <ol>
+ *     <li>The mapping type counted_keyword that behaves like keyword except that it counts duplicate values.</li>
+ *     <li>The counted_terms aggregation that operates on fields mapped as counted_keyword and considers
+ *     duplicate values in the doc_count that it returns.</li>
+ * </ol>
+ *
+ * <p>Both features are considered a tech preview and are thus intentionally undocumented.</p>
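+ *
+ * <p>Illustrative mapping sketch: the field name "events" is assumed, and the builder calls follow the style
+ * of the mapper tests rather than documenting a public API:</p>
+ *
+ * <pre>
+ * // minimal mapping with one counted_keyword field named "events" (assumed name)
+ * XContentBuilder mapping = JsonXContent.contentBuilder()
+ *     .startObject().startObject("properties").startObject("events")
+ *     .field("type", "counted_keyword")
+ *     .endObject().endObject().endObject();
+ * </pre>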

+ */ +public class CountedKeywordMapperPlugin extends Plugin implements MapperPlugin, SearchPlugin { + @Override + public Map getMappers() { + Map mappers = new LinkedHashMap<>(); + mappers.put(CountedKeywordFieldMapper.CONTENT_TYPE, CountedKeywordFieldMapper.PARSER); + return Collections.unmodifiableMap(mappers); + } + + @Override + public List getAggregations() { + List specs = new ArrayList<>(); + specs.add( + new SearchPlugin.AggregationSpec( + CountedTermsAggregationBuilder.NAME, + CountedTermsAggregationBuilder::new, + CountedTermsAggregationBuilder.PARSER + ).setAggregatorRegistrar(CountedTermsAggregationBuilder::registerAggregators) + ); + return List.copyOf(specs); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java new file mode 100644 index 0000000000000..23adacd8f65fa --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +class CountedTermsAggregationBuilder extends ValuesSourceAggregationBuilder { + public static final String NAME = "counted_terms"; + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = + new ValuesSourceRegistry.RegistryKey<>(NAME, CountedTermsAggregatorSupplier.class); + + public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size"); + + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + CountedTermsAggregationBuilder::new + ); + static { + ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); + + PARSER.declareInt(CountedTermsAggregationBuilder::size, REQUIRED_SIZE_FIELD_NAME); + } + + // see TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS + private TermsAggregator.BucketCountThresholds bucketCountThresholds = 
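// assumed reading of the constructor arguments: minDocCount=1, shardMinDocCount=0,
+    // requiredSize=10, shardSize=-1 (unset until doCreateInternal derives it via
+    // BucketUtils.suggestShardSideQueueSize)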
new TermsAggregator.BucketCountThresholds(1, 0, 10, -1); + + protected CountedTermsAggregationBuilder(String name) { + super(name); + } + + protected CountedTermsAggregationBuilder( + ValuesSourceAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { + super(clone, factoriesBuilder, metadata); + } + + protected CountedTermsAggregationBuilder(StreamInput in) throws IOException { + super(in); + bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); + } + + public static void registerAggregators(ValuesSourceRegistry.Builder builder) { + CountedTermsAggregatorFactory.registerAggregators(builder); + } + + public CountedTermsAggregationBuilder size(int size) { + if (size <= 0) { + throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); + } + bucketCountThresholds.setRequiredSize(size); + return this; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.COUNTED_KEYWORD_ADDED; + } + + @Override + protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map metadata) { + return new CountedTermsAggregationBuilder(this, factoriesBuilder, metadata); + } + + @Override + public BucketCardinality bucketCardinality() { + return BucketCardinality.MANY; + } + + @Override + public String getType() { + return NAME; + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + bucketCountThresholds.writeTo(out); + } + + @Override + protected ValuesSourceType defaultValueSourceType() { + return CoreValuesSourceType.KEYWORD; + } + + @Override + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + CountedTermsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new CountedTermsAggregatorFactory( + name, + config, + bucketCountThresholds, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + // expose only size in XContent as only size can be set externally + builder.field(REQUIRED_SIZE_FIELD_NAME.getPreferredName(), bucketCountThresholds.getRequiredSize()); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + CountedTermsAggregationBuilder that = (CountedTermsAggregationBuilder) o; + return Objects.equals(bucketCountThresholds, that.bucketCountThresholds); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), bucketCountThresholds); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java new file mode 100644 index 0000000000000..5e1b1e3624f00 --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationExecutionContext; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.CardinalityUpperBound; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalOrder; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; +import org.elasticsearch.search.aggregations.bucket.terms.BucketPriorityQueue; +import org.elasticsearch.search.aggregations.bucket.terms.BytesKeyedBucketOrds; +import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.ValuesSource; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; +import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; +import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; + +class CountedTermsAggregator extends TermsAggregator { + private final BytesKeyedBucketOrds bucketOrds; + protected final ValuesSource.Bytes.WithOrdinals valuesSource; + + @SuppressWarnings("this-escape") + CountedTermsAggregator( + String name, + AggregatorFactories factories, + ValuesSource.Bytes.WithOrdinals valuesSource, + BucketOrder order, + DocValueFormat format, + BucketCountThresholds bucketCountThresholds, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { + super(name, factories, context, parent, bucketCountThresholds, order, format, SubAggCollectionMode.DEPTH_FIRST, metadata); + this.valuesSource = valuesSource; + this.bucketOrds = BytesKeyedBucketOrds.build(context.bigArrays(), cardinality); + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) throws IOException { + SortedSetDocValues ords = valuesSource.ordinalsValues(aggCtx.getLeafReaderContext()); + return new LeafBucketCollectorBase(sub, ords) { + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + if (ords.advanceExact(doc) == false) { + return; + } + for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + long bucketOrdinal = bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)); + if (bucketOrdinal < 0) { // already seen + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { + collectBucket(sub, doc, bucketOrdinal); + } + } + } + }; + } + + @Override + public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + 
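// For every owning bucket ordinal, all collected term buckets stream through a priority queue
+        // bounded by shard_size so only the top buckets by doc count survive; each doc count is first
+        // added to otherDocCounts, and the survivors' counts are subtracted back out below.
+ 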
StringTerms.Bucket[][] topBucketsPerOrd = new StringTerms.Bucket[owningBucketOrds.length][]; + long[] otherDocCounts = new long[owningBucketOrds.length]; + for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); + + // as users can't control sort order, in practice we'll always sort by doc count descending + BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, partiallyBuiltBucketComparator); + StringTerms.Bucket spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); + Supplier emptyBucketBuilder = () -> new StringTerms.Bucket(new BytesRef(), 0, null, false, 0, format); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts[ordIdx] += docCount; + if (spare == null) { + spare = emptyBucketBuilder.get(); + } + ordsEnum.readValue(spare.getTermBytes()); + spare.setDocCount(docCount); + spare.setBucketOrd(ordsEnum.ord()); + spare = ordered.insertWithOverflow(spare); + } + + topBucketsPerOrd[ordIdx] = new StringTerms.Bucket[ordered.size()]; + for (int i = ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd[ordIdx][i] = ordered.pop(); + otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount(); + topBucketsPerOrd[ordIdx][i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd[ordIdx][i].getTermBytes())); + } + } + + buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); + InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; + for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + final BucketOrder reduceOrder; + if (isKeyOrder(order) == false) { + reduceOrder = InternalOrder.key(true); + Arrays.sort(topBucketsPerOrd[ordIdx], reduceOrder.comparator()); + } else { + reduceOrder = order; + } + result[ordIdx] = new StringTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + false, + otherDocCounts[ordIdx], + Arrays.asList(topBucketsPerOrd[ordIdx]), + null + ); + } + return result; + } + + @Override + public InternalAggregation buildEmptyAggregation() { + return new StringTerms( + name, + order, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + false, + 0, + emptyList(), + 0L + ); + } + + @Override + public void collectDebugInfo(BiConsumer add) { + super.collectDebugInfo(add); + add.accept("total_buckets", bucketOrds.size()); + } + + @Override + protected void doClose() { + Releasables.close(bucketOrds); + } + +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java new file mode 100644 index 0000000000000..3b8be76f14da8 --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.CardinalityUpperBound; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.NonCollectingAggregator; +import org.elasticsearch.search.aggregations.bucket.BucketUtils; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; +import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.SamplingContext; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +class CountedTermsAggregatorFactory extends ValuesSourceAggregatorFactory { + private final BucketOrder order = BucketOrder.count(false); + private final CountedTermsAggregatorSupplier supplier; + private final TermsAggregator.BucketCountThresholds bucketCountThresholds; + + static void registerAggregators(ValuesSourceRegistry.Builder builder) { + builder.register( + CountedTermsAggregationBuilder.REGISTRY_KEY, + List.of(CoreValuesSourceType.KEYWORD), + CountedTermsAggregatorFactory.bytesSupplier(), + true + ); + } + + /** + * This supplier is used for all the field types that should be aggregated as bytes/strings, + * including those that need global ordinals + */ + static CountedTermsAggregatorSupplier bytesSupplier() { + return (name, factories, valuesSourceConfig, order, bucketCountThresholds, context, parent, cardinality, metadata) -> { + + assert valuesSourceConfig.getValuesSource() instanceof ValuesSource.Bytes.WithOrdinals; + ValuesSource.Bytes.WithOrdinals ordinalsValuesSource = (ValuesSource.Bytes.WithOrdinals) valuesSourceConfig.getValuesSource(); + + return new CountedTermsAggregator( + name, + factories, + ordinalsValuesSource, + order, + valuesSourceConfig.format(), + bucketCountThresholds, + context, + parent, + cardinality, + metadata + ); + }; + } + + CountedTermsAggregatorFactory( + String name, + ValuesSourceConfig config, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + CountedTermsAggregatorSupplier supplier + ) throws IOException { + super(name, config, context, parent, subFactoriesBuilder, metadata); + this.bucketCountThresholds = bucketCountThresholds; + this.supplier = supplier; + } + + @Override + protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { + final InternalAggregation aggregation = new UnmappedTerms( + name, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata + ); + return new NonCollectingAggregator(name, context, parent, 
factories, metadata) { + @Override + public InternalAggregation buildEmptyAggregation() { + return aggregation; + } + }; + } + + @Override + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); + // If min_doc_count and shard_min_doc_count is provided, we do not support them being larger than 1 + // This is because we cannot be sure about their relative scale when sampled + if (getSamplingContext().map(SamplingContext::isSampled).orElse(false)) { + if (bucketCountThresholds.getMinDocCount() > 1 || bucketCountThresholds.getShardMinDocCount() > 1) { + throw new ElasticsearchStatusException( + "aggregation [{}] is within a sampling context; " + + "min_doc_count, provided [{}], and min_shard_doc_count, provided [{}], cannot be greater than 1", + RestStatus.BAD_REQUEST, + name(), + bucketCountThresholds.getMinDocCount(), + bucketCountThresholds.getShardMinDocCount() + ); + } + } + bucketCountThresholds.ensureValidity(); + + return supplier.build(name, factories, config, order, bucketCountThresholds, context, parent, cardinality, metadata); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java new file mode 100644 index 0000000000000..2817863f6b42c --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.CardinalityUpperBound; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; + +import java.io.IOException; +import java.util.Map; + +interface CountedTermsAggregatorSupplier { + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java new file mode 100644 index 0000000000000..9d1935a835cbe --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperTestCase; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.junit.AssumptionViolatedException; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +public class CountedKeywordFieldMapperTests extends MapperTestCase { + @Override + protected Collection getPlugins() { + return Collections.singletonList(new CountedKeywordMapperPlugin()); + } + + @Override + protected void minimalMapping(XContentBuilder b) throws IOException { + b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE); + } + + @Override + protected Object getSampleValueForDocument() { + return new String[] { "a", "a", "b", "c" }; + } + + @Override + protected Object getSampleValueForQuery() { + return "b"; + } + + @Override + protected boolean supportsIgnoreMalformed() { + return false; + } + + @Override + protected boolean supportsStoredFields() { + return false; + } + + @Override + protected void registerParameters(ParameterChecker checker) { + // Nothing to do + } + + @Override + protected Object generateRandomInputValue(MappedFieldType ft) { + return randomBoolean() ? null : randomAlphaOfLengthBetween(1, 10); + } + + @Override + protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { + throw new AssumptionViolatedException("not supported"); + } + + @Override + protected IngestScriptSupport ingestScriptSupport() { + throw new AssumptionViolatedException("not supported"); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java new file mode 100644 index 0000000000000..c29e4513562fc --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.List; + +public class CountedKeywordFieldTypeTests extends ESTestCase { + public void testSingleValuedField() throws Exception { + SortedSetDocValues sd = new CollectionBasedSortedSetDocValues(List.of(new BytesRef("a"))); + BinaryDocValues bd = new CollectionBasedBinaryDocValues(List.of(toBytesRef(new int[] { 3 }))); + + CountedKeywordFieldMapper.CountedKeywordSortedBinaryDocValues dv = + new CountedKeywordFieldMapper.CountedKeywordSortedBinaryDocValues(sd, bd); + + assertTrue(dv.advanceExact(0)); + + assertEquals(3, dv.docValueCount()); + + assertOrdinal(dv, "a", 3); + } + + public void testMultiValuedField() throws Exception { + SortedSetDocValues sd = new CollectionBasedSortedSetDocValues(List.of(new BytesRef("a"), new BytesRef("b"))); + BinaryDocValues bd = new CollectionBasedBinaryDocValues(List.of(toBytesRef(new int[] { 1, 5 }))); + + CountedKeywordFieldMapper.CountedKeywordSortedBinaryDocValues dv = + new CountedKeywordFieldMapper.CountedKeywordSortedBinaryDocValues(sd, bd); + + assertTrue(dv.advanceExact(0)); + + assertEquals(6, dv.docValueCount()); + + assertOrdinal(dv, "a", 1); + assertOrdinal(dv, "b", 5); + } + + private void assertOrdinal(CountedKeywordFieldMapper.CountedKeywordSortedBinaryDocValues dv, String value, int times) + throws IOException { + for (int i = 0; i < times; i++) { + long ordinal = dv.nextOrd(); + assertNotEquals(DocIdSetIterator.NO_MORE_DOCS, ordinal); + assertEquals(new BytesRef(value), dv.lookupOrd(ordinal)); + } + } + + private BytesRef toBytesRef(int[] counts) throws IOException { + try (BytesStreamOutput streamOutput = new BytesStreamOutput()) { + streamOutput.writeVIntArray(counts); + return streamOutput.bytes().toBytesRef(); + } + } + + private static class CollectionBasedSortedSetDocValues extends SortedSetDocValues { + private final List docValues; + + private final DocIdSetIterator disi; + + private long currentOrd = -1; + + private CollectionBasedSortedSetDocValues(List docValues) { + this.docValues = docValues; + this.disi = DocIdSetIterator.all(docValues.size()); + } + + @Override + public long nextOrd() { + currentOrd++; + if (currentOrd >= docValues.size()) { + return NO_MORE_ORDS; + } + return currentOrd; + } + + @Override + public int docValueCount() { + return docValues.size(); + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + return docValues.get((int) ord); + } + + @Override + public long getValueCount() { + return docValues.size(); + } + + @Override + public boolean advanceExact(int target) throws IOException { + currentOrd = -1; + return disi.advance(target) == target; + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public int nextDoc() throws IOException { + currentOrd = -1; + return disi.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + currentOrd = -1; + return disi.advance(target); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private static class CollectionBasedBinaryDocValues extends BinaryDocValues { + private final List docValues; + private final DocIdSetIterator disi; + + private int current = -1; + + 
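// Test-only BinaryDocValues backed by a list: doc id N serves the BytesRef at index N.
+ 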
private CollectionBasedBinaryDocValues(List docValues) { + this.docValues = docValues; + this.disi = DocIdSetIterator.all(docValues.size()); + } + + @Override + public BytesRef binaryValue() { + return docValues.get(current); + } + + @Override + public boolean advanceExact(int target) throws IOException { + current = target; + return disi.advance(target) == target; + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public int nextDoc() throws IOException { + current = -1; + return disi.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + current = -1; + return disi.advance(target); + } + + @Override + public long cost() { + return disi.cost(); + } + } + +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java new file mode 100644 index 0000000000000..ba266e82fecc8 --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.aggregations.BaseAggregationTestCase; + +import java.util.Collection; +import java.util.Collections; + +public class CountedTermsAggregationBuilderTests extends BaseAggregationTestCase { + @Override + protected Collection> getPlugins() { + return Collections.singletonList(CountedKeywordMapperPlugin.class); + } + + @Override + protected CountedTermsAggregationBuilder createTestAggregatorBuilder() { + return new CountedTermsAggregationBuilder(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLength(7)); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java new file mode 100644 index 0000000000000..02d629c7604ac --- /dev/null +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.countedkeyword; + +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.TestDocumentParserContext; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class CountedTermsAggregatorTests extends AggregatorTestCase { + @Override + protected List getSearchPlugins() { + return Collections.singletonList(new CountedKeywordMapperPlugin()); + } + + public void testAggregatesCountedKeywords() throws Exception { + FieldMapper mapper = new CountedKeywordFieldMapper.Builder("stacktraces").build(MapperBuilderContext.root(false, false)); + MappedFieldType fieldType = mapper.fieldType(); + + CountedTermsAggregationBuilder aggregationBuilder = new CountedTermsAggregationBuilder("st").field("stacktraces"); + testCase(iw -> { + iw.addDocument(doc(mapper, "a", null, "a", "b")); + iw.addDocument(doc(mapper, "b", "c", "d")); + iw.addDocument(doc(mapper, new String[] { null })); + + }, (InternalTerms result) -> { + // note how any nulls are ignored + Map expectedBuckets = Map.of("a", 2L, "b", 2L, "c", 1L, "d", 1L); + assertEquals("Bucket count does not match", expectedBuckets.size(), result.getBuckets().size()); + + Set seenUniqueKeys = new HashSet<>(); + for (InternalTerms.Bucket bucket : result.getBuckets()) { + String k = bucket.getKeyAsString(); + assertTrue("Unexpected bucket key [" + k + "]", expectedBuckets.containsKey(k)); + assertEquals(expectedBuckets.get(k).longValue(), bucket.getDocCount()); + seenUniqueKeys.add(k); + } + // ensure no duplicate keys + assertEquals("Every bucket key must be unique", expectedBuckets.size(), seenUniqueKeys.size()); + assertTrue(AggregationInspectionHelper.hasValue(result)); + }, new AggTestConfig(aggregationBuilder, fieldType)); + } + + private List doc(FieldMapper mapper, String... values) { + // quote regular strings but keep null values unquoted so they are not treated as regular strings + List quotedValues = Arrays.stream(values).map(v -> v != null ? 
"\"" + v + "\"" : v).toList(); + String source = "[" + Strings.collectionToCommaDelimitedString(quotedValues) + "]"; + try { + XContentParser parser = createParser(JsonXContent.jsonXContent, source); + // move to first token + parser.nextToken(); + TestDocumentParserContext ctx = new TestDocumentParserContext( + MappingLookup.EMPTY, + new SourceToParse("test", new BytesArray(source), XContentType.JSON) + ) { + @Override + public XContentParser parser() { + return parser; + } + }; + mapper.parse(ctx); + return ctx.doc().getFields(); + } catch (IOException e) { + throw new AssertionError(e); + } + } +} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java index 41db25881185f..6cdeb93d1e07d 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java @@ -90,7 +90,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A if (packagedModelId.equals(packageConfig.getPackagedModelId()) == false) { // the package is somehow broken - listener.onFailure(new ElasticsearchStatusException("Invalid package", RestStatus.INTERNAL_SERVER_ERROR)); + listener.onFailure(new ElasticsearchStatusException("Invalid package name", RestStatus.INTERNAL_SERVER_ERROR)); return; } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestInputConfigIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestInputConfigIT.java index a3e5c3993398e..d71273e7f8119 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestInputConfigIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestInputConfigIT.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.ml.integration; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.ml.utils.MapHelper; @@ -118,16 +116,4 @@ private static String pipelineDefinition(String modelId, String inputOutput) { ] }""", modelId, inputOutput); } - - private Response simulatePipeline(String pipelineDef, String docs) throws IOException { - String simulate = Strings.format(""" - { - "pipeline": %s, - "docs": %s - }""", pipelineDef, docs); - - Request request = new Request("POST", "_ingest/pipeline/_simulate?error_trace=true"); - request.setJsonEntity(simulate); - return client().performRequest(request); - } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 34d50216ae325..276f60a28ccb4 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -440,16 +440,11 @@ protected static void createDataStreamAndTemplate(String dataStreamName, String client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request(dataStreamName + "_template").indexTemplate( - new ComposableIndexTemplate( - Collections.singletonList(dataStreamName), - new Template(null, new CompressedXContent(mapping), null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList(dataStreamName)) + .template(new Template(null, new CompressedXContent(mapping), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ) ).actionGet(); client().execute(CreateDataStreamAction.INSTANCE, new CreateDataStreamAction.Request(dataStreamName)).actionGet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java index a3106eac4ab22..34ef0baecccc5 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java @@ -31,7 +31,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.ml.integration.InferenceIngestIT.putPipeline; import static org.elasticsearch.xpack.ml.integration.InferenceIngestIT.simulateRequest; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -635,7 +634,7 @@ public void testInferencePipelineAgainstUnallocatedModel() throws IOException { ) ); - client().performRequest(putPipeline("my_pipeline", """ + putPipeline("my_pipeline", """ {"processors": [ { "inference": { @@ -643,7 +642,7 @@ public void testInferencePipelineAgainstUnallocatedModel() throws IOException { } } ] - }""")); + }"""); Request request = new Request("PUT", "undeployed_model_index/_doc/1?pipeline=my_pipeline&refresh=true"); request.setJsonEntity(""" @@ -717,7 +716,7 @@ public void testStopUsedDeploymentByIngestProcessor() throws IOException { putVocabulary(List.of("these", "are", "my", "words"), modelId); startDeployment(modelId); - client().performRequest(putPipeline("my_pipeline", Strings.format(""" + putPipeline("my_pipeline", Strings.format(""" { "processors": [ { @@ -726,7 +725,7 @@ public void testStopUsedDeploymentByIngestProcessor() throws IOException { } } ] - }""", modelId))); + }""", modelId)); ResponseException ex = expectThrows(ResponseException.class, () -> stopDeployment(modelId)); assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(409)); assertThat( @@ -749,7 +748,7 @@ public void testStopWithModelAliasUsedDeploymentByIngestProcessor() throws IOExc startDeployment(modelId); client().performRequest(new Request("PUT", Strings.format("_ml/trained_models/%s/model_aliases/%s", modelId, modelAlias))); - client().performRequest(putPipeline("my_pipeline", Strings.format(""" + putPipeline("my_pipeline", Strings.format(""" { "processors": [ { @@ -758,7 +757,7 @@ public void testStopWithModelAliasUsedDeploymentByIngestProcessor() throws 
IOExc } } ] - }""", modelAlias))); + }""", modelAlias)); ResponseException ex = expectThrows(ResponseException.class, () -> stopDeployment(modelId)); assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(409)); assertThat( diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java index b278f9fe9e466..c785ae96c5c16 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelRestTestCase.java @@ -357,6 +357,24 @@ protected void forceMergeIndex(String index) throws IOException { assertOkWithErrorMessage(client().performRequest(request)); } + protected void putPipeline(String pipelineId, String pipelineDefinition) throws IOException { + Request request = new Request("PUT", "_ingest/pipeline/" + pipelineId); + request.setJsonEntity(pipelineDefinition); + assertOkWithErrorMessage(client().performRequest(request)); + } + + protected Response simulatePipeline(String pipelineDef, String docs) throws IOException { + String simulate = Strings.format(""" + { + "pipeline": %s, + "docs": %s + }""", pipelineDef, docs); + + Request request = new Request("POST", "_ingest/pipeline/_simulate?error_trace=true"); + request.setJsonEntity(simulate); + return client().performRequest(request); + } + @SuppressWarnings("unchecked") protected int getAllocationCount(String modelId) throws IOException { Response response = getTrainedModelStats(modelId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java index c3b738f66127a..8e425ea071879 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java @@ -283,7 +283,7 @@ public void testHybridSearch() throws IOException { } } - public void testSearchWithMissingModel() throws IOException { + public void testSearchWithMissingModel() { String modelId = "missing-model"; String indexName = modelId + "-index"; @@ -291,6 +291,120 @@ public void testSearchWithMissingModel() throws IOException { assertThat(e.getMessage(), containsString("Could not find trained model [missing-model]")); } + @SuppressWarnings("unchecked") + public void testModelWithPrefixStrings() throws IOException { + String modelId = "model-with-prefix-strings"; + String ingestPrefix = "passage: "; + String searchPrefix = "query: "; + + createTextEmbeddingModelWithPrefixString(modelId, searchPrefix, ingestPrefix); + putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE); + putVocabulary( + List.of( + "these", + "are", + "my", + "words", + "the", + "washing", + "machine", + "is", + "leaking", + "octopus", + "comforter", + "smells", + ingestPrefix, + searchPrefix + ), + modelId + ); + startDeployment(modelId); + + String pipelineDefinition = Strings.format(""" + { + "processors": [ + { + "inference": { + "model_id": "%s", 
+ "input_output": { + "input_field": "source_text", + "output_field": "embedding" + }, + "inference_config": { + "text_embedding": { + } + } + } + } + ] + } + """, modelId); + + String docSource = """ + [ + {"_source": { + "source_text": "the washing machine is leaking"}} + ] + """; + + // At ingest the prefix is automatically added + var simulateResponse = simulatePipeline(pipelineDefinition, docSource); + var simulateResponseMap = entityAsMap(simulateResponse); + var simulatedDocs = (List<Map<String, Object>>) simulateResponseMap.get("docs"); + List<Double> pipelineEmbedding = (List<Double>) MapHelper.dig("doc._source.embedding", simulatedDocs.get(0)); + assertNotNull(simulateResponseMap.toString(), pipelineEmbedding); + + // Create the embedding for the same input text used in + // simulate pipeline ingest. Here the ingest prefix is + // manually added; the resulting embeddings should be + // the same. + var inferenceResponse = infer(ingestPrefix + "the washing machine is leaking", modelId); + Map<String, Object> inferenceResult = ((List<Map<String, Object>>) entityAsMap(inferenceResponse).get("inference_results")).get(0); + List<Double> inferenceEmbedding = (List<Double>) inferenceResult.get("predicted_value"); + assertNotNull(inferenceResult.toString(), inferenceEmbedding); + // embeddings are exactly equal + assertEquals(inferenceEmbedding, pipelineEmbedding); + + // Now check the search prefix + List<String> inputs = List.of( + searchPrefix + "my words", + "the machine is leaking", + "washing machine", + "these are my words", + "the octopus comforter smells" + ); + List<String> filters = List.of("foo", "bar", "baz", "foo", "bar"); + List<List<Double>> embeddings = new ArrayList<>(); + + // Generate the text embeddings via the inference API + // then index them for search + for (var input : inputs) { + Response inference = infer(input, modelId); + List<Map<String, Object>> responseMap = (List<Map<String, Object>>) entityAsMap(inference).get("inference_results"); + List<Double> embedding = (List<Double>) responseMap.get(0).get("predicted_value"); + embeddings.add(embedding); + } + + // index dense vectors + String indexName = modelId + "_index"; + createVectorSearchIndex(indexName); + bulkIndexDocs(inputs, filters, embeddings, indexName); + forceMergeIndex(indexName); + + // the input "my words" should be prefixed with searchPrefix + var textEmbeddingSearchResponse = textEmbeddingSearch(indexName, "my words", modelId, "embedding"); + assertOkWithErrorMessage(textEmbeddingSearchResponse); + + Map<String, Object> responseMap = responseAsMap(textEmbeddingSearchResponse); + List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap); + Map<String, Object> topHit = hits.get(0); + String sourceText = (String) MapHelper.dig("_source.source_text", topHit); + // The top hit should have the search prefix + assertEquals(searchPrefix + "my words", sourceText); + List<Double> foundEmbedding = (List<Double>) MapHelper.dig("_source.embedding", topHit); + assertEquals(embeddings.get(0), foundEmbedding); + } + protected Response textEmbeddingSearch(String index, String modelText, String modelId, String denseVectorFieldName) throws IOException { Request request = new Request("GET", index + "/_search?error_trace=true"); @@ -390,4 +504,27 @@ private void bulkIndexDocs(List<String> sourceText, List<String> filters, List { GetJobsStatsAction.Response statsResponse = client().execute( GetJobsStatsAction.INSTANCE, @@ -51,6 +55,7 @@ public void testCrudOnTwoJobsInSharedIndex() throws Exception { ).actionGet(); assertEquals(statsResponse.getResponse().results().get(0).getState(), JobState.OPENED); }); + assertRecentLastTaskStateChangeTime(MlTasks.jobTaskId(jobId2), Duration.of(10, ChronoUnit.SECONDS), null); OriginSettingClient client = new
OriginSettingClient(client(), ML_ORIGIN); assertThat( diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobsAndModelsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobsAndModelsIT.java index 54890c65f7576..1458b9ccf693c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobsAndModelsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobsAndModelsIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; @@ -36,6 +37,8 @@ import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Set; @@ -231,6 +234,8 @@ public void testCluster_GivenAnomalyDetectionJobAndTrainedModelDeployment_Should assertThat(jobStats.getNode(), is(not(equalTo(modelStats.getDeploymentStats().getNodeStats().get(0).getNode())))); }); + assertRecentLastTaskStateChangeTime(MlTasks.jobTaskId(jobId), Duration.of(10, ChronoUnit.SECONDS), null); + + // Clean up client().execute(CloseJobAction.INSTANCE, new CloseJobAction.Request(jobId).setForce(true)).actionGet(); client().execute(StopTrainedModelDeploymentAction.INSTANCE, new StopTrainedModelDeploymentAction.Request(model.getModelId())) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 62029e5e9cb98..53f6c19ce43f1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.ml.job.task.JobTask; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; @@ -425,7 +426,7 @@ protected void taskOperation( JobTask jobTask, ActionListener<CloseJobAction.Response> listener ) { - JobTaskState taskState = new JobTaskState(JobState.CLOSING, jobTask.getAllocationId(), "close job (api)"); + JobTaskState taskState = new JobTaskState(JobState.CLOSING, jobTask.getAllocationId(), "close job (api)", Instant.now()); jobTask.updatePersistentTaskState(taskState, ActionListener.wrap(task -> { // we need to fork because we are now on a network threadpool and closeJob method may take a while to complete: threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java index 5217d7ed1c181..65d630ebf1d6e 100644 ---
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java @@ -107,6 +107,7 @@ protected void taskOperation( request.getUpdate(), request.isHighPriority(), request.getInferenceTimeout(), + request.getPrefixType(), actionTask, orderedListener(count, results, slot++, nlpInputs.size(), listener) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index d414a013a0e8c..3cf0189c28df2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -286,6 +286,7 @@ private void inferAgainstAllocatedModel( ); } deploymentRequest.setHighPriority(request.isHighPriority()); + deploymentRequest.setPrefixType(request.getPrefixType()); deploymentRequest.setNodes(node.v1()); deploymentRequest.setParentTask(parentTaskId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 708c8ee285896..242d5e00f0ec7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -577,6 +577,7 @@ static void setTrainedModelConfigFieldsFromPackagedModel( ) ); trainedModelConfig.setTags(resolvedModelPackageConfig.getTags()); + trainedModelConfig.setPrefixStrings(resolvedModelPackageConfig.getPrefixStrings()); trainedModelConfig.setModelPackageConfig( new ModelPackageConfig.Builder(resolvedModelPackageConfig).resetPackageOnlyFields().build() ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 24f3ef90ad76d..525ee7c4aa21d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -83,6 +83,7 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.task.AbstractJobPersistentTasksExecutor; +import java.time.Instant; import java.util.Collection; import java.util.List; import java.util.Map; @@ -788,7 +789,8 @@ private void executeTask(DataFrameAnalyticsTask task) { DataFrameAnalyticsTaskState startedState = new DataFrameAnalyticsTaskState( DataFrameAnalyticsState.STARTED, task.getAllocationId(), - null + null, + Instant.now() ); task.updatePersistentTaskState( startedState, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java index 961331c33c2ce..c3d35fbc11593 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java @@ -47,6 +47,7 @@ import 
org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; +import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -385,7 +386,8 @@ protected void taskOperation( DataFrameAnalyticsTaskState stoppingState = new DataFrameAnalyticsTaskState( DataFrameAnalyticsState.STOPPING, task.getAllocationId(), - null + null, + Instant.now() ); task.updatePersistentTaskState(stoppingState, ActionListener.wrap(pTask -> { threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java index c6af1bcfa6f18..85ab838d52eca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.utils.persistence.MlParserUtils; +import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Objects; @@ -178,7 +179,8 @@ public void setFailed(Exception error) { DataFrameAnalyticsTaskState newTaskState = new DataFrameAnalyticsTaskState( DataFrameAnalyticsState.FAILED, getAllocationId(), - reason + reason, + Instant.now() ); updatePersistentTaskState(newTaskState, ActionListener.wrap(updatedTask -> { String message = Messages.getMessage( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index 26adca10b8da8..3efae0ed58bf6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAssignmentRoutingInfoAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfoUpdate; @@ -289,10 +290,11 @@ public void infer( NlpInferenceInput input, boolean skipQueue, TimeValue timeout, + TrainedModelPrefixStrings.PrefixType prefixType, CancellableTask parentActionTask, ActionListener<InferenceResults> listener ) { - deploymentManager.infer(task, config, input, skipQueue, timeout, parentActionTask, listener); + deploymentManager.infer(task, config, input, skipQueue, timeout, prefixType, parentActionTask, listener); } public Optional<ModelStats> modelStats(TrainedModelDeploymentTask task) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index a8c449ec5d8e7..b4cc70720c866 100644 ---
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.IndexLocation; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; @@ -167,6 +168,7 @@ public void startDeployment(TrainedModelDeploymentTask task, ActionListener<TrainedModelDeploymentTask> finalListener) { ActionListener<TrainedModelConfig> getVerifiedModel = ActionListener.wrap((modelConfig) -> { processContext.modelInput.set(modelConfig.getInput()); + processContext.prefixes.set(modelConfig.getPrefixStrings()); if (modelConfig.getInferenceConfig() instanceof NlpConfig nlpConfig) { task.init(nlpConfig); @@ -319,6 +321,7 @@ public void infer( NlpInferenceInput input, boolean skipQueue, TimeValue timeout, + TrainedModelPrefixStrings.PrefixType prefixType, CancellableTask parentActionTask, ActionListener<InferenceResults> listener ) { @@ -336,6 +339,7 @@ public void infer( processContext, config, input, + prefixType, threadPool, parentActionTask, listener @@ -437,6 +441,7 @@ class ProcessContext { private final SetOnce<PyTorchProcess> process = new SetOnce<>(); private final SetOnce<NlpTask.Processor> nlpTaskProcessor = new SetOnce<>(); private final SetOnce<TrainedModelInput> modelInput = new SetOnce<>(); + private final SetOnce<TrainedModelPrefixStrings> prefixes = new SetOnce<>(); private final PyTorchResultProcessor resultProcessor; private final PyTorchStateStreamer stateStreamer; private final PriorityProcessWorkerExecutorService priorityProcessWorker; @@ -681,5 +686,9 @@ SetOnce<PyTorchProcess> getProcess() { SetOnce<NlpTask.Processor> getNlpTaskProcessor() { return nlpTaskProcessor; } + + SetOnce<TrainedModelPrefixStrings> getPrefixStrings() { + return prefixes; + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchAction.java index c91efb09d3cae..945203c345a3c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceResults; @@ -18,6 +19,7 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -26,7 +28,6 @@ import org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchResult; import java.io.IOException; -import java.util.Collections; import java.util.List; import static
org.elasticsearch.core.Strings.format; @@ -39,6 +40,7 @@ class InferencePyTorchAction extends AbstractPyTorchAction<InferenceResults> { private final NlpInferenceInput input; @Nullable private final CancellableTask parentActionTask; + private final TrainedModelPrefixStrings.PrefixType prefixType; InferencePyTorchAction( String deploymentId, @@ -47,6 +49,7 @@ class InferencePyTorchAction extends AbstractPyTorchAction<InferenceResults> { DeploymentManager.ProcessContext processContext, InferenceConfig config, NlpInferenceInput input, + TrainedModelPrefixStrings.PrefixType prefixType, ThreadPool threadPool, @Nullable CancellableTask parentActionTask, ActionListener<InferenceResults> listener @@ -54,6 +57,7 @@ class InferencePyTorchAction extends AbstractPyTorchAction<InferenceResults> { super(deploymentId, requestId, timeout, processContext, threadPool, listener); this.config = config; this.input = input; + this.prefixType = prefixType; this.parentActionTask = parentActionTask; } @@ -83,15 +87,39 @@ protected void doRun() throws Exception { final String requestIdStr = String.valueOf(getRequestId()); try { + String inputText = input.extractInput(getProcessContext().getModelInput().get()); + if (prefixType != TrainedModelPrefixStrings.PrefixType.NONE) { + var prefixStrings = getProcessContext().getPrefixStrings().get(); + if (prefixStrings != null) { + switch (prefixType) { + case SEARCH: { + if (Strings.isNullOrEmpty(prefixStrings.searchPrefix()) == false) { + inputText = prefixStrings.searchPrefix() + inputText; + } + } + break; + case INGEST: { + if (Strings.isNullOrEmpty(prefixStrings.ingestPrefix()) == false) { + inputText = prefixStrings.ingestPrefix() + inputText; + } + } + break; + default: + throw new IllegalStateException("[" + getDeploymentId() + "] Unhandled input prefix type [" + prefixType + "]"); + } + } + } + + // The request builder expects a list of inputs which are then batched. // TODO batching was implemented for expected use-cases such as zero-shot classification but is not used here.
- List<String> text = Collections.singletonList(input.extractInput(getProcessContext().getModelInput().get())); + var inputs = List.of(inputText); + NlpTask.Processor processor = getProcessContext().getNlpTaskProcessor().get(); - processor.validateInputs(text); + processor.validateInputs(inputs); assert config instanceof NlpConfig; NlpConfig nlpConfig = (NlpConfig) config; NlpTask.Request request = processor.getRequestBuilder(nlpConfig) - .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate(), nlpConfig.getTokenization().getSpan()); + .buildRequest(inputs, requestIdStr, nlpConfig.getTokenization().getTruncate(), nlpConfig.getTokenization().getSpan()); logger.debug(() -> format("handling request [%s]", requestIdStr)); // Tokenization is non-trivial, so check for cancellation one last time before sending request to the native process diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java index ea06a0a0aba90..cd7ed9e3eb55a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction.TaskParams; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -148,6 +149,7 @@ public void infer( InferenceConfigUpdate update, boolean skipQueue, TimeValue timeout, + TrainedModelPrefixStrings.PrefixType prefixType, CancellableTask parentActionTask, ActionListener<InferenceResults> listener ) { @@ -175,6 +177,7 @@ public void infer( input, skipQueue, timeout, + prefixType, parentActionTask, listener ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index 5518903dde125..e600ddd42107f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -26,6 +26,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; @@ -245,13 +246,15 @@ InferModelAction.Request buildRequest(IngestDocument ingestDocument) { } } } - return InferModelAction.Request.forTextInput( + var request = InferModelAction.Request.forTextInput( modelId, inferenceConfig, requestInputs, previouslyLicensed, InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST ); +
request.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); + return request; } else { Map<String, Object> fields = new HashMap<>(ingestDocument.getSourceAndMetadata()); // Add ingestMetadata as previous processors might have added metadata from which we are predicting (see: foreach processor) @@ -260,13 +263,15 @@ InferModelAction.Request buildRequest(IngestDocument ingestDocument) { } LocalModel.mapFieldsIfNecessary(fields, fieldMap); - return InferModelAction.Request.forIngestDocs( + var request = InferModelAction.Request.forIngestDocs( modelId, List.of(fields), inferenceConfig, previouslyLicensed, InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST ); + request.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); + return request; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 1144b1afffdcb..8deac327c065e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -81,6 +81,7 @@ import java.io.IOException; import java.io.InputStream; import java.time.Duration; +import java.time.Instant; import java.time.ZonedDateTime; import java.util.Date; import java.util.Iterator; @@ -1000,7 +1001,7 @@ public Optional<Duration> jobOpenTime(JobTask jobTask) { } void setJobState(JobTask jobTask, JobState state, String reason) { - JobTaskState jobTaskState = new JobTaskState(state, jobTask.getAllocationId(), reason); + JobTaskState jobTaskState = new JobTaskState(state, jobTask.getAllocationId(), reason, Instant.now()); jobTask.updatePersistentTaskState( jobTaskState, ActionListener.wrap( @@ -1019,7 +1020,7 @@ private static void logSetJobStateFailure(JobState state, String jobId, Exceptio } void setJobState(JobTask jobTask, JobState state, String reason, CheckedConsumer<Exception, IOException> handler) { - JobTaskState jobTaskState = new JobTaskState(state, jobTask.getAllocationId(), reason); + JobTaskState jobTaskState = new JobTaskState(state, jobTask.getAllocationId(), reason, Instant.now()); jobTask.updatePersistentTaskState(jobTaskState, ActionListener.wrap(persistentTask -> { try { handler.accept(null); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 15b1993dc0586..09cd6225cf0ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -61,6 +61,7 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.task.AbstractJobPersistentTasksExecutor; +import java.time.Instant; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -344,7 +345,7 @@ private void runJob(JobTask jobTask, JobState jobState, OpenJobAction.JobParams private void failTask(JobTask jobTask, String reason) { String jobId = jobTask.getJobId(); auditor.error(jobId, reason); - JobTaskState failedState = new JobTaskState(JobState.FAILED, jobTask.getAllocationId(), reason); + JobTaskState failedState = new JobTaskState(JobState.FAILED, jobTask.getAllocationId(), reason,
Instant.now()); jobTask.updatePersistentTaskState(failedState, ActionListener.wrap(r -> { logger.debug("[{}] updated task state to failed", jobId); stopAssociatedDatafeedForFailedJob(jobId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index 40e4f5d9ede78..7cdeeb3d559ec 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -25,6 +25,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; @@ -131,6 +132,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API ); inferRequest.setHighPriority(true); + inferRequest.setPrefixType(TrainedModelPrefixStrings.PrefixType.SEARCH); SetOnce<TextExpansionResults> textExpansionResultsSupplier = new SetOnce<>(); queryRewriteContext.registerAsyncAction((client, listener) -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java index e2e096ae903ea..f53a82f54a67e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatTrainedModelsAction.java @@ -7,14 +7,15 @@ package org.elasticsearch.xpack.ml.rest.cat; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -32,7 +33,6 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.security.user.InternalUsers; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -85,15 +85,23 @@ protected RestChannelConsumer doCatRequest(RestRequest restRequest, NodeClient c statsRequest.setAllowNoResources(true); modelsAction.setAllowNoResources(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), statsRequest.isAllowNoResources())); - return channel -> { - final ActionListener<Table> listener = ActionListener.notifyOnce(new RestResponseListener<>(channel) { - @Override - public RestResponse buildResponse(final Table table) throws Exception { -
return RestTable.buildResponse(table, channel); - } - }); + return new RestChannelConsumer() { + @Override + public void accept(RestChannel channel) { + SubscribableListener.newForked(this::getTrainedModels).andThen(this::getDerivedData).addListener(newRestListener(channel)); + } + + private List<GetTrainedModelsStatsAction.Response.TrainedModelStats> trainedModelsStats; + private List<DataFrameAnalyticsConfig> dataFrameAnalytics; + + private void getTrainedModels(ActionListener<GetTrainedModelsAction.Response> listener) { + client.execute(GetTrainedModelsAction.INSTANCE, modelsAction, listener); + } - client.execute(GetTrainedModelsAction.INSTANCE, modelsAction, ActionListener.wrap(trainedModels -> { + private void getDerivedData( + ActionListener<GetTrainedModelsAction.Response> listener, + GetTrainedModelsAction.Response trainedModels + ) { final List<TrainedModelConfig> trainedModelConfigs = trainedModels.getResources().results(); Set<String> potentialAnalyticsIds = new HashSet<>(); @@ -105,28 +113,35 @@ public RestResponse buildResponse(final Table table) throws Exception { // Find the related DataFrameAnalyticsConfigs String requestIdPattern = Strings.collectionToDelimitedString(potentialAnalyticsIds, "*,") + "*"; - final GroupedActionListener<ActionResponse> groupedListener = createGroupedListener( - restRequest, - 2, - trainedModels.getResources().results(), - listener - ); + try (var listeners = new RefCountingListener(listener.map(ignored -> trainedModels))) { + client.execute( + GetTrainedModelsStatsAction.INSTANCE, + statsRequest, + listeners.acquire(response -> trainedModelsStats = response.getResources().results()) + ); - client.execute( - GetTrainedModelsStatsAction.INSTANCE, - statsRequest, - ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure) - ); + final var dataFrameAnalyticsRequest = new GetDataFrameAnalyticsAction.Request(requestIdPattern); + dataFrameAnalyticsRequest.setAllowNoResources(true); + dataFrameAnalyticsRequest.setPageParams(new PageParams(0, potentialAnalyticsIds.size())); + client.execute( + GetDataFrameAnalyticsAction.INSTANCE, + dataFrameAnalyticsRequest, + listeners.acquire(response -> dataFrameAnalytics = response.getResources().results()) + ); + } + } - GetDataFrameAnalyticsAction.Request dataFrameAnalyticsRequest = new GetDataFrameAnalyticsAction.Request(requestIdPattern); - dataFrameAnalyticsRequest.setAllowNoResources(true); - dataFrameAnalyticsRequest.setPageParams(new PageParams(0, potentialAnalyticsIds.size())); - client.execute( - GetDataFrameAnalyticsAction.INSTANCE, - dataFrameAnalyticsRequest, - ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure) - ); - }, listener::onFailure)); + private ActionListener<GetTrainedModelsAction.Response> newRestListener(RestChannel channel) { + return new RestResponseListener<>(channel) { + @Override + public RestResponse buildResponse(final GetTrainedModelsAction.Response trainedModels) throws Exception { + return RestTable.buildResponse( + buildTable(restRequest, trainedModelsStats, trainedModels.getResources().results(), dataFrameAnalytics), + channel + ); + } + }; + } }; } @@ -230,19 +245,6 @@ protected Table getTableWithHeader(RestRequest request) { return table; } - private GroupedActionListener<ActionResponse> createGroupedListener( - final RestRequest request, - final int size, - final List<TrainedModelConfig> configs, - final ActionListener<Table> listener - ) { - return new GroupedActionListener<>(size, listener.safeMap(responses -> { - GetTrainedModelsStatsAction.Response statsResponse = extractResponse(responses, GetTrainedModelsStatsAction.Response.class); - GetDataFrameAnalyticsAction.Response analytics = extractResponse(responses, GetDataFrameAnalyticsAction.Response.class); - return buildTable(request, statsResponse.getResources().results(), configs, analytics.getResources().results()); - })); - } - private Table buildTable( RestRequest request, List<GetTrainedModelsStatsAction.Response.TrainedModelStats> stats, @@ -302,9 +304,4 @@ private Table buildTable( }); return table; } - - @SuppressWarnings("unchecked") - private static <A extends ActionResponse> A extractResponse(final Collection<ActionResponse> responses, Class<A> c) { - return (A) responses.stream().filter(c::isInstance).findFirst().get(); - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java index 2e780c9849bd5..72663b3f8a7bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; @@ -100,6 +101,7 @@ public void buildVector(Client client, ActionListener<float[]> listener) { InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API ); inferRequest.setHighPriority(true); + inferRequest.setPrefixType(TrainedModelPrefixStrings.PrefixType.SEARCH); executeAsyncWithOrigin(client, ML_ORIGIN, InferModelAction.INSTANCE, inferRequest, ActionListener.wrap(response -> { if (response.getInferenceResults().isEmpty()) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java index f6c5924db37f8..a7a9122c96606 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java @@ -147,7 +147,7 @@ public void testIsNodeSafeToShutdownGivenFailedTasks() { new OpenJobAction.JobParams("job-1"), new PersistentTasksCustomMetadata.Assignment("node-1", "test assignment") ); - tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-1"), new JobTaskState(JobState.FAILED, 1, "testing")); + tasksBuilder.updateTaskState(MlTasks.jobTaskId("job-1"), new JobTaskState(JobState.FAILED, 1, "testing", Instant.now())); tasksBuilder.addTask( MlTasks.dataFrameAnalyticsTaskId("job-2"), MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, @@ -156,7 +156,7 @@ public void testIsNodeSafeToShutdownGivenFailedTasks() { ); tasksBuilder.updateTaskState( MlTasks.dataFrameAnalyticsTaskId("job-2"), - new DataFrameAnalyticsTaskState(DataFrameAnalyticsState.FAILED, 2, "testing") + new DataFrameAnalyticsTaskState(DataFrameAnalyticsState.FAILED, 2, "testing", Instant.now()) ); tasksBuilder.addTask( MlTasks.snapshotUpgradeTaskId("job-3",
"snapshot-3"), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java index c9a2482aac343..84c49ba95b522 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java @@ -141,6 +141,7 @@ public void testSetTrainedModelConfigFieldsFromPackagedModel() throws IOExceptio assertEquals(packageConfig.getDescription(), trainedModelConfig.getDescription()); assertEquals(packageConfig.getMetadata(), trainedModelConfig.getMetadata()); assertEquals(packageConfig.getTags(), trainedModelConfig.getTags()); + assertEquals(packageConfig.getPrefixStrings(), trainedModelConfig.getPrefixStrings()); // fully tested in {@link #testParseInferenceConfigFromModelPackage} assertNotNull(trainedModelConfig.getInferenceConfig()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsActionTests.java index 8e8b45ae10fd4..d1d3338ce14ba 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; import org.elasticsearch.xpack.ml.action.TransportStopDataFrameAnalyticsAction.AnalyticsByTaskState; +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -84,7 +85,7 @@ private static void addAnalyticsTask( if (state != null) { builder.updateTaskState( MlTasks.dataFrameAnalyticsTaskId(analyticsId), - new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId(), null) + new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId(), null, Instant.now()) ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java index 0028c66dd9659..42deca32811b2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; import java.net.InetAddress; +import java.time.Instant; import java.util.Collections; import java.util.List; import java.util.Map; @@ -178,7 +179,7 @@ public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignmentButFaile 1, AWAITING_LAZY_ASSIGNMENT ), - new JobTaskState(JobState.FAILED, 1, "a nasty bug") + new JobTaskState(JobState.FAILED, 1, "a nasty bug", Instant.now()) ) ), List.of(), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java index 2d9e19cbb3830..0b3851012d0e8 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; import org.junit.Before; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -1406,7 +1407,7 @@ public static void addAnalyticsTask( if (jobState != null) { builder.updateTaskState( MlTasks.dataFrameAnalyticsTaskId(jobId), - new DataFrameAnalyticsTaskState(jobState, builder.getLastAllocationId(), null) + new DataFrameAnalyticsTaskState(jobState, builder.getLastAllocationId(), null, Instant.now()) ); } } @@ -1419,7 +1420,10 @@ public static void addJobTask(String jobId, String nodeId, JobState jobState, Pe nodeId == null ? AWAITING_LAZY_ASSIGNMENT : new PersistentTasksCustomMetadata.Assignment(nodeId, "test assignment") ); if (jobState != null) { - builder.updateTaskState(MlTasks.jobTaskId(jobId), new JobTaskState(jobState, builder.getLastAllocationId(), null)); + builder.updateTaskState( + MlTasks.jobTaskId(jobId), + new JobTaskState(jobState, builder.getLastAllocationId(), null, Instant.now()) + ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 9da54416ce066..69d8663478b36 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -43,6 +43,7 @@ import org.junit.Before; import java.net.InetAddress; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -441,7 +442,7 @@ public void testSelectNode_jobTaskStale() { PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); addJobTask(job.getId(), nodeId, JobState.OPENED, tasksBuilder); // Set to lower allocationId, so job task is stale: - tasksBuilder.updateTaskState(MlTasks.jobTaskId(job.getId()), new JobTaskState(JobState.OPENED, 0, null)); + tasksBuilder.updateTaskState(MlTasks.jobTaskId(job.getId()), new JobTaskState(JobState.OPENED, 0, null, Instant.now())); tasks = tasksBuilder.build(); givenClusterState("foo", 1, 0); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index c2318c879328e..a2b7969210691 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -333,18 +333,20 @@ private void testSetFailed(boolean nodeShuttingDown) throws IOException { assertThat(parsedProgress.get().get(0), equalTo(new PhaseProgress("reindexing", 100))); } - verify(client).execute( - same(UpdatePersistentTaskStatusAction.INSTANCE), - eq( - new UpdatePersistentTaskStatusAction.Request( - "task-id", - 42, - new DataFrameAnalyticsTaskState(DataFrameAnalyticsState.FAILED, 42, "some exception") - ) - ), - any() + ArgumentCaptor<UpdatePersistentTaskStatusAction.Request> captor = ArgumentCaptor.forClass( + UpdatePersistentTaskStatusAction.Request.class ); + +
verify(client).execute(same(UpdatePersistentTaskStatusAction.INSTANCE), captor.capture(), any()); + + UpdatePersistentTaskStatusAction.Request request = captor.getValue(); + assertThat(request.getTaskId(), equalTo("task-id")); + DataFrameAnalyticsTaskState state = (DataFrameAnalyticsTaskState) request.getState(); + assertThat(state.getState(), equalTo(DataFrameAnalyticsState.FAILED)); + assertThat(state.getAllocationId(), equalTo(42L)); + assertThat(state.getReason(), equalTo("some exception")); } + verifyNoMoreInteractions(client, analyticsManager, auditor, taskManager); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java index 028c4b48ad355..6d963cae8159c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.ml.inference.pytorch.PriorityProcessWorkerExecutorService; import org.elasticsearch.xpack.ml.inference.pytorch.process.PyTorchProcessFactory; @@ -102,6 +103,7 @@ public void testRejectedExecution() { NlpInferenceInput.fromText("foo"), false, TimeValue.timeValueMinutes(1), + TrainedModelPrefixStrings.PrefixType.NONE, null, ActionListener.wrap(result -> fail("unexpected success"), e -> assertThat(e, instanceOf(EsRejectedExecutionException.class))) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchActionTests.java index c937e9be24b01..4fa0876991e3b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchActionTests.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ml.inference.deployment; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceResults; @@ -20,19 +22,30 @@ import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.PassThroughConfig; +import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; +import org.elasticsearch.xpack.ml.inference.pytorch.process.PyTorchProcess; import 
org.elasticsearch.xpack.ml.inference.pytorch.process.PyTorchResultProcessor; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.xpack.ml.MachineLearning.UTILITY_THREAD_POOL_NAME; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -79,6 +92,7 @@ public void testInferListenerOnlyCalledOnce() { processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, null, listener @@ -100,6 +114,7 @@ public void testInferListenerOnlyCalledOnce() { processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, null, listener @@ -122,6 +137,7 @@ public void testInferListenerOnlyCalledOnce() { processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, null, listener @@ -153,6 +169,7 @@ public void testRunNotCalledAfterNotified() { processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, null, listener @@ -171,6 +188,7 @@ public void testRunNotCalledAfterNotified() { processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, null, listener @@ -214,6 +232,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, processContext, new PassThroughConfig(null, null, null), NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, tp, cancellableTask, listener @@ -227,6 +246,170 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, verify(resultProcessor, never()).registerRequest(anyString(), any()); } + @SuppressWarnings("unchecked") + public void testPrefixStrings() throws Exception { + DeploymentManager.ProcessContext processContext = mock(DeploymentManager.ProcessContext.class); + + TrainedModelPrefixStrings prefixStrings = new TrainedModelPrefixStrings("ingest_prefix: ", "search_prefix: "); + when(processContext.getPrefixStrings()).thenReturn(new SetOnce<>(prefixStrings)); + + TrainedModelInput modelInput = new TrainedModelInput(List.of("text_field")); + when(processContext.getModelInput()).thenReturn(new SetOnce<>(modelInput)); + + NlpTask.Processor nlpProcessor = mock(NlpTask.Processor.class); + NlpTask.RequestBuilder requestBuilder = mock(NlpTask.RequestBuilder.class); + when(nlpProcessor.getRequestBuilder(any())).thenReturn(requestBuilder); + + NlpTask.Request builtRequest = new NlpTask.Request(mock(TokenizationResult.class), mock(BytesReference.class)); + when(requestBuilder.buildRequest(anyList(), anyString(), any(), anyInt())).thenReturn(builtRequest); + + when(processContext.getNlpTaskProcessor()).thenReturn(new SetOnce<>(nlpProcessor)); + PyTorchResultProcessor resultProcessor = new PyTorchResultProcessor("1", threadSettings -> {}); + + PyTorchProcess pyTorchProcess = 
mock(PyTorchProcess.class); + when(processContext.getProcess()).thenReturn(new SetOnce<>(pyTorchProcess)); + + when(processContext.getResultProcessor()).thenReturn(resultProcessor); + AtomicInteger timeoutCount = new AtomicInteger(); + when(processContext.getTimeoutCount()).thenReturn(timeoutCount); + + TestListenerCounter listener = new TestListenerCounter(); + { + // test for search prefix + InferencePyTorchAction action = new InferencePyTorchAction( + "test-model", + 1, + TimeValue.MAX_VALUE, + processContext, + new PassThroughConfig(null, null, null), + NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.SEARCH, + tp, + null, + listener + ); + action.init(); + action.doRun(); + + ArgumentCaptor<List<String>> inputsCapture = ArgumentCaptor.forClass(List.class); + verify(nlpProcessor).validateInputs(inputsCapture.capture()); + + assertThat(inputsCapture.getValue(), contains("search_prefix: foo")); + } + { + // Clear the previously verified invocations on this mock. + // Using this method is slightly controversial as it is + // not recommended by Mockito; however, it saves a lot + // of code rebuilding the mocks for each test. + Mockito.clearInvocations(nlpProcessor); + // test for ingest prefix + InferencePyTorchAction action = new InferencePyTorchAction( + "test-model", + 1, + TimeValue.MAX_VALUE, + processContext, + new PassThroughConfig(null, null, null), + NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.INGEST, + tp, + null, + listener + ); + action.init(); + action.doRun(); + + ArgumentCaptor<List<String>> inputsCapture = ArgumentCaptor.forClass(List.class); + verify(nlpProcessor).validateInputs(inputsCapture.capture()); + + assertThat(inputsCapture.getValue(), contains("ingest_prefix: foo")); + } + { + Mockito.clearInvocations(nlpProcessor); + // test no prefix + InferencePyTorchAction action = new InferencePyTorchAction( + "test-model", + 1, + TimeValue.MAX_VALUE, + processContext, + new PassThroughConfig(null, null, null), + NlpInferenceInput.fromText("foo"), + TrainedModelPrefixStrings.PrefixType.NONE, + tp, + null, + listener + ); + action.init(); + action.doRun(); + + ArgumentCaptor<List<String>> inputsCapture = ArgumentCaptor.forClass(List.class); + verify(nlpProcessor).validateInputs(inputsCapture.capture()); + + assertThat(inputsCapture.getValue(), contains("foo")); + } + { + // test search only prefix + TrainedModelPrefixStrings searchOnlyPrefix = new TrainedModelPrefixStrings(null, "search_prefix: "); + when(processContext.getPrefixStrings()).thenReturn(new SetOnce<>(searchOnlyPrefix)); + boolean isForSearch = randomBoolean(); + + Mockito.clearInvocations(nlpProcessor); + InferencePyTorchAction action = new InferencePyTorchAction( + "test-model", + 1, + TimeValue.MAX_VALUE, + processContext, + new PassThroughConfig(null, null, null), + NlpInferenceInput.fromText("foo"), + isForSearch ?
TrainedModelPrefixStrings.PrefixType.SEARCH : TrainedModelPrefixStrings.PrefixType.INGEST, + tp, + null, + listener + ); + action.init(); + action.doRun(); + + ArgumentCaptor<List<String>> inputsCapture = ArgumentCaptor.forClass(List.class); + verify(nlpProcessor).validateInputs(inputsCapture.capture()); + + if (isForSearch) { + assertThat(inputsCapture.getValue(), contains("search_prefix: foo")); + } else { + assertThat(inputsCapture.getValue(), contains("foo")); + } + } + { + // test ingest only prefix + TrainedModelPrefixStrings ingestOnlyPrefix = new TrainedModelPrefixStrings("ingest_prefix: ", null); + when(processContext.getPrefixStrings()).thenReturn(new SetOnce<>(ingestOnlyPrefix)); + boolean isForSearch = randomBoolean(); + + Mockito.clearInvocations(nlpProcessor); + InferencePyTorchAction action = new InferencePyTorchAction( + "test-model", + 1, + TimeValue.MAX_VALUE, + processContext, + new PassThroughConfig(null, null, null), + NlpInferenceInput.fromText("foo"), + isForSearch ? TrainedModelPrefixStrings.PrefixType.SEARCH : TrainedModelPrefixStrings.PrefixType.INGEST, + tp, + null, + listener + ); + action.init(); + action.doRun(); + + ArgumentCaptor<List<String>> inputsCapture = ArgumentCaptor.forClass(List.class); + verify(nlpProcessor).validateInputs(inputsCapture.capture()); + + if (isForSearch) { + assertThat(inputsCapture.getValue(), contains("foo")); + } else { + assertThat(inputsCapture.getValue(), contains("ingest_prefix: foo")); + } + } + } + static class TestListenerCounter implements ActionListener<InferenceResults> { private int responseCounts; private int failureCounts; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java index 88dcc2ba5d697..4821efa29631f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.ingest.TestIngestDocument; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationFeatureImportance; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionFeatureImportance; @@ -306,6 +307,7 @@ public void testGenerateRequestWithEmptyMapping() { var request = processor.buildRequest(document); assertThat(request.getObjectsToInfer().get(0), equalTo(source)); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); Map<String, Object> ingestMetadata = Collections.singletonMap("_value", 3); document = TestIngestDocument.ofIngestWithNullableVersion(source, ingestMetadata); @@ -316,6 +318,7 @@ public void testGenerateRequestWithEmptyMapping() { request = processor.buildRequest(document); assertThat(request.getObjectsToInfer().get(0), equalTo(expected)); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); } public void testGenerateWithMapping() { @@ -354,6 +357,7 @@ public void
testGenerateWithMapping() { var request = processor.buildRequest(document); assertThat(request.getObjectsToInfer().get(0), equalTo(expectedMap)); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); Map ingestMetadata = Collections.singletonMap("_value", "baz"); document = TestIngestDocument.ofIngestWithNullableVersion(source, ingestMetadata); @@ -363,6 +367,7 @@ public void testGenerateWithMapping() { request = processor.buildRequest(document); assertThat(request.getObjectsToInfer().get(0), equalTo(expectedMap)); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); } public void testGenerateWithMappingNestedFields() { @@ -607,6 +612,7 @@ public void testBuildRequestWithInputFields() { var requestInputs = request.getTextInput(); assertThat(requestInputs, contains("body_text", "title_text")); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); } public void testBuildRequestWithInputFields_WrongType() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java index 92dc9a9a749cf..112a8c80b0483 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java @@ -38,6 +38,7 @@ import org.junit.Before; import java.net.InetAddress; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -1429,7 +1430,7 @@ static void addDataFrameAnalyticsJobTask( if (state != null) { builder.updateTaskState( MlTasks.dataFrameAnalyticsTaskId(id), - new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId() - (isStale ? 1 : 0), null) + new DataFrameAnalyticsTaskState(state, builder.getLastAllocationId() - (isStale ? 
1 : 0), null, Instant.now()) ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTaskStateTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTaskStateTests.java index 8ba8a15cf66d0..db52e00b8b9ea 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTaskStateTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTaskStateTests.java @@ -16,7 +16,7 @@ public class JobTaskStateTests extends AbstractXContentSerializingTestCase {}); assertEquals(1, manager.numberOfOpenJobs()); assertTrue(manager.jobHasActiveAutodetectProcess(jobTask)); - verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L, null)), any()); + ArgumentCaptor captor = ArgumentCaptor.forClass(JobTaskState.class); + verify(jobTask).updatePersistentTaskState(captor.capture(), any()); + JobTaskState state = captor.getValue(); + assertThat(state.getState(), equalTo(JobState.OPENED)); + assertThat(state.getAllocationId(), equalTo(1L)); + assertNull(state.getReason()); } public void testOpenJob_withoutVersion() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index 0b563a8a08107..c3db184759d3f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -64,6 +64,7 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.junit.Before; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -238,7 +239,7 @@ public static void addJobTask( if (jobState != null) { builder.updateTaskState( MlTasks.jobTaskId(jobId), - new JobTaskState(jobState, builder.getLastAllocationId() - (isStale ? 1 : 0), null) + new JobTaskState(jobState, builder.getLastAllocationId() - (isStale ? 
1 : 0), null, Instant.now()) ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index a329a55d8afe9..d8edea137330f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.ml.MachineLearning; @@ -78,6 +79,7 @@ protected boolean canSimulateMethod(Method method, Object[] args) throws NoSuchM protected Object simulateMethod(Method method, Object[] args) { InferModelAction.Request request = (InferModelAction.Request) args[1]; assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.SEARCH, request.getPrefixType()); // Randomisation cannot be used here as {@code #doAssertLuceneQuery} // asserts that 2 rewritten queries are the same diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index e9a89a81f62e2..ed41042913421 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -12,6 +12,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -36,6 +39,7 @@ import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.license.LicenseSettings; import org.elasticsearch.persistent.PersistentTasksClusterService; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.script.IngestScript; @@ -76,6 +80,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; +import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; @@ -86,6 +91,8 @@ import org.junit.After; import org.junit.Before; +import java.time.Duration; +import java.time.Instant; import java.util.Arrays; import 
java.util.Collection;
 import java.util.Collections;
@@ -104,6 +111,7 @@
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -507,6 +515,31 @@ protected String awaitJobOpenedAndAssigned(String jobId, String queryNode) throw
         return jobNode.get();
     }
 
+    protected void assertRecentLastTaskStateChangeTime(String taskId, Duration howRecent, String queryNode) {
+        ClusterStateRequest csRequest = new ClusterStateRequest().clear().metadata(true);
+        ClusterStateResponse csResponse = client(queryNode).execute(ClusterStateAction.INSTANCE, csRequest).actionGet();
+        PersistentTasksCustomMetadata tasks = csResponse.getState().getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
+        assertNotNull(tasks);
+        PersistentTasksCustomMetadata.PersistentTask<?> task = tasks.getTask(taskId);
+        assertNotNull(task);
+        assertThat(task.getState(), instanceOf(MlTaskState.class));
+        MlTaskState state = (MlTaskState) task.getState();
+        assertNotNull(state.getLastStateChangeTime());
+        Instant now = Instant.now();
+        assertTrue(
+            "["
+                + taskId
+                + "] has last state change time ["
+                + state.getLastStateChangeTime()
+                + "] that is more than ["
+                + howRecent
+                + "] behind current time ["
+                + now
+                + "]",
+            state.getLastStateChangeTime().isAfter(now.minus(howRecent))
+        );
+    }
+
     /**
      * Sets delayed allocation to 0 to make sure that tests are not delayed
      */
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java
index 2c83777487685..8506be491f7e1 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.test.AbstractQueryVectorBuilderTestCase;
 import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xpack.core.ml.action.InferModelAction;
+import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings;
 import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults;
 import org.elasticsearch.xpack.ml.MachineLearning;
@@ -39,6 +40,7 @@ protected void doAssertClientRequest(ActionRequest request, TextEmbeddingQueryVe
         assertEquals(builder.getModelText(), inferRequest.getTextInput().get(0));
         assertEquals(builder.getModelId(), inferRequest.getId());
         assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API, inferRequest.getInferenceTimeout());
+        assertEquals(TrainedModelPrefixStrings.PrefixType.SEARCH, inferRequest.getPrefixType());
     }
 
     public ActionResponse createResponse(float[] array, TextEmbeddingQueryVectorBuilder builder) {
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java
index 571fe6bd803fc..2a949894f3033 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java
@@ -33,6 +33,8 @@
 import java.util.Locale;
 import java.util.Map;
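// The assertRecentLastTaskStateChangeTime(...) helper added to BaseMlIntegTestCase
// above boils down to a single java.time comparison. A standalone sketch of that
// check, with invented values and no test framework required:
//
//     import java.time.Duration;
//     import java.time.Instant;
//
//     class RecencyCheckSketch {
//         // A timestamp counts as "recent" if it falls after now - howRecent.
//         static boolean isRecent(Instant lastStateChangeTime, Duration howRecent, Instant now) {
//             return lastStateChangeTime.isAfter(now.minus(howRecent));
//         }
//
//         public static void main(String[] args) {
//             Instant now = Instant.now();
//             System.out.println(isRecent(now.minusSeconds(10), Duration.ofSeconds(30), now)); // true
//             System.out.println(isRecent(now.minusSeconds(60), Duration.ofSeconds(30), now)); // false
//         }
//     }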
+import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; + /** * Creates all index-templates and ILM policies that are required for using Elastic Universal Profiling. */ diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java index 7203b45e86efa..b56cd28e9dc6c 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java @@ -506,13 +506,12 @@ private void prewarmCache(ActionListener listener, Supplier cance CachedBlobContainerIndexInput cachedIndexInput = (CachedBlobContainerIndexInput) input; final AtomicBoolean alreadyCached = new AtomicBoolean(); - try (var fileListener = new RefCountingListener(ActionListener.runBefore(completionListener.acquire().map(v -> { + try (var fileListener = new RefCountingListener(ActionListener.runBefore(completionListener.acquire(v -> { if (alreadyCached.get()) { recoveryState.markIndexFileAsReused(file.physicalName()); } else { recoveryState.getIndex().addRecoveredFromSnapshotBytesToFile(file.physicalName(), file.length()); } - return v; }), () -> IOUtils.closeWhileHandlingException(cachedIndexInput)))) { if (cachedIndexInput.getPersistentCacheInitialLength() == file.length()) { alreadyCached.set(true); @@ -527,7 +526,7 @@ private void prewarmCache(ActionListener listener, Supplier cance for (int p = 0; p < file.numberOfParts(); p++) { final int part = p; - prewarmTaskRunner.enqueueTask(fileListener.acquire().map(releasable -> { + prewarmTaskRunner.enqueueTask(fileListener.acquire(releasable -> { try (releasable) { var fileName = file.physicalName(); final long startTimeInNanos = statsCurrentTimeNanosSupplier.getAsLong(); @@ -543,7 +542,6 @@ private void prewarmCache(ActionListener listener, Supplier cance prefetchedPartBytes ); } - return null; } })); } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 285f4f8f99039..b77fc542a7af4 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -302,6 +302,7 @@ public class Constants { "cluster:monitor/update/health/info", "cluster:monitor/ingest/geoip/stats", "cluster:monitor/main", + "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/hot_threads", "cluster:monitor/nodes/info", "cluster:monitor/nodes/stats", diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java index 4b7a9f46431a5..7cdc91b83afaf 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java @@ -336,16 +336,12 @@ private void putComposableIndexTemplate( ) { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings, null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings, null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client.execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } @@ -440,15 +436,11 @@ public Collection getSystemDataStreamDescriptors() { SYSTEM_DATA_STREAM_NAME, "a system data stream for testing", SystemDataStreamDescriptor.Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(SYSTEM_DATA_STREAM_NAME), - new Template(settings.build(), getTSDBMappings(), null, LIFECYCLE), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(SYSTEM_DATA_STREAM_NAME)) + .template(new Template(settings.build(), getTSDBMappings(), null, LIFECYCLE)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(), Map.of(), Collections.singletonList("test"), new ExecutorNames( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java index 93cd03060842b..fbb2832461a7c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java @@ -217,16 +217,12 @@ private static void putComposableIndexTemplate( ) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), - null, - null, - null, - metadata, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(settings, mappings == null ? 
null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .metadata(metadata) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } @@ -260,15 +256,11 @@ public Collection getSystemDataStreamDescriptors() { SYSTEM_DATA_STREAM_NAME, "a system data stream for testing", SystemDataStreamDescriptor.Type.EXTERNAL, - new ComposableIndexTemplate( - List.of(SYSTEM_DATA_STREAM_NAME), - new Template(Settings.EMPTY, null, null, DataStreamLifecycle.newBuilder().dataRetention(0).build()), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate() - ), + ComposableIndexTemplate.builder() + .indexPatterns(List.of(SYSTEM_DATA_STREAM_NAME)) + .template(new Template(Settings.EMPTY, null, null, DataStreamLifecycle.newBuilder().dataRetention(0).build())) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(), Map.of(), Collections.singletonList("test"), new ExecutorNames(ThreadPool.Names.SYSTEM_CRITICAL_READ, ThreadPool.Names.SYSTEM_READ, ThreadPool.Names.SYSTEM_WRITE) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java index 58d33fc221b21..1e67ae572e4ff 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java @@ -60,16 +60,10 @@ public void testRemoveGhostReference() throws Exception { var putTemplateRequest = new PutComposableIndexTemplateAction.Request("id"); putTemplateRequest.indexTemplate( - new ComposableIndexTemplate( - List.of("logs-*"), - null, - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); assertAcked(client.execute(PutComposableIndexTemplateAction.INSTANCE, putTemplateRequest).actionGet()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java index 8420f8fd8d481..d37e9a7f1aa3f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java @@ -255,16 +255,11 @@ public void testSearchResolveDataStreams() throws Exception { private void putComposableIndexTemplate(String id, List patterns) throws IOException { PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); request.indexTemplate( - new ComposableIndexTemplate( - patterns, - new Template(null, null, null), - null, - null, - null, - null, - new ComposableIndexTemplate.DataStreamTemplate(), - null - ) + ComposableIndexTemplate.builder() + .indexPatterns(patterns) + .template(new Template(null, null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build() ); 
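// The ComposableIndexTemplate.builder() shape that the hunks above and below migrate
// to, gathered into one sketch: each value the old positional constructor took is a
// named builder call, and anything omitted is simply left at its default. Only
// builder methods that appear in this patch are used; the pattern values are
// illustrative, not taken from any one test.
//
//     ComposableIndexTemplate template = ComposableIndexTemplate.builder()
//         .indexPatterns(List.of("logs-*"))
//         .template(new Template(Settings.EMPTY, null, null))
//         .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
//         .priority(100L)
//         .version(1L)
//         .build();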
client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java index 4705361e51dbd..d3e1f736c1267 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java @@ -125,22 +125,22 @@ public void testSecurityIndexSettingsCannotBeChanged() throws Exception { ) ); // create an new-style template - ComposableIndexTemplate cit = new ComposableIndexTemplate( - securityIndexNames, - new Template( - Settings.builder() - .put("index.refresh_interval", "1234s") - .put("index.priority", "9876") - .put("index.number_of_replicas", "8") - .build(), - null, - null - ), - null, - 4L, - 5L, - null - ); + ComposableIndexTemplate cit = ComposableIndexTemplate.builder() + .indexPatterns(securityIndexNames) + .template( + new Template( + Settings.builder() + .put("index.refresh_interval", "1234s") + .put("index.priority", "9876") + .put("index.number_of_replicas", "8") + .build(), + null, + null + ) + ) + .priority(4L) + .version(5L) + .build(); assertAcked( client().execute( PutComposableIndexTemplateAction.INSTANCE, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java index 56a102be9587f..3fa5e0e5319c7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java @@ -8,13 +8,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.Nullable; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -113,109 +113,75 @@ protected void masterOperation( OperatorPrivileges.OPERATOR_PRIVILEGES_ENABLED.get(settings) ); - final AtomicReference> rolesUsageRef = new AtomicReference<>(); - final AtomicReference> roleMappingUsageRef = new AtomicReference<>(); - final AtomicReference> realmsUsageRef = new AtomicReference<>(); - final AtomicReference> domainsUsageRef = new AtomicReference<>(); - final AtomicReference> userProfileUsageRef = new AtomicReference<>(); - final AtomicReference> remoteClusterServerUsageRef = new AtomicReference<>(); + final AtomicReference> rolesUsageRef = new AtomicReference<>(Map.of()); + final AtomicReference> roleMappingUsageRef = new AtomicReference<>(Map.of()); + final AtomicReference> realmsUsageRef = new AtomicReference<>(Map.of()); + final AtomicReference> domainsUsageRef = new 
AtomicReference<>(Map.of()); + final AtomicReference> userProfileUsageRef = new AtomicReference<>(Map.of()); + final AtomicReference> remoteClusterServerUsageRef = new AtomicReference<>(Map.of()); final boolean enabled = XPackSettings.SECURITY_ENABLED.get(settings); - final CountDown countDown = new CountDown(5); - final Runnable doCountDown = () -> { - if (countDown.countDown()) { - var usage = new SecurityFeatureSetUsage( - enabled, - realmsUsageRef.get(), - rolesUsageRef.get(), - roleMappingUsageRef.get(), - sslUsage, - auditUsage, - ipFilterUsage, - anonymousUsage, - tokenServiceUsage, - apiKeyServiceUsage, - fips140Usage, - operatorPrivilegesUsage, - domainsUsageRef.get(), - userProfileUsageRef.get(), - remoteClusterServerUsageRef.get() - ); - listener.onResponse(new XPackUsageFeatureResponse(usage)); - } - }; - - final ActionListener> rolesStoreUsageListener = ActionListener.wrap(rolesStoreUsage -> { - rolesUsageRef.set(rolesStoreUsage); - doCountDown.run(); - }, listener::onFailure); - - final ActionListener> roleMappingStoreUsageListener = ActionListener.wrap(nativeRoleMappingStoreUsage -> { - Map usage = singletonMap("native", nativeRoleMappingStoreUsage); - roleMappingUsageRef.set(usage); - doCountDown.run(); - }, listener::onFailure); - - final ActionListener> realmsUsageListener = ActionListener.wrap(realmsUsage -> { - realmsUsageRef.set(realmsUsage); - doCountDown.run(); - }, listener::onFailure); - - final ActionListener> userProfileUsageListener = ActionListener.wrap(userProfileUsage -> { - userProfileUsageRef.set(userProfileUsage); - doCountDown.run(); - }, listener::onFailure); - - final ActionListener> remoteClusterServerUsageListener = ActionListener.wrap(remoteClusterServerUsage -> { - remoteClusterServerUsageRef.set(remoteClusterServerUsage); - doCountDown.run(); - }, listener::onFailure); - - if (rolesStore == null || enabled == false) { - rolesStoreUsageListener.onResponse(Collections.emptyMap()); - } else { - rolesStore.usageStats(rolesStoreUsageListener); - } - if (roleMappingStore == null || enabled == false) { - roleMappingStoreUsageListener.onResponse(Collections.emptyMap()); - } else { - roleMappingStore.usageStats(roleMappingStoreUsageListener); - } - if (realms == null || enabled == false) { - domainsUsageRef.set(Map.of()); - realmsUsageListener.onResponse(Collections.emptyMap()); - } else { - domainsUsageRef.set(realms.domainUsageStats()); - realms.usageStats(realmsUsageListener); - } - if (profileService == null || enabled == false) { - userProfileUsageListener.onResponse(Map.of()); - } else { - profileService.usageStats(userProfileUsageListener); - } - if (apiKeyService == null || enabled == false) { - remoteClusterServerUsageListener.onResponse(Map.of()); - } else { - remoteClusterServerUsage(remoteClusterServerUsageListener); - } - } - private void remoteClusterServerUsage(ActionListener> listener) { - apiKeyService.crossClusterApiKeyUsageStats( - ActionListener.wrap( - usage -> listener.onResponse( - Map.of( - "available", - ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE.checkWithoutTracking(licenseState), - "enabled", - RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.get(settings), - "api_keys", - usage + try ( + var listeners = new RefCountingListener( + listener.map( + ignored -> new XPackUsageFeatureResponse( + new SecurityFeatureSetUsage( + enabled, + realmsUsageRef.get(), + rolesUsageRef.get(), + roleMappingUsageRef.get(), + sslUsage, + auditUsage, + ipFilterUsage, + anonymousUsage, + tokenServiceUsage, + apiKeyServiceUsage, + fips140Usage, + 
operatorPrivilegesUsage, + domainsUsageRef.get(), + userProfileUsageRef.get(), + remoteClusterServerUsageRef.get() + ) ) - ), - listener::onFailure + ) ) - ); + ) { + if (enabled == false) { + return; + } + if (rolesStore != null) { + rolesStore.usageStats(listeners.acquire(rolesUsageRef::set)); + } + if (roleMappingStore != null) { + roleMappingStore.usageStats( + listeners.acquire(nativeRoleMappingStoreUsage -> roleMappingUsageRef.set(Map.of("native", nativeRoleMappingStoreUsage))) + ); + } + if (realms != null) { + domainsUsageRef.set(realms.domainUsageStats()); + realms.usageStats(listeners.acquire(realmsUsageRef::set)); + } + if (profileService != null) { + profileService.usageStats(listeners.acquire(userProfileUsageRef::set)); + } + if (apiKeyService != null) { + apiKeyService.crossClusterApiKeyUsageStats( + listeners.acquire( + usage -> remoteClusterServerUsageRef.set( + Map.of( + "available", + ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE.checkWithoutTracking(licenseState), + "enabled", + RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.get(settings), + "api_keys", + usage + ) + ) + ) + ); + } + } } static Map sslUsage(Settings settings) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 0e509c8af26b0..e2e1cf1511211 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -11,7 +11,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; @@ -57,6 +60,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -368,50 +372,79 @@ public void putPrivileges( WriteRequest.RefreshPolicy refreshPolicy, ActionListener>> listener ) { - securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - ActionListener groupListener = new GroupedActionListener<>( - privileges.size(), - ActionListener.wrap((Collection responses) -> { - final Map> createdNames = responses.stream() - .filter(r -> r.getResult() == DocWriteResponse.Result.CREATED) - .map(r -> r.getId()) - .map(NativePrivilegeStore::nameFromDocId) - .collect(TUPLES_TO_MAP); - clearCaches( - listener, - privileges.stream().map(ApplicationPrivilegeDescriptor::getApplication).collect(Collectors.toUnmodifiableSet()), - createdNames - ); - }, listener::onFailure) - ); + if (privileges.isEmpty()) { + listener.onResponse(Map.of()); + return; + } + + final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + bulkRequestBuilder.setRefreshPolicy(refreshPolicy); + + try { for (ApplicationPrivilegeDescriptor privilege : privileges) { - innerPutPrivilege(privilege, refreshPolicy, groupListener); + 
bulkRequestBuilder.add(preparePutPrivilege(privilege));
+            }
+        } catch (IOException e) {
+            listener.onFailure(e);
+            return; // without this, the bulk request below would still execute and notify the listener a second time
+        }
+
+        securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
+            ClientHelper.executeAsyncWithOrigin(
+                client.threadPool().getThreadContext(),
+                SECURITY_ORIGIN,
+                bulkRequestBuilder.request(),
+                ActionListener.wrap(bulkResponse -> handleBulkResponse(bulkResponse, listener), ex -> {
+                    logger.warn(Strings.format("Failed to write application privileges to %s", securityIndexManager.aliasName()), ex);
+                    listener.onFailure(ex);
+                }),
+                client::bulk
+            );
         });
     }
 
-    private void innerPutPrivilege(
-        ApplicationPrivilegeDescriptor privilege,
-        WriteRequest.RefreshPolicy refreshPolicy,
-        ActionListener<IndexResponse> listener
-    ) {
+    private IndexRequest preparePutPrivilege(ApplicationPrivilegeDescriptor privilege) throws IOException {
         try {
             final String name = privilege.getName();
             final XContentBuilder xContentBuilder = privilege.toXContent(jsonBuilder(), true);
-            ClientHelper.executeAsyncWithOrigin(
-                client.threadPool().getThreadContext(),
-                SECURITY_ORIGIN,
-                client.prepareIndex(SECURITY_MAIN_ALIAS)
-                    .setId(toDocId(privilege.getApplication(), name))
-                    .setSource(xContentBuilder)
-                    .setRefreshPolicy(refreshPolicy)
-                    .request(),
-                listener,
-                client::index
-            );
-        } catch (Exception e) {
-            logger.warn("Failed to put privilege {} - {}", Strings.toString(privilege), e.toString());
-            listener.onFailure(e);
+            return client.prepareIndex(SECURITY_MAIN_ALIAS)
+                .setId(toDocId(privilege.getApplication(), name))
+                .setSource(xContentBuilder)
+                .request();
+        } catch (IOException e) {
+            logger.warn("Failed to build application privilege {} - {}", Strings.toString(privilege), e.toString());
+            throw e;
+        }
+    }
+
+    private void handleBulkResponse(BulkResponse bulkResponse, ActionListener<Map<String, List<String>>> listener) {
+        ElasticsearchException failure = null;
+        final Map<String, List<String>> createdPrivilegesByAppName = new HashMap<>();
+        for (var item : bulkResponse.getItems()) {
+            if (item.isFailed()) {
+                if (failure == null) {
+                    failure = new ElasticsearchException("Failed to put application privileges", item.getFailure().getCause());
+                } else {
+                    failure.addSuppressed(item.getFailure().getCause());
+                }
+            } else {
+                if (item.getResponse().getResult() == DocWriteResponse.Result.CREATED) {
+                    final Tuple<String, String> name = nameFromDocId(item.getId());
+                    final String appName = name.v1();
+                    final String privilegeName = name.v2();
+
+                    List<String> createdPrivileges = createdPrivilegesByAppName.get(appName);
+                    if (createdPrivileges == null) {
+                        createdPrivileges = new ArrayList<>();
+                        createdPrivilegesByAppName.put(appName, createdPrivileges);
+                    }
+                    createdPrivileges.add(privilegeName);
+                }
+            }
+        }
+        if (failure != null) {
+            listener.onFailure(failure);
+        } else {
+            clearCaches(listener, createdPrivilegesByAppName.keySet(), createdPrivilegesByAppName);
         }
     }
 
@@ -465,7 +498,7 @@ public void onFailure(Exception e) {
                 logger.error("unable to clear application privileges and role cache", e);
                 listener.onFailure(
                     new ElasticsearchException(
-                        "clearing the application privileges and role cache failed. 
" + "please clear the caches manually", + "clearing the application privileges and role cache failed, please clear the caches manually", e ) ); @@ -473,6 +506,9 @@ public void onFailure(Exception e) { }); } + /** + * @return A Tuple of (application-name, privilege-name) + */ private static Tuple nameFromDocId(String docId) { final String name = docId.substring(DOC_TYPE_VALUE.length() + 1); assert name != null && name.length() > 0 : "Invalid name '" + name + "'"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index ecc69e957d8ba..0d8c44964b01f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -11,6 +11,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; @@ -748,15 +752,20 @@ public void testPutPrivileges() throws Exception { final PlainActionFuture>> putPrivilegeFuture = new PlainActionFuture<>(); store.putPrivileges(putPrivileges, WriteRequest.RefreshPolicy.IMMEDIATE, putPrivilegeFuture); - assertThat(requests, iterableWithSize(putPrivileges.size())); - assertThat(requests, everyItem(instanceOf(IndexRequest.class))); + assertThat(requests, iterableWithSize(1)); + assertThat(requests, everyItem(instanceOf(BulkRequest.class))); - final List indexRequests = new ArrayList<>(requests.size()); - requests.stream().map(IndexRequest.class::cast).forEach(indexRequests::add); + final BulkRequest bulkRequest = (BulkRequest) requests.get(0); requests.clear(); - final ActionListener indexListener = listener.get(); + assertThat(bulkRequest.requests(), iterableWithSize(putPrivileges.size())); + assertThat(bulkRequest.requests(), everyItem(instanceOf(IndexRequest.class))); + + final List indexRequests = new ArrayList<>(putPrivileges.size()); + bulkRequest.requests().stream().map(IndexRequest.class::cast).forEach(indexRequests::add); + final String uuid = UUIDs.randomBase64UUID(random()); + final BulkItemResponse[] responses = new BulkItemResponse[putPrivileges.size()]; for (int i = 0; i < putPrivileges.size(); i++) { ApplicationPrivilegeDescriptor privilege = putPrivileges.get(i); IndexRequest request = indexRequests.get(i); @@ -765,11 +774,15 @@ public void testPutPrivileges() throws Exception { final XContentBuilder builder = privilege.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), true); assertThat(request.source(), equalTo(BytesReference.bytes(builder))); final boolean created = privilege.getName().equals("user") == false; - indexListener.onResponse( + responses[i] = BulkItemResponse.success( + i, + created ? 
DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.UPDATE, new IndexResponse(new ShardId(SecuritySystemIndices.SECURITY_MAIN_ALIAS, uuid, i), request.id(), 1, 1, 1, created) ); } + listener.get().onResponse(new BulkResponse(responses, randomLongBetween(1, 1_000))); + assertBusy(() -> assertFalse(requests.isEmpty()), 1, TimeUnit.SECONDS); assertThat(requests, iterableWithSize(1)); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/SpatialPlugin.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/SpatialPlugin.java index 7f171230e7628..7c2bc504eb768 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/SpatialPlugin.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/SpatialPlugin.java @@ -46,6 +46,8 @@ import org.elasticsearch.xpack.spatial.action.SpatialStatsTransportAction; import org.elasticsearch.xpack.spatial.action.SpatialUsageTransportAction; import org.elasticsearch.xpack.spatial.common.CartesianBoundingBox; +import org.elasticsearch.xpack.spatial.index.fielddata.CartesianShapeValues; +import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeScriptFieldType; import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper; import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper; @@ -201,7 +203,11 @@ public Map getProcessors(Processor.Parameters paramet @Override public List getGenericNamedWriteables() { - return List.of(new GenericNamedWriteableSpec("CartesianBoundingBox", CartesianBoundingBox::new)); + return List.of( + new GenericNamedWriteableSpec("CartesianBoundingBox", CartesianBoundingBox::new), + new GenericNamedWriteableSpec("GeoShapeValue", GeoShapeValues.GeoShapeValue::new), + new GenericNamedWriteableSpec("CartesianShapeValue", CartesianShapeValues.CartesianShapeValue::new) + ); } private static void registerGeoShapeBoundsAggregator(ValuesSourceRegistry.Builder builder) { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java index 2751dca2ef891..21b5b7934e42e 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java @@ -10,6 +10,7 @@ import org.apache.lucene.geo.Component2D; import org.apache.lucene.geo.XYGeometry; import org.apache.lucene.geo.XYPoint; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.StandardValidator; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -66,6 +67,11 @@ public CartesianShapeValue() { super(CoordinateEncoder.CARTESIAN, CartesianPoint::new); } + public CartesianShapeValue(StreamInput in) throws IOException { + this(); + this.reset(in); + } + @Override protected Component2D centroidAsComponent2D() throws IOException { return XYGeometry.create(new XYPoint((float) getX(), (float) getY())); @@ -80,5 +86,10 @@ protected Component2D centroidAsComponent2D() throws IOException { public GeoRelation relate(XYGeometry geometry) throws IOException { return relate(XYGeometry.create(geometry)); } + + @Override + public String getWriteableName() { + return 
"CartesianShapeValue"; + } } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java index b0bacf7d295f8..799ff035da73f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java @@ -12,6 +12,7 @@ import org.apache.lucene.geo.Point; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.geometry.utils.GeographyValidator; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.index.mapper.GeoShapeIndexer; @@ -69,6 +70,11 @@ public GeoShapeValue() { this.tile2DVisitor = new Tile2DVisitor(); } + public GeoShapeValue(StreamInput in) throws IOException { + this(); + reset(in); + } + @Override protected Component2D centroidAsComponent2D() throws IOException { return LatLonGeometry.create(new Point(getY(), getX())); @@ -93,5 +99,10 @@ public GeoRelation relate(int minX, int maxX, int minY, int maxY) throws IOExcep public GeoRelation relate(LatLonGeometry geometry) throws IOException { return relate(LatLonGeometry.create(geometry)); } + + @Override + public String getWriteableName() { + return "GeoShapeValue"; + } } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java index 51ce9124eac5b..16b655a1ad034 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java @@ -40,6 +40,7 @@ public class GeometryDocValueReader { private final Extent extent; private int treeOffset; private int docValueOffset; + private BytesRef bytesRef; public GeometryDocValueReader() { this.extent = new Extent(); @@ -50,6 +51,7 @@ public GeometryDocValueReader() { * reset the geometry. 
*/ public void reset(BytesRef bytesRef) throws IOException { + this.bytesRef = bytesRef; // Needed only for supporting Writable, maintaining original offset, not adjusted on from input this.input.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); docValueOffset = bytesRef.offset; treeOffset = 0; @@ -109,4 +111,7 @@ public void visit(TriangleTreeVisitor visitor) throws IOException { } } + public BytesRef getBytesRef() { + return bytesRef; + } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java index 1036030546bcf..2dcb2ff99848c 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java @@ -10,7 +10,14 @@ import org.apache.lucene.document.ShapeField; import org.apache.lucene.geo.Component2D; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.common.io.stream.GenericNamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownText; @@ -90,7 +97,7 @@ public T missing(String missing) { * thin wrapper around a {@link GeometryDocValueReader} which encodes / decodes values using * the provided decoder (could be geo or cartesian) */ - protected abstract static class ShapeValue implements ToXContentFragment { + protected abstract static class ShapeValue implements ToXContentFragment, GenericNamedWriteable { private final GeometryDocValueReader reader; private final BoundingBox boundingBox; private final CoordinateEncoder encoder; @@ -113,6 +120,11 @@ public void reset(BytesRef bytesRef) throws IOException { this.boundingBox.reset(reader.getExtent(), encoder); } + protected void reset(StreamInput in) throws IOException { + BytesReference bytes = in.readBytesReference(); + reset(bytes.toBytesRef()); + } + public BoundingBox boundingBox() { return boundingBox; } @@ -187,6 +199,29 @@ public double getX() throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { throw new IllegalArgumentException("cannot write xcontent for geo_shape doc value"); } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.SHAPE_VALUE_SERIALIZATION_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBytesReference(new BytesArray(reader.getBytesRef())); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof ShapeValue other) { + return reader.getBytesRef().equals(other.reader.getBytesRef()); + } + return false; + } + + @Override + public int hashCode() { + return reader.getBytesRef().hashCode(); + } } public static class BoundingBox { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/SpatialPluginTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/SpatialPluginTests.java index 
8129b26c28241..2c5e9bf4917ee 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/SpatialPluginTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/SpatialPluginTests.java @@ -141,7 +141,11 @@ public void testGenericNamedWriteables() { .filter(e -> e.categoryClass.equals(GenericNamedWriteable.class)) .map(e -> e.name) .collect(Collectors.toSet()); - assertThat("Expect both Geo and Cartesian BoundingBox", names, equalTo(Set.of("GeoBoundingBox", "CartesianBoundingBox"))); + assertThat( + "Expect both Geo and Cartesian BoundingBox and ShapeValue", + names, + equalTo(Set.of("GeoBoundingBox", "CartesianBoundingBox", "GeoShapeValue", "CartesianShapeValue")) + ); } private SpatialPlugin getPluginWithOperationMode(License.OperationMode operationMode) { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValuesGenericWriteableTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValuesGenericWriteableTests.java new file mode 100644 index 0000000000000..37997bd291996 --- /dev/null +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValuesGenericWriteableTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.spatial.index.fielddata; + +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.xpack.spatial.util.GeoTestUtils; + +import java.io.IOException; + +public class CartesianShapeValuesGenericWriteableTests extends ShapeValuesGenericWriteableTests { + + @Override + protected String shapeValueName() { + return "CartesianShapeValue"; + } + + @Override + protected GenericWriteableWrapper createTestInstance() { + try { + double minX = randomDoubleBetween(-Float.MAX_VALUE, 0, false); + double minY = randomDoubleBetween(-Float.MAX_VALUE, 0, false); + double maxX = randomDoubleBetween(minX + 10, Float.MAX_VALUE, false); + double maxY = randomDoubleBetween(minY + 10, Float.MAX_VALUE, false); + Rectangle rectangle = new Rectangle(minX, maxX, maxY, minY); + CartesianShapeValues.CartesianShapeValue shapeValue = GeoTestUtils.cartesianShapeValue(rectangle); + return new GenericWriteableWrapper(shapeValue); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + protected GenericWriteableWrapper mutateInstance(GenericWriteableWrapper instance) throws IOException { + ShapeValues.ShapeValue shapeValue = instance.shapeValue(); + ShapeValues.BoundingBox bbox = shapeValue.boundingBox(); + double height = bbox.maxY() - bbox.minY(); + double width = bbox.maxX() - bbox.minX(); + double xs = width * 0.001; + double ys = height * 0.001; + Rectangle rectangle = new Rectangle(bbox.minX() + xs, bbox.maxX() - xs, bbox.maxY() - ys, bbox.minY() + ys); + return new GenericWriteableWrapper(GeoTestUtils.cartesianShapeValue(rectangle)); + } +} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValuesGenericWriteableTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValuesGenericWriteableTests.java new file mode 100644 index 0000000000000..edd357738299c --- /dev/null +++ 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValuesGenericWriteableTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.spatial.index.fielddata; + +import org.elasticsearch.common.geo.GeoBoundingBox; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.xpack.spatial.util.GeoTestUtils; + +import java.io.IOException; + +public class GeoShapeValuesGenericWriteableTests extends ShapeValuesGenericWriteableTests { + + @Override + protected String shapeValueName() { + return "GeoShapeValue"; + } + + @Override + protected GenericWriteableWrapper createTestInstance() { + try { + GeoBoundingBox bbox = GeoTestUtils.randomBBox(); + Rectangle rectangle = new Rectangle(bbox.left(), bbox.right(), bbox.top(), bbox.bottom()); + GeoShapeValues.GeoShapeValue shapeValue = GeoTestUtils.geoShapeValue(rectangle); + return new GenericWriteableWrapper(shapeValue); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + protected GenericWriteableWrapper mutateInstance(GenericWriteableWrapper instance) throws IOException { + ShapeValues.ShapeValue shapeValue = instance.shapeValue(); + ShapeValues.BoundingBox bbox = shapeValue.boundingBox(); + double height = bbox.maxY() - bbox.minY(); + double width = bbox.maxX() - bbox.minX(); + double xs = width * 0.001; + double ys = height * 0.001; + Rectangle rectangle = new Rectangle(bbox.minX() + xs, bbox.maxX() - xs, bbox.maxY() - ys, bbox.minY() + ys); + return new GenericWriteableWrapper(GeoTestUtils.geoShapeValue(rectangle)); + } +} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValuesGenericWriteableTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValuesGenericWriteableTests.java new file mode 100644 index 0000000000000..cb123ad724dc0 --- /dev/null +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValuesGenericWriteableTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.spatial.index.fielddata; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.GenericNamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireTestCase; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; + +public abstract class ShapeValuesGenericWriteableTests extends AbstractWireTestCase< + ShapeValuesGenericWriteableTests.GenericWriteableWrapper> { + + /** + * Wrapper around a GeoShapeValue to verify that it round-trips via {@code writeGenericValue} and {@code readGenericValue} + */ + public record GenericWriteableWrapper(ShapeValues.ShapeValue shapeValue) implements Writeable { + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeGenericValue(shapeValue); + } + + public static GenericWriteableWrapper readFrom(StreamInput in) throws IOException { + return new GenericWriteableWrapper((ShapeValues.ShapeValue) in.readGenericValue()); + } + } + + private static final NamedWriteableRegistry NAMED_WRITEABLE_REGISTRY = new NamedWriteableRegistry( + List.of( + new NamedWriteableRegistry.Entry( + GenericNamedWriteable.class, + GeoShapeValues.GeoShapeValue.class.getSimpleName(), + GeoShapeValues.GeoShapeValue::new + ), + new NamedWriteableRegistry.Entry( + GenericNamedWriteable.class, + CartesianShapeValues.CartesianShapeValue.class.getSimpleName(), + CartesianShapeValues.CartesianShapeValue::new + ) + ) + ); + + @Override + protected NamedWriteableRegistry writableRegistry() { + return NAMED_WRITEABLE_REGISTRY; + } + + @Override + protected GenericWriteableWrapper copyInstance(GenericWriteableWrapper instance, TransportVersion version) throws IOException { + return copyInstance(instance, writableRegistry(), StreamOutput::writeWriteable, GenericWriteableWrapper::readFrom, version); + } + + protected abstract String shapeValueName(); + + public void testSerializationFailsWithOlderVersion() { + TransportVersion older = TransportVersions.KNN_AS_QUERY_ADDED; + assert older.before(TransportVersions.SHAPE_VALUE_SERIALIZATION_ADDED); + final var testInstance = createTestInstance().shapeValue(); + try (var output = new BytesStreamOutput()) { + output.setTransportVersion(older); + assertThat( + expectThrows(Throwable.class, () -> output.writeGenericValue(testInstance)).getMessage(), + containsString("[" + shapeValueName() + "] requires minimal transport version") + ); + } + } +} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java index 52d4e6ebb10cc..4e20e872ac446 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java @@ -28,6 +28,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.spatial.index.fielddata.CartesianShapeValues; import 
org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; @@ -70,6 +71,12 @@ public static GeoShapeValues.GeoShapeValue geoShapeValue(Geometry geometry) thro return value; } + public static CartesianShapeValues.CartesianShapeValue cartesianShapeValue(Geometry geometry) throws IOException { + CartesianShapeValues.CartesianShapeValue value = new CartesianShapeValues.CartesianShapeValue(); + value.reset(binaryCartesianShapeDocValuesField("test", geometry).binaryValue()); + return value; + } + public static GeoBoundingBox randomBBox() { Rectangle rectangle = GeometryTestUtils.randomRectangle(); return new GeoBoundingBox( diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index 0ece466dcdfad..85d778f9ec87f 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -37,7 +37,6 @@ dependencies { compileOnly project(path: xpackModule('ql')) testImplementation project(':test:framework') testImplementation(testArtifact(project(xpackModule('core')))) - testImplementation(testArtifact(project(xpackModule('security')))) testImplementation(testArtifact(project(xpackModule('ql')))) testImplementation project(path: ':modules:reindex') testImplementation project(path: ':modules:parent-join') diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 16bd33ca31d74..43d6d04bfac2c 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -38,8 +38,8 @@ public void checkSearchContent() throws IOException { /** * Read an address for Elasticsearch suitable for the JDBC driver from the system properties. */ - public static String elasticsearchAddress() { - String cluster = System.getProperty("tests.rest.cluster"); + public String elasticsearchAddress() { + String cluster = getTestRestCluster(); // JDBC only supports a single node at a time so we just give it one. return cluster.split(",")[0]; /* This doesn't include "jdbc:es://" because we want the example in diff --git a/x-pack/plugin/sql/qa/server/build.gradle b/x-pack/plugin/sql/qa/server/build.gradle index c2c9731b8d363..cee10d81c9573 100644 --- a/x-pack/plugin/sql/qa/server/build.gradle +++ b/x-pack/plugin/sql/qa/server/build.gradle @@ -38,73 +38,80 @@ subprojects { apply plugin: 'elasticsearch.java' } - if (project.name != 'security') { // The security project just configures its subprojects - apply plugin: 'elasticsearch.legacy-java-rest-test' - - testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.ml.enabled', 'false' - setting 'xpack.watcher.enabled', 'false' - } - - - dependencies { - configurations.javaRestTestRuntimeClasspath { - resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25" - } - configurations.javaRestTestRuntimeOnly { - // This is also required to make resolveAllDependencies work - resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25" - } - - /* Since we're a standalone rest test we actually get transitive - * dependencies but we don't really want them because they cause - * all kinds of trouble with the jar hell checks. 
So we suppress - * them explicitly for non-es projects. */ - javaRestTestImplementation(project(':x-pack:plugin:sql:qa:server')) { - transitive = false - } - javaRestTestImplementation project(":test:framework") - javaRestTestImplementation project(xpackModule('ql:test-fixtures')) - // JDBC testing dependencies - javaRestTestRuntimeOnly "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}" - javaRestTestRuntimeOnly "com.h2database:h2:${h2Version}" - - // H2GIS testing dependencies - javaRestTestRuntimeOnly("org.orbisgis:h2gis:${h2gisVersion}") - javaRestTestRuntimeOnly("org.orbisgis:h2gis-api:${h2gisVersion}") - javaRestTestRuntimeOnly("org.orbisgis:h2gis-utilities:${h2gisVersion}") - javaRestTestRuntimeOnly("org.orbisgis:cts:1.5.2") - - - javaRestTestRuntimeOnly project(path: xpackModule('sql:jdbc')) - javaRestTestRuntimeOnly project(':x-pack:plugin:sql:sql-client') - - // CLI testing dependencies - javaRestTestRuntimeOnly project(path: xpackModule('sql:sql-cli')) - javaRestTestRuntimeOnly(project(':x-pack:plugin:sql:sql-action')) { - transitive = false + if (project.parent.name == 'security') + { + apply plugin: 'elasticsearch.legacy-java-rest-test' + + testClusters.matching { it.name == "javaRestTest" }.configureEach { + testDistribution = 'DEFAULT' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.watcher.enabled', 'false' + } + } else { + apply plugin: 'elasticsearch.internal-java-rest-test' + tasks.named('javaRestTest') { + usesDefaultDistribution() + } } - javaRestTestRuntimeOnly("org.jline:jline-terminal-jna:${jlineVersion}") { - exclude group: "net.java.dev.jna" + dependencies { + configurations.javaRestTestRuntimeClasspath { + resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25" + } + configurations.javaRestTestRuntimeOnly { + // This is also required to make resolveAllDependencies work + resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25" + } + + /* Since we're a standalone rest test we actually get transitive + * dependencies but we don't really want them because they cause + * all kinds of trouble with the jar hell checks. So we suppress + * them explicitly for non-es projects. 
*/ + javaRestTestImplementation(project(':x-pack:plugin:sql:qa:server')) { + transitive = false + } + javaRestTestImplementation project(":test:framework") + javaRestTestImplementation project(xpackModule('ql:test-fixtures')) + + // JDBC testing dependencies + javaRestTestRuntimeOnly "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}" + javaRestTestRuntimeOnly "com.h2database:h2:${h2Version}" + + // H2GIS testing dependencies + javaRestTestRuntimeOnly("org.orbisgis:h2gis:${h2gisVersion}") + javaRestTestRuntimeOnly("org.orbisgis:h2gis-api:${h2gisVersion}") + javaRestTestRuntimeOnly("org.orbisgis:h2gis-utilities:${h2gisVersion}") + javaRestTestRuntimeOnly("org.orbisgis:cts:1.5.2") + + + javaRestTestRuntimeOnly project(path: xpackModule('sql:jdbc')) + javaRestTestRuntimeOnly project(':x-pack:plugin:sql:sql-client') + + // CLI testing dependencies + javaRestTestRuntimeOnly project(path: xpackModule('sql:sql-cli')) + javaRestTestRuntimeOnly(project(':x-pack:plugin:sql:sql-action')) { + transitive = false + } + + javaRestTestRuntimeOnly("org.jline:jline-terminal-jna:${jlineVersion}") { + exclude group: "net.java.dev.jna" + } + javaRestTestRuntimeOnly "org.jline:jline-terminal:${jlineVersion}" + javaRestTestRuntimeOnly "org.jline:jline-reader:${jlineVersion}" + javaRestTestRuntimeOnly "org.jline:jline-style:${jlineVersion}" + + javaRestTestRuntimeOnly "net.java.dev.jna:jna:${versions.jna}" + + // spatial dependency + javaRestTestRuntimeOnly project(path: xpackModule('spatial')) + javaRestTestRuntimeOnly project(path: ':modules:legacy-geo') + + javaRestTestRuntimeOnly project(path: ':modules:rest-root') + + javaRestTestRuntimeOnly "org.slf4j:slf4j-api:1.7.25" } - javaRestTestRuntimeOnly "org.jline:jline-terminal:${jlineVersion}" - javaRestTestRuntimeOnly "org.jline:jline-reader:${jlineVersion}" - javaRestTestRuntimeOnly "org.jline:jline-style:${jlineVersion}" - - javaRestTestRuntimeOnly "net.java.dev.jna:jna:${versions.jna}" - - // spatial dependency - javaRestTestRuntimeOnly project(path: xpackModule('spatial')) - javaRestTestRuntimeOnly project(path: ':modules:legacy-geo') - - javaRestTestRuntimeOnly project(path: ':modules:rest-root') - - javaRestTestRuntimeOnly "org.slf4j:slf4j-api:1.7.25" - } } } diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle index b42ae29e257f0..04f25f7175451 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle @@ -1,56 +1,6 @@ -import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.legacy-java-rest-test' - dependencies { javaRestTestImplementation project(path: xpackModule('ql:test-fixtures')) + clusterPlugins project(':x-pack:qa:freeze-plugin') } -def remoteClusterReg = testClusters.register('remote-cluster') { - testDistribution = 'DEFAULT' - setting 'node.roles', '[data,ingest,master]' - setting 'xpack.ml.enabled', 'false' - setting 'xpack.watcher.enabled', 'false' - setting 'xpack.security.enabled', 'true' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.autoconfiguration.enabled', 'false' - - user username: "test_user", password: "x-pack-test-password" - plugin ':x-pack:qa:freeze-plugin' -} - -def javaRestTestClusterReg = testClusters.register('javaRestTest') { - testDistribution = 'DEFAULT' - setting 
'xpack.ml.enabled', 'false' - setting 'xpack.watcher.enabled', 'false' - setting 'cluster.remote.my_remote_cluster.seeds', { - remoteClusterReg.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - }, IGNORE_VALUE - setting 'cluster.remote.connections_per_cluster', "1" - setting 'xpack.security.enabled', 'true' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.autoconfiguration.enabled', 'false' - - user username: "test_user", password: "x-pack-test-password" - plugin ':x-pack:qa:freeze-plugin' -} - -tasks.register("startRemoteCluster", DefaultTestClustersTask.class) { - useCluster remoteClusterReg - doLast { - "Starting remote cluster before integ tests and integ test cluster is started" - } -} - -tasks.named("javaRestTest").configure { - dependsOn 'startRemoteCluster' - useCluster remoteClusterReg - doFirst { - nonInputProperties.systemProperty 'tests.rest.cluster.remote.host', remoteClusterReg.map(c->c.getAllHttpSocketURI().get(0)) - // credentials for both local and remote clusters - nonInputProperties.systemProperty 'tests.rest.cluster.multi.user', "test_user" - nonInputProperties.systemProperty 'tests.rest.cluster.multi.password', "x-pack-test-password" - } -} tasks.named("check").configure {dependsOn("javaRestTest") } // run these tests as part of the "check" task diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCatalogIT.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCatalogIT.java index 8807eb679cc27..edd4f6c375e75 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCatalogIT.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCatalogIT.java @@ -8,44 +8,82 @@ package org.elasticsearch.xpack.sql.qa.multi_cluster_with_security; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; -import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; +import java.util.Properties; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.LOCAL_CLUSTER_NAME; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.PASSWORD; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.REMOTE_CLUSTER_ALIAS; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.USER_NAME; public class JdbcCatalogIT extends JdbcIntegrationTestCase { + public static SqlTestClusterWithRemote clusterAndRemote = new SqlTestClusterWithRemote(); + public static TestRule setupIndex = new TestRule() { + @Override + public Statement 
apply(Statement base, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + index(INDEX_NAME, body -> body.field("zero", 0), clusterAndRemote.getRemoteClient()); + base.evaluate(); + } finally { + clusterAndRemote.getRemoteClient().performRequest(new Request("DELETE", "/" + INDEX_NAME)); + } + } + }; + } + }; - // gradle defines - public static final String LOCAL_CLUSTER_NAME = "javaRestTest"; - public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; + @ClassRule + public static RuleChain testSetup = RuleChain.outerRule(clusterAndRemote).around(setupIndex); - private static final String INDEX_NAME = "test"; + @Override + protected String getTestRestCluster() { + return clusterAndRemote.getCluster().getHttpAddresses(); + } - @BeforeClass - static void setupIndex() throws IOException { - index(INDEX_NAME, body -> body.field("zero", 0)); + @Override + protected Settings restClientSettings() { + return clusterAndRemote.clusterAuthSettings(); } - @AfterClass - static void cleanupIndex() throws IOException { - provisioningClient().performRequest(new Request("DELETE", "/" + INDEX_NAME)); + @Override + protected RestClient provisioningClient() { + return clusterAndRemote.getRemoteClient(); } + @Override + protected Properties connectionProperties() { + Properties connectionProperties = super.connectionProperties(); + connectionProperties.put("user", USER_NAME); + connectionProperties.put("password", PASSWORD); + return connectionProperties; + } + + private static final String INDEX_NAME = "test"; + public void testJdbcSetCatalog() throws Exception { try (Connection es = esJdbc()) { PreparedStatement ps = es.prepareStatement("SELECT count(*) FROM " + INDEX_NAME); SQLException ex = expectThrows(SQLException.class, ps::executeQuery); assertTrue(ex.getMessage().contains("Unknown index [" + INDEX_NAME + "]")); - String catalog = REMOTE_CLUSTER_NAME.substring(0, randomIntBetween(0, REMOTE_CLUSTER_NAME.length())) + "*"; + String catalog = REMOTE_CLUSTER_ALIAS.substring(0, randomIntBetween(0, REMOTE_CLUSTER_ALIAS.length())) + "*"; es.setCatalog(catalog); assertEquals(catalog, es.getCatalog()); @@ -62,7 +100,7 @@ public void testJdbcSetCatalog() throws Exception { public void testQueryCatalogPrecedence() throws Exception { try (Connection es = esJdbc()) { - PreparedStatement ps = es.prepareStatement("SELECT count(*) FROM " + buildRemoteIndexName(REMOTE_CLUSTER_NAME, INDEX_NAME)); + PreparedStatement ps = es.prepareStatement("SELECT count(*) FROM " + buildRemoteIndexName(REMOTE_CLUSTER_ALIAS, INDEX_NAME)); es.setCatalog(LOCAL_CLUSTER_NAME); ResultSet rs = ps.executeQuery(); assertTrue(rs.next()); @@ -73,7 +111,7 @@ public void testQueryCatalogPrecedence() throws Exception { public void testQueryWithQualifierAndSetCatalog() throws Exception { try (Connection es = esJdbc()) { PreparedStatement ps = es.prepareStatement("SELECT " + INDEX_NAME + ".zero FROM " + INDEX_NAME); - es.setCatalog(REMOTE_CLUSTER_NAME); + es.setCatalog(REMOTE_CLUSTER_ALIAS); ResultSet rs = ps.executeQuery(); assertTrue(rs.next()); assertEquals(0, rs.getInt(1)); @@ -84,7 +122,7 @@ public void testQueryWithQualifierAndSetCatalog() throws Exception { public void testQueryWithQualifiedFieldAndIndex() throws Exception { try (Connection es = esJdbc()) { PreparedStatement ps = es.prepareStatement( - "SELECT " + INDEX_NAME + ".zero FROM " + buildRemoteIndexName(REMOTE_CLUSTER_NAME, INDEX_NAME) + "SELECT " + INDEX_NAME + ".zero FROM " + 
buildRemoteIndexName(REMOTE_CLUSTER_ALIAS, INDEX_NAME) ); es.setCatalog(LOCAL_CLUSTER_NAME); // set, but should be ignored ResultSet rs = ps.executeQuery(); @@ -105,7 +143,7 @@ public void testCatalogDependentCommands() throws Exception { ResultSet rs = ps.executeQuery(); assertFalse(rs.next()); - es.setCatalog(REMOTE_CLUSTER_NAME); + es.setCatalog(REMOTE_CLUSTER_ALIAS); rs = ps.executeQuery(); assertTrue(rs.next()); assertFalse(rs.next()); diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCsvSpecIT.java index 5a6e1956d39d1..6552cd0df2355 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcCsvSpecIT.java @@ -8,24 +8,55 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.ql.SpecReader; import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase; +import org.junit.ClassRule; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.Properties; import java.util.regex.Pattern; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.PASSWORD; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.REMOTE_CLUSTER_ALIAS; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.USER_NAME; public class JdbcCsvSpecIT extends CsvSpecTestCase { + @ClassRule + public static SqlTestClusterWithRemote clusterAndRemote = new SqlTestClusterWithRemote(); + + @Override + protected String getTestRestCluster() { + return clusterAndRemote.getCluster().getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + return clusterAndRemote.clusterAuthSettings(); + } + + @Override + protected RestClient provisioningClient() { + return clusterAndRemote.getRemoteClient(); + } + + @Override + protected Properties connectionProperties() { + Properties connectionProperties = super.connectionProperties(); + connectionProperties.put("user", USER_NAME); + connectionProperties.put("password", PASSWORD); + return connectionProperties; + } - public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; // gradle defined public static final String EXTRACT_FN_NAME = "EXTRACT"; private static final Pattern DESCRIBE_OR_SHOW = Pattern.compile("(?i)\\s*(DESCRIBE|SHOW).*"); @@ -58,7 +89,7 @@ private static CsvTestCase qualifyFromClause(CsvTestCase testCase) { j = j >= 0 ? 
i + j : query.length(); sb.append( query.substring(i, j) - .replaceAll("(?i)(FROM)(\\s+)(\\w+|\"[^\"]+\")", "$1$2" + buildRemoteIndexName(REMOTE_CLUSTER_NAME, "$3")) + .replaceAll("(?i)(FROM)(\\s+)(\\w+|\"[^\"]+\")", "$1$2" + buildRemoteIndexName(REMOTE_CLUSTER_ALIAS, "$3")) ); boolean inString = false, escaping = false; char stringDelim = 0, crrChar; @@ -104,7 +135,7 @@ public Connection esJdbc() throws SQLException { // Only set the default catalog if the query index isn't yet qualified with the catalog, which can happen if query has been written // qualified from the start (for the documentation) or edited in qualifyFromClause() above. if (isFromQualified(csvTestCase().query) == false) { - connection.setCatalog(REMOTE_CLUSTER_NAME); + connection.setCatalog(REMOTE_CLUSTER_ALIAS); } return connection; } diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcMetadataIT.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcMetadataIT.java index ce18532dc12a2..8317b8975382c 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcMetadataIT.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/JdbcMetadataIT.java @@ -7,17 +7,47 @@ package org.elasticsearch.xpack.sql.qa.multi_cluster_with_security; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase; +import org.junit.ClassRule; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Properties; + +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.LOCAL_CLUSTER_NAME; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.PASSWORD; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.REMOTE_CLUSTER_ALIAS; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.USER_NAME; public class JdbcMetadataIT extends JdbcIntegrationTestCase { + @ClassRule + public static SqlTestClusterWithRemote clusterAndRemote = new SqlTestClusterWithRemote(); + + @Override + protected String getTestRestCluster() { + return clusterAndRemote.getCluster().getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + return clusterAndRemote.clusterAuthSettings(); + } + + @Override + protected RestClient provisioningClient() { + return clusterAndRemote.getRemoteClient(); + } - // gradle defines - public static final String LOCAL_CLUSTER_NAME = "javaRestTest"; - public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; + @Override + protected Properties connectionProperties() { + Properties connectionProperties = super.connectionProperties(); + connectionProperties.put("user", USER_NAME); + connectionProperties.put("password", PASSWORD); + return connectionProperties; + } public void testJdbcGetClusters() throws SQLException { try (Connection es = esJdbc()) { @@ -26,7 +56,7 @@ public void testJdbcGetClusters() throws SQLException { assertTrue(rs.next()); assertEquals(LOCAL_CLUSTER_NAME, rs.getString(1)); assertTrue(rs.next()); - 
assertEquals(REMOTE_CLUSTER_NAME, rs.getString(1)); + assertEquals(REMOTE_CLUSTER_ALIAS, rs.getString(1)); assertFalse(rs.next()); } } diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/RestSqlIT.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/RestSqlIT.java index b56cde303446e..c8bb5608db1df 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/RestSqlIT.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/RestSqlIT.java @@ -6,20 +6,39 @@ */ package org.elasticsearch.xpack.sql.qa.multi_cluster_with_security; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase; +import org.junit.ClassRule; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; +import static org.elasticsearch.xpack.sql.qa.multi_cluster_with_security.SqlTestClusterWithRemote.REMOTE_CLUSTER_ALIAS; public class RestSqlIT extends RestSqlTestCase { + @ClassRule + public static SqlTestClusterWithRemote clusterAndRemote = new SqlTestClusterWithRemote(); - public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; // gradle defined + @Override + protected String getTestRestCluster() { + return clusterAndRemote.getCluster().getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + return clusterAndRemote.clusterAuthSettings(); + } + + @Override + protected RestClient provisioningClient() { + return clusterAndRemote.getRemoteClient(); + } @Override protected String indexPattern(String pattern) { if (randomBoolean()) { - return buildRemoteIndexName(REMOTE_CLUSTER_NAME, pattern); + return buildRemoteIndexName(REMOTE_CLUSTER_ALIAS, pattern); } else { - String cluster = REMOTE_CLUSTER_NAME.substring(0, randomIntBetween(0, REMOTE_CLUSTER_NAME.length())) + "*"; + String cluster = REMOTE_CLUSTER_ALIAS.substring(0, randomIntBetween(0, REMOTE_CLUSTER_ALIAS.length())) + "*"; if (pattern.startsWith("\\\"") && pattern.endsWith("\\\"") && pattern.length() > 4) { pattern = pattern.substring(2, pattern.length() - 2); } diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java new file mode 100644 index 0000000000000..a6e5baabd98f3 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.sql.qa.multi_cluster_with_security; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import java.io.IOException; + +import static org.elasticsearch.test.rest.ESRestTestCase.basicAuthHeaderValue; +import static org.elasticsearch.xpack.sql.qa.rest.RemoteClusterAwareSqlRestTestCase.clientBuilder; + +public class SqlTestClusterWithRemote implements TestRule { + public static final String LOCAL_CLUSTER_NAME = "javaRestTest"; + public static final String REMOTE_CLUSTER_NAME = "remote-cluster"; + public static final String REMOTE_CLUSTER_ALIAS = "my_remote_cluster"; + public static final String USER_NAME = "test_user"; + public static final String PASSWORD = "x-pack-test-password"; + + private static ElasticsearchCluster clusterSettings(String remoteAddress) { + return ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .name(LOCAL_CLUSTER_NAME) + .setting("xpack.ml.enabled", "false") + .setting("xpack.watcher.enabled", "false") + .setting("cluster.remote." + REMOTE_CLUSTER_ALIAS + ".seeds", remoteAddress) + .setting("cluster.remote.connections_per_cluster", "1") + .setting("xpack.security.enabled", "true") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.autoconfiguration.enabled", "false") + .user(USER_NAME, PASSWORD) + .plugin(":x-pack:qa:freeze-plugin") + .build(); + } + + private static ElasticsearchCluster remoteClusterSettings() { + return ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .name(REMOTE_CLUSTER_NAME) + .setting("node.roles", "[data,ingest,master]") + .setting("xpack.ml.enabled", "false") + .setting("xpack.watcher.enabled", "false") + .setting("xpack.security.enabled", "true") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.autoconfiguration.enabled", "false") + .user(USER_NAME, PASSWORD) + .plugin(":x-pack:qa:freeze-plugin") + .build(); + } + + /** + * Auth settings for both the cluster and the remote. + */ + private static Settings clientAuthSettings() { + final String value = basicAuthHeaderValue(USER_NAME, new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", value).build(); + } + + private ElasticsearchCluster cluster; + private final ElasticsearchCluster remote = remoteClusterSettings(); + private RestClient remoteClient; + + public Statement apply(Statement base, Description description) { + return remote.apply(startRemoteClient(startCluster(base)), null); + } + + public ElasticsearchCluster getCluster() { + return cluster; + } + + public Settings clusterAuthSettings() { + return clientAuthSettings(); + } + + public RestClient getRemoteClient() { + return remoteClient; + } + + private Statement startCluster(Statement base) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + // Remote address will look like [::1]:12345 - elasticsearch.yml does not like the square brackets. 
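+                // The local cluster has to be configured lazily here: its remote-seed setting needs the
+                // remote cluster's transport address, which only exists once the outer `remote` rule
+                // (see apply()) has started the remote cluster.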
+ String remoteAddress = remote.getTransportEndpoint(0).replaceAll("\\[|\\]", ""); + cluster = clusterSettings(remoteAddress); + cluster.apply(base, null).evaluate(); + } + }; + } + + private Statement startRemoteClient(Statement base) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + remoteClient = initRemoteClient(); + base.evaluate(); + } finally { + IOUtils.close(remoteClient); + } + } + }; + } + + private RestClient initRemoteClient() throws IOException { + String crossClusterHost = remote.getHttpAddress(0); + int portSeparator = crossClusterHost.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + crossClusterHost + "]"); + } + String host = crossClusterHost.substring(0, portSeparator); + int port = Integer.parseInt(crossClusterHost.substring(portSeparator + 1)); + HttpHost[] remoteHttpHosts = new HttpHost[] { new HttpHost(host, port) }; + + return clientBuilder(clientAuthSettings(), remoteHttpHosts); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/build.gradle b/x-pack/plugin/sql/qa/server/multi-node/build.gradle index 4ded053302803..e7a558ba68dd9 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-node/build.gradle @@ -6,9 +6,6 @@ description = 'Run a subset of SQL tests against multiple nodes' * feel should need to be tested against more than one node. */ -testClusters.matching { it.name == "javaRestTest" }.configureEach { - numberOfNodes = 2 - setting 'xpack.security.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' - plugin ':x-pack:qa:freeze-plugin' +dependencies { + clusterPlugins project(':x-pack:qa:freeze-plugin') } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java index fc4a04570ff67..6a920dcc00b7c 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; +import org.junit.ClassRule; -public class CliLenientIT extends LenientTestCase {} +public class CliLenientIT extends LenientTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliSelectIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliSelectIT.java index 6e8162ef11b67..c1ec6ffd25251 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliSelectIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliSelectIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import 
org.elasticsearch.xpack.sql.qa.cli.SelectTestCase; +import org.junit.ClassRule; -public class CliSelectIT extends SelectTestCase {} +public class CliSelectIT extends SelectTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliShowIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliShowIT.java index db1e506f74301..86d8d89e591ed 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliShowIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CliShowIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.cli.ShowTestCase; +import org.junit.ClassRule; -public class CliShowIT extends ShowTestCase {} +public class CliShowIT extends ShowTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CustomDateFormatIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CustomDateFormatIT.java index 81b3fd59c6bed..5b8b52e5312c8 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CustomDateFormatIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/CustomDateFormatIT.java @@ -7,8 +7,16 @@ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.CustomDateFormatTestCase; +import org.junit.ClassRule; public class CustomDateFormatIT extends CustomDateFormatTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcCsvSpecIT.java index bca7c41b539c8..e21e5cb64a7ab 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcCsvSpecIT.java @@ -7,11 +7,21 @@ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.geo.GeoCsvSpecTestCase; +import org.junit.ClassRule; import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; public class GeoJdbcCsvSpecIT extends GeoCsvSpecTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + 
return cluster.getHttpAddresses(); + } + public GeoJdbcCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { super(fileName, groupName, testName, lineNumber, testCase); } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcSqlSpecIT.java index 65b433afcd102..68f6701892ec6 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcSqlSpecIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/GeoJdbcSqlSpecIT.java @@ -7,9 +7,19 @@ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.geo.GeoSqlSpecTestCase; +import org.junit.ClassRule; public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + public GeoJdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) { super(fileName, groupName, testName, lineNumber, query); } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcDatabaseMetaDataIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcDatabaseMetaDataIT.java index 2477a04f95c8a..0de80872a0fa0 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcDatabaseMetaDataIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcDatabaseMetaDataIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.jdbc.DatabaseMetaDataTestCase; +import org.junit.ClassRule; -public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {} +public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcShowTablesIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcShowTablesIT.java index ded5bb81663de..3c8356b9e88f3 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcShowTablesIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/JdbcShowTablesIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.jdbc.ShowTablesTestCase; +import org.junit.ClassRule; -public class JdbcShowTablesIT extends ShowTablesTestCase {} +public class JdbcShowTablesIT extends ShowTablesTestCase { + @ClassRule + public static final 
ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlIT.java index 98a8441f8cdab..ae909789f9c66 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlIT.java @@ -6,10 +6,20 @@ */ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase; +import org.junit.ClassRule; /** * Integration test for the rest sql action. The one that speaks json directly to a * user rather than to the JDBC driver or CLI. */ -public class RestSqlIT extends RestSqlTestCase {} +public class RestSqlIT extends RestSqlTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java index 37e19fe428b4a..a51a2f0d34342 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java @@ -12,10 +12,12 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.test.NotEqualMessageBuilder; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.sql.qa.rest.BaseRestSqlTestCase; +import org.junit.ClassRule; import java.io.IOException; import java.nio.charset.UnsupportedCharsetException; @@ -34,6 +36,14 @@ * Tests specific to multiple nodes. */ public class RestSqlMultinodeIT extends ESRestTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + /** * Tests count of index run across multiple nodes. 
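+     * (SqlTestCluster provisions the two javaRestTest nodes, so the count is exercised against an index hosted by more than one node.)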
*/ diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlProtocolIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlProtocolIT.java index 7ea96c39f3b44..cd99bb3744864 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlProtocolIT.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlProtocolIT.java @@ -7,6 +7,16 @@ package org.elasticsearch.xpack.sql.qa.multi_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.SqlProtocolTestCase; +import org.junit.ClassRule; -public class SqlProtocolIT extends SqlProtocolTestCase {} +public class SqlProtocolIT extends SqlProtocolTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java new file mode 100644 index 0000000000000..9859be524ce6a --- /dev/null +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.sql.qa.multi_node; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; + +public class SqlTestCluster { + public static ElasticsearchCluster getCluster() { + return ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + .name("javaRestTest") + .setting("xpack.ml.enabled", "false") + .setting("xpack.watcher.enabled", "false") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .plugin(":x-pack:qa:freeze-plugin") + .build(); + } +} diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliSecurityIT.java index 4fb5143860380..d0eb8a4b6eade 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliSecurityIT.java @@ -21,12 +21,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.sql.qa.cli.CliIntegrationTestCase.elasticsearchAddress; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; public class CliSecurityIT extends SqlSecurityTestCase { - @Override public void testDescribeWorksAsFullAccess() {} @@ -64,7 +62,7 @@ static SecurityConfig adminSecurityConfig() { /** * Perform security test actions using the CLI. 
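+     * (Now an inner class so it can use instance helpers such as elasticsearchAddress(), which is no longer static.)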
*/ - private static class CliActions implements Actions { + private class CliActions implements Actions { @Override public String minimalPermissionsForAllActions() { return "cli_or_drivers_minimal"; @@ -227,7 +225,14 @@ protected void assertConnectionTest() throws IOException { } } + private final Actions actions; + + @Override + Actions actions() { + return actions; + } + public CliSecurityIT() { - super(new CliActions()); + actions = new CliActions(); } } diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java index 1d88bf4f59100..0e0c2bc8d78b4 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java @@ -26,7 +26,6 @@ import java.util.Properties; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcAssert.assertResultSets; -import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase.elasticsearchAddress; import static org.elasticsearch.xpack.sql.qa.security.RestSqlIT.SSL_ENABLED; import static org.hamcrest.Matchers.containsString; @@ -41,7 +40,7 @@ static Properties adminProperties() { return properties; } - static Connection es(Properties properties) throws SQLException { + Connection es(Properties properties) throws SQLException { Properties props = new Properties(); props.put("timezone", randomZone().getId()); props.putAll(properties); @@ -82,7 +81,7 @@ private static void addSslPropertiesIfNeeded(Properties properties) { properties.put("ssl.truststore.pass", "keypass"); } - static void expectActionMatchesAdmin( + void expectActionMatchesAdmin( CheckedFunction adminAction, String user, CheckedFunction userAction @@ -92,15 +91,15 @@ static void expectActionMatchesAdmin( } } - static void expectForbidden(String user, CheckedConsumer action) throws Exception { + void expectForbidden(String user, CheckedConsumer action) throws Exception { expectError(user, action, "is unauthorized for user [" + user + "]"); } - static void expectUnknownIndex(String user, CheckedConsumer action) throws Exception { + void expectUnknownIndex(String user, CheckedConsumer action) throws Exception { expectError(user, action, "Unknown index"); } - static void expectError(String user, CheckedConsumer action, String errorMessage) throws Exception { + void expectError(String user, CheckedConsumer action, String errorMessage) throws Exception { SQLException e; try (Connection connection = es(userProperties(user))) { e = expectThrows(SQLException.class, () -> action.accept(connection)); @@ -108,8 +107,7 @@ static void expectError(String user, CheckedConsumer a assertThat(e.getMessage(), containsString(errorMessage)); } - static void expectActionThrowsUnknownColumn(String user, CheckedConsumer action, String column) - throws Exception { + void expectActionThrowsUnknownColumn(String user, CheckedConsumer action, String column) throws Exception { SQLException e; try (Connection connection = es(userProperties(user))) { e = expectThrows(SQLException.class, () -> action.accept(connection)); @@ -117,7 +115,7 @@ static void expectActionThrowsUnknownColumn(String user, CheckedConsumer tables, String user) throws Exception @Override public void expectForbidden(String user, String sql) throws Exception { - JdbcSecurityIT.expectForbidden(user, con -> 
con.createStatement().executeQuery(sql)); + JdbcSecurityIT.this.expectForbidden(user, con -> con.createStatement().executeQuery(sql)); } @Override public void expectUnknownIndex(String user, String sql) throws Exception { - JdbcSecurityIT.expectUnknownIndex(user, con -> con.createStatement().executeQuery(sql)); + JdbcSecurityIT.this.expectUnknownIndex(user, con -> con.createStatement().executeQuery(sql)); } @Override @@ -245,8 +243,15 @@ private void expectUnauthorized(String action, String user, ThrowingRunnable r) } } + private final Actions actions; + + @Override + Actions actions() { + return actions; + } + public JdbcSecurityIT() { - super(new JdbcActions()); + actions = new JdbcActions(); } // Metadata methods only available to JDBC diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java index 3b23daf9dde54..7195fa00ce350 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java @@ -271,8 +271,15 @@ private static Map toMap(Response response, String mode) throws } } + private final Actions actions; + + @Override + Actions actions() { + return actions; + } + public RestSqlSecurityIT() { - super(new RestActions()); + actions = new RestActions(); } @Override diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java index 7fd65a19b090e..0ab942fcff39f 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java @@ -56,6 +56,11 @@ import static org.hamcrest.Matchers.is; public abstract class SqlSecurityTestCase extends ESRestTestCase { + public String elasticsearchAddress() { + // CLI only supports a single node at a time so we just give it one. + return getTestRestCluster().split(",")[0]; + } + /** * Actions taken by this test. *

@@ -131,7 +136,7 @@ private static Path lookupRolledOverAuditLog() {
     /**
      * The actions taken by this test.
      */
-    private final Actions actions;
+    abstract Actions actions();

     /**
      * How much of the audit log was written before the test started.
@@ -143,10 +148,6 @@ private static Path lookupRolledOverAuditLog() {
      */
     private static boolean auditFileRolledOver = false;

-    public SqlSecurityTestCase(Actions actions) {
-        this.actions = actions;
-    }
-
     /**
      * All tests run as an administrative user but use
      * es-security-runas-user to become a less privileged user when needed.
@@ -237,23 +238,23 @@ protected String getProtocol() {
     }

     public void testQueryWorksAsAdmin() throws Exception {
-        actions.queryWorksAsAdmin();
+        actions().queryWorksAsAdmin();
         createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test").assertLogs();
     }

     public void testQueryWithFullAccess() throws Exception {
-        createUser("full_access", actions.minimalPermissionsForAllActions());
+        createUser("full_access", actions().minimalPermissionsForAllActions());

-        actions.expectMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
+        actions().expectMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
             .expectSqlCompositeActionFieldCaps("full_access", "test")
             .assertLogs();
     }

     public void testScrollWithFullAccess() throws Exception {
-        createUser("full_access", actions.minimalPermissionsForAllActions());
+        createUser("full_access", actions().minimalPermissionsForAllActions());

-        actions.expectScrollMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
+        actions().expectScrollMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
             /* Scrolling doesn't have to access the index again, at least not through sql.
              * If we asserted query and scroll logs then we would see the scroll.
*/ @@ -268,14 +269,14 @@ public void testScrollWithFullAccess() throws Exception { public void testQueryNoAccess() throws Exception { createUser("no_access", "read_nothing"); - actions.expectForbidden("no_access", "SELECT * FROM test"); + actions().expectForbidden("no_access", "SELECT * FROM test"); createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs(); } public void testQueryWrongAccess() throws Exception { createUser("wrong_access", "read_something_else"); - actions.expectUnknownIndex("wrong_access", "SELECT * FROM test"); + actions().expectUnknownIndex("wrong_access", "SELECT * FROM test"); createAuditLogAsserter() // This user has permission to run sql queries so they are given preliminary authorization .expect(true, SQL_ACTION_NAME, "wrong_access", empty()) @@ -287,7 +288,7 @@ public void testQueryWrongAccess() throws Exception { public void testQuerySingleFieldGranted() throws Exception { createUser("only_a", "read_test_a"); - actions.expectMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a"); + actions().expectMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") .expectSqlCompositeActionFieldCaps("only_a", "test") .assertLogs(); @@ -296,7 +297,7 @@ public void testQuerySingleFieldGranted() throws Exception { public void testScrollWithSingleFieldGranted() throws Exception { createUser("only_a", "read_test_a"); - actions.expectScrollMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a"); + actions().expectScrollMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") /* Scrolling doesn't have to access the index again, at least not through sql. * If we asserted query and scroll logs then we would see the scroll. */ @@ -311,7 +312,7 @@ public void testScrollWithSingleFieldGranted() throws Exception { public void testQueryStringSingleFieldGrantedWrongRequested() throws Exception { createUser("only_a", "read_test_a"); - actions.expectUnknownColumn("only_a", "SELECT c FROM test", "c"); + actions().expectUnknownColumn("only_a", "SELECT c FROM test", "c"); /* The user has permission to query the index but one of the * columns that they explicitly mention is hidden from them * by field level access control. 
This *looks* like a successful @@ -324,7 +325,7 @@ public void testQueryStringSingleFieldGrantedWrongRequested() throws Exception { public void testQuerySingleFieldExcepted() throws Exception { createUser("not_c", "read_test_a_and_b"); - actions.expectMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a"); + actions().expectMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") .expectSqlCompositeActionFieldCaps("not_c", "test") .assertLogs(); @@ -333,7 +334,7 @@ public void testQuerySingleFieldExcepted() throws Exception { public void testScrollWithSingleFieldExcepted() throws Exception { createUser("not_c", "read_test_a_and_b"); - actions.expectScrollMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a"); + actions().expectScrollMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") /* Scrolling doesn't have to access the index again, at least not through sql. * If we asserted query and scroll logs then we would see the scroll. */ @@ -348,7 +349,7 @@ public void testScrollWithSingleFieldExcepted() throws Exception { public void testQuerySingleFieldExceptionedWrongRequested() throws Exception { createUser("not_c", "read_test_a_and_b"); - actions.expectUnknownColumn("not_c", "SELECT c FROM test", "c"); + actions().expectUnknownColumn("not_c", "SELECT c FROM test", "c"); /* The user has permission to query the index but one of the * columns that they explicitly mention is hidden from them * by field level access control. This *looks* like a successful @@ -361,21 +362,21 @@ public void testQuerySingleFieldExceptionedWrongRequested() throws Exception { public void testQueryDocumentExcluded() throws Exception { createUser("no_3s", "read_test_without_c_3"); - actions.expectMatchesAdmin("SELECT * FROM test WHERE c != 3 ORDER BY a", "no_3s", "SELECT * FROM test ORDER BY a"); + actions().expectMatchesAdmin("SELECT * FROM test WHERE c != 3 ORDER BY a", "no_3s", "SELECT * FROM test ORDER BY a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") .expectSqlCompositeActionFieldCaps("no_3s", "test") .assertLogs(); } public void testShowTablesWorksAsAdmin() throws Exception { - actions.expectShowTables(Arrays.asList("bort", "test"), null); + actions().expectShowTables(Arrays.asList("bort", "test"), null); createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort", "test").assertLogs(); } public void testShowTablesWorksAsFullAccess() throws Exception { - createUser("full_access", actions.minimalPermissionsForAllActions()); + createUser("full_access", actions().minimalPermissionsForAllActions()); - actions.expectMatchesAdmin("SHOW TABLES LIKE '%t'", "full_access", "SHOW TABLES"); + actions().expectMatchesAdmin("SHOW TABLES LIKE '%t'", "full_access", "SHOW TABLES"); createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort", "test") .expectSqlCompositeActionGetIndex("full_access", "bort", "test") .assertLogs(); @@ -384,14 +385,14 @@ public void testShowTablesWorksAsFullAccess() throws Exception { public void testShowTablesWithNoAccess() throws Exception { createUser("no_access", "read_nothing"); - actions.expectForbidden("no_access", "SHOW TABLES"); + actions().expectForbidden("no_access", "SHOW TABLES"); 
createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs(); } public void testShowTablesWithLimitedAccess() throws Exception { createUser("read_bort", "read_bort"); - actions.expectMatchesAdmin("SHOW TABLES LIKE 'bort'", "read_bort", "SHOW TABLES"); + actions().expectMatchesAdmin("SHOW TABLES LIKE 'bort'", "read_bort", "SHOW TABLES"); createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort") .expectSqlCompositeActionGetIndex("read_bort", "bort") .assertLogs(); @@ -400,7 +401,7 @@ public void testShowTablesWithLimitedAccess() throws Exception { public void testShowTablesWithLimitedAccessUnaccessableIndex() throws Exception { createUser("read_bort", "read_bort"); - actions.expectMatchesAdmin("SHOW TABLES LIKE 'not-created'", "read_bort", "SHOW TABLES LIKE 'test'"); + actions().expectMatchesAdmin("SHOW TABLES LIKE 'not-created'", "read_bort", "SHOW TABLES LIKE 'test'"); createAuditLogAsserter().expect(true, SQL_ACTION_NAME, "test_admin", empty()) .expect(true, GetIndexAction.NAME, "test_admin", contains("not-created")) .expect(true, SQL_ACTION_NAME, "read_bort", empty()) @@ -413,14 +414,14 @@ public void testDescribeWorksAsAdmin() throws Exception { expected.put("a", asList("BIGINT", "long")); expected.put("b", asList("BIGINT", "long")); expected.put("c", asList("BIGINT", "long")); - actions.expectDescribe(expected, null); + actions().expectDescribe(expected, null); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test").assertLogs(); } public void testDescribeWorksAsFullAccess() throws Exception { - createUser("full_access", actions.minimalPermissionsForAllActions()); + createUser("full_access", actions().minimalPermissionsForAllActions()); - actions.expectMatchesAdmin("DESCRIBE test", "full_access", "DESCRIBE test"); + actions().expectMatchesAdmin("DESCRIBE test", "full_access", "DESCRIBE test"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") .expectSqlCompositeActionFieldCaps("full_access", "test") .assertLogs(); @@ -429,14 +430,14 @@ public void testDescribeWorksAsFullAccess() throws Exception { public void testDescribeWithNoAccess() throws Exception { createUser("no_access", "read_nothing"); - actions.expectForbidden("no_access", "DESCRIBE test"); + actions().expectForbidden("no_access", "DESCRIBE test"); createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs(); } public void testDescribeWithWrongAccess() throws Exception { createUser("wrong_access", "read_something_else"); - actions.expectDescribe(Collections.emptyMap(), "wrong_access"); + actions().expectDescribe(Collections.emptyMap(), "wrong_access"); createAuditLogAsserter() // This user has permission to run sql queries so they are given preliminary authorization .expect(true, SQL_ACTION_NAME, "wrong_access", empty()) @@ -448,7 +449,7 @@ public void testDescribeWithWrongAccess() throws Exception { public void testDescribeSingleFieldGranted() throws Exception { createUser("only_a", "read_test_a"); - actions.expectDescribe(singletonMap("a", asList("BIGINT", "long")), "only_a"); + actions().expectDescribe(singletonMap("a", asList("BIGINT", "long")), "only_a"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("only_a", "test").assertLogs(); } @@ -458,14 +459,14 @@ public void testDescribeSingleFieldExcepted() throws Exception { Map> expected = new TreeMap<>(); expected.put("a", asList("BIGINT", "long")); expected.put("b", asList("BIGINT", "long")); - 
actions.expectDescribe(expected, "not_c"); + actions().expectDescribe(expected, "not_c"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("not_c", "test").assertLogs(); } public void testDescribeDocumentExcluded() throws Exception { createUser("no_3s", "read_test_without_c_3"); - actions.expectMatchesAdmin("DESCRIBE test", "no_3s", "DESCRIBE test"); + actions().expectMatchesAdmin("DESCRIBE test", "no_3s", "DESCRIBE test"); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") .expectSqlCompositeActionFieldCaps("no_3s", "test") .assertLogs(); @@ -473,15 +474,15 @@ public void testDescribeDocumentExcluded() throws Exception { public void testNoMonitorMain() throws Exception { createUser("no_monitor_main", "no_monitor_main"); - actions.checkNoMonitorMain("no_monitor_main"); + actions().checkNoMonitorMain("no_monitor_main"); } public void testNoGetIndex() throws Exception { createUser("no_get_index", "no_get_index"); - actions.expectForbidden("no_get_index", "SELECT * FROM test"); - actions.expectForbidden("no_get_index", "SHOW TABLES LIKE 'test'"); - actions.expectForbidden("no_get_index", "DESCRIBE test"); + actions().expectForbidden("no_get_index", "SELECT * FROM test"); + actions().expectForbidden("no_get_index", "SHOW TABLES LIKE 'test'"); + actions().expectForbidden("no_get_index", "DESCRIBE test"); } protected static void createUser(String name, String role) throws IOException { diff --git a/x-pack/plugin/sql/qa/server/single-node/build.gradle b/x-pack/plugin/sql/qa/server/single-node/build.gradle index c58dca254db03..ce74893733372 100644 --- a/x-pack/plugin/sql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/single-node/build.gradle @@ -1,7 +1,3 @@ -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' - plugin ':x-pack:qa:freeze-plugin' +dependencies { + clusterPlugins project(':x-pack:qa:freeze-plugin') } - diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java index 2256890f33a1b..02cdfc993c12c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java @@ -6,6 +6,16 @@ */ package org.elasticsearch.xpack.sql.qa.single_node; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.sql.qa.cli.ErrorsTestCase; +import org.junit.ClassRule; -public class CliErrorsIT extends ErrorsTestCase {} +public class CliErrorsIT extends ErrorsTestCase { + @ClassRule + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java index 7f95afc32181a..46e16418e0642 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java 
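With the constructor gone, each concrete security suite now supplies its Actions through the abstract accessor instead of calling super(actions). A minimal sketch, assuming a hypothetical subclass and Actions implementation (the real suites define their own), of what a suite looks like after this change:

    public class JdbcSecurityIT extends SqlSecurityTestCase {
        // Hypothetical Actions implementation; each concrete suite provides its own.
        private static final Actions ACTIONS = new JdbcActions();

        @Override
        Actions actions() {
            return ACTIONS;
        }
    }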
diff --git a/x-pack/plugin/sql/qa/server/single-node/build.gradle b/x-pack/plugin/sql/qa/server/single-node/build.gradle
index c58dca254db03..ce74893733372 100644
--- a/x-pack/plugin/sql/qa/server/single-node/build.gradle
+++ b/x-pack/plugin/sql/qa/server/single-node/build.gradle
@@ -1,7 +1,3 @@
-testClusters.matching { it.name == "javaRestTest" }.configureEach {
-  testDistribution = 'DEFAULT'
-  setting 'xpack.security.enabled', 'false'
-  setting 'xpack.license.self_generated.type', 'trial'
-  plugin ':x-pack:qa:freeze-plugin'
+dependencies {
+  clusterPlugins project(':x-pack:qa:freeze-plugin')
 }
-
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java
index 2256890f33a1b..02cdfc993c12c 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.ErrorsTestCase;
+import org.junit.ClassRule;
 
-public class CliErrorsIT extends ErrorsTestCase {}
+public class CliErrorsIT extends ErrorsTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
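This is the first instance of the pattern repeated across the single-node suites below: the test class owns an ElasticsearchCluster as a JUnit ClassRule and points the REST test infrastructure at it, replacing the Gradle-managed address previously read from the tests.rest.cluster system property. A generic sketch of the shape (MyNewSqlIT and its base class are placeholder names, not part of this change):

    public class MyNewSqlIT extends SomeSqlTestCase { // hypothetical suite
        @ClassRule
        public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();

        @Override
        protected String getTestRestCluster() {
            // Comma-separated host:port pairs, e.g. "127.0.0.1:9200"; the base class parses this.
            return cluster.getHttpAddresses();
        }
    }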
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java
index 7f95afc32181a..46e16418e0642 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java
@@ -6,7 +6,9 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.CliIntegrationTestCase;
+import org.junit.ClassRule;
 
 import java.io.IOException;
 
@@ -14,6 +16,14 @@ import static org.hamcrest.Matchers.startsWith;
 
 public class CliExplainIT extends CliIntegrationTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
     public void testExplainBasic() throws IOException {
         index("test", body -> body.field("test_field", "test_value"));
 
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java
index f7a6854b02fce..9811142d3611c 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.FetchSizeTestCase;
+import org.junit.ClassRule;
 
-public class CliFetchSizeIT extends FetchSizeTestCase {}
+public class CliFetchSizeIT extends FetchSizeTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java
index afcfca0a01ed2..99895823adc7f 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase;
+import org.junit.ClassRule;
 
-public class CliLenientIT extends LenientTestCase {}
+public class CliLenientIT extends LenientTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java
index 82e89da3cefb6..8baa265780f40 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java
@@ -6,8 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.PartialResultsTestCase;
+import org.junit.ClassRule;
 
 public class CliPartialResultsIT extends PartialResultsTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
 
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java
index d45d82512fe55..ecdd41a203ad3 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.SelectTestCase;
+import org.junit.ClassRule;
 
-public class CliSelectIT extends SelectTestCase {}
+public class CliSelectIT extends SelectTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java
index 982dd744a6934..2f9deffa48f08 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.cli.ShowTestCase;
+import org.junit.ClassRule;
 
-public class CliShowIT extends ShowTestCase {}
+public class CliShowIT extends ShowTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java
index a3f966e712b29..de502bf886ff3 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java
@@ -14,7 +14,9 @@
 import org.elasticsearch.core.PathUtils;
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase;
+import org.junit.ClassRule;
 
 import java.io.IOException;
 import java.nio.file.Files;
@@ -49,6 +51,13 @@
  * new Fn("ASCII", "foobar").ignore()
  */
 public class ConsistentFunctionArgHandlingIT extends JdbcIntegrationTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     private static final List<Fn> FUNCTION_CALLS_TO_TEST = asList(
         new Fn("ASCII", "foobar"),
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java
index c0d0127f17f77..4a91372abe5d4 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java
@@ -7,8 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.CustomDateFormatTestCase;
+import org.junit.ClassRule;
 
 public class CustomDateFormatIT extends CustomDateFormatTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
 
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java
index 2fa2457a5c608..ac967710e360c 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java
@@ -7,8 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.FieldExtractorTestCase;
+import org.junit.ClassRule;
 
 public class FieldExtractorIT extends FieldExtractorTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
 
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java
index bb0d16cc5ec9a..8fb61b79970ec 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java
@@ -9,7 +9,9 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.geo.GeoCsvSpecTestCase;
+import org.junit.ClassRule;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -18,6 +20,13 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser;
 
 public class GeoJdbcCsvSpecIT extends GeoCsvSpecTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java
index 7eb7a7be5febd..e2796051653b5 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java
@@ -7,9 +7,19 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.geo.GeoSqlSpecTestCase;
+import org.junit.ClassRule;
 
 public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
     public GeoJdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) {
         super(fileName, groupName, testName, lineNumber, query);
     }
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java
index 4346aad97e4cd..6ccc2662fcf9f 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java
@@ -8,7 +8,9 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase;
+import org.junit.ClassRule;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -17,6 +19,13 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser;
 
 public class JdbcCsvSpecIT extends CsvSpecTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java
index da1ec865922a2..958bb212e8a13 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.DatabaseMetaDataTestCase;
+import org.junit.ClassRule;
 
-public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {}
+public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java
index 73742a553f7a2..6147d0759f459 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java
@@ -10,10 +10,12 @@
 import org.apache.logging.log4j.Logger;
 
 import org.elasticsearch.client.RestClient;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.DataLoader;
 import org.elasticsearch.xpack.sql.qa.jdbc.JdbcAssert;
 import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase;
 import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase;
+import org.junit.ClassRule;
 
 import java.sql.Connection;
 import java.sql.ResultSet;
@@ -39,6 +41,13 @@
  * at this stage and, to not keep things stalling, started with this approach.
  */
 public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     private final CsvTestCase testCase;
 
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java
index d912eb5a6261e..fcf6241f51c0f 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java
@@ -8,7 +8,9 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase;
+import org.junit.ClassRule;
 
 import java.util.List;
 import java.util.Properties;
@@ -18,6 +20,13 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser;
 
 public class JdbcFrozenCsvSpecIT extends CsvSpecTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
index 492971cf2a13c..572c809468784 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
@@ -8,7 +8,9 @@
 
 import org.elasticsearch.client.Request;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase;
+import org.junit.ClassRule;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -22,7 +24,27 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 
 public class JdbcShardFailureIT extends JdbcIntegrationTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    private String nodeAddresses;
+
+    /**
+     * Caches the node addresses when called for the first time.
+     * Once the cluster is in red health, calling this will time out if it has not been called before.
+     */
+    @Override
+    protected String getTestRestCluster() {
+        if (nodeAddresses == null) {
+            nodeAddresses = cluster.getHttpAddresses();
+        }
+        return nodeAddresses;
+    }
+
     private void createTestIndex() throws IOException {
+        // This method will put the cluster into a red state intentionally, so cache the node addresses first.
+        getTestRestCluster();
+
         Request createTest1 = new Request("PUT", "/test1");
         String body1 = """
             {"aliases":{"test":{}}, "mappings": {"properties": {"test_field":{"type":"integer"}}}}""";
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java
index d9677bd832226..40b90e1a42c6c 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java
@@ -6,6 +6,16 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.ShowTablesTestCase;
+import org.junit.ClassRule;
 
-public class JdbcShowTablesIT extends ShowTablesTestCase {}
+public class JdbcShowTablesIT extends ShowTablesTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java
index 15b69f0158ef3..15d6895be9010 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java
@@ -6,9 +6,19 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase;
+import org.junit.ClassRule;
 
 public class JdbcSqlSpecIT extends SqlSpecTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
     public JdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) {
         super(fileName, groupName, testName, lineNumber, query);
     }
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java
index 88af42929a741..7074091f4f166 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java
@@ -10,13 +10,22 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.elasticsearch.client.Request;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.rest.BaseRestSqlTestCase;
+import org.junit.ClassRule;
 
 import java.io.IOException;
 
 import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.SQL_QUERY_REST_ENDPOINT;
 
 public class RestSqlDeprecationIT extends BaseRestSqlTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     public void testIndexIncludeParameterIsDeprecated() throws IOException {
         testDeprecationWarning(
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
index c0a1a79e4c9a7..167cc212685d7 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
@@ -10,7 +10,9 @@
 import org.apache.http.entity.StringEntity;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase;
+import org.junit.ClassRule;
 
 import java.io.IOException;
 
@@ -21,6 +23,13 @@
  * user rather than to the JDBC driver or CLI.
  */
 public class RestSqlIT extends RestSqlTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 
     public void testErrorMessageForTranslatingQueryWithWhereEvaluatingToFalse() throws IOException {
         index("{\"foo\":1}");
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java
index da26f550cafe4..6ef56274cdbb0 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java
@@ -7,6 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.rest.RestSqlPaginationTestCase;
+import org.junit.ClassRule;
 
-public class RestSqlPaginationIT extends RestSqlPaginationTestCase {}
+public class RestSqlPaginationIT extends RestSqlPaginationTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java
index e1f9ff782146d..297302c534030 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java
@@ -7,6 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.rest.RestSqlUsageTestCase;
+import org.junit.ClassRule;
 
-public class RestSqlUsageIT extends RestSqlUsageTestCase {}
+public class RestSqlUsageIT extends RestSqlUsageTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java
index c3d08d34542bd..e59a8392f7335 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java
@@ -7,6 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.SqlProtocolTestCase;
+import org.junit.ClassRule;
 
-public class SqlProtocolIT extends SqlProtocolTestCase {}
+public class SqlProtocolIT extends SqlProtocolTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java
new file mode 100644
index 0000000000000..bfcd1671e4d39
--- /dev/null
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.sql.qa.single_node;
+
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+
+public class SqlTestCluster {
+    public static ElasticsearchCluster getCluster() {
+        return ElasticsearchCluster.local()
+            .distribution(DistributionType.DEFAULT)
+            .name("javaRestTest")
+            .setting("xpack.ml.enabled", "false")
+            .setting("xpack.watcher.enabled", "false")
+            .setting("xpack.security.enabled", "false")
+            .setting("xpack.license.self_generated.type", "trial")
+            .plugin(":x-pack:qa:freeze-plugin")
+            .build();
+    }
+}
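SqlTestCluster centralizes what the removed build.gradle testClusters block used to configure per project. If a suite ever needed a different topology, the natural extension would be another factory method on this class. A hypothetical secured variant, purely to illustrate the builder (the user() helper and the credentials are assumptions, not part of this change):

    public static ElasticsearchCluster getSecuredCluster() { // hypothetical, not in this diff
        return ElasticsearchCluster.local()
            .distribution(DistributionType.DEFAULT)
            .name("javaRestTest")
            .setting("xpack.license.self_generated.type", "trial")
            .setting("xpack.security.enabled", "true")
            .user("test_admin", "x-pack-test-password") // assumed builder helper and credentials
            .build();
    }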
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java
index 5c5dcd2afbe72..928916b3c40ae 100644
--- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java
+++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java
@@ -7,8 +7,16 @@
 
 package org.elasticsearch.xpack.sql.qa.single_node;
 
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.xpack.sql.qa.jdbc.SysColumnsTestCase;
+import org.junit.ClassRule;
 
 public class SysColumnsIT extends SysColumnsTestCase {
+    @ClassRule
+    public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster();
 
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/CliIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/CliIntegrationTestCase.java
index eb253e16cd848..698df9882ede0 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/CliIntegrationTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/CliIntegrationTestCase.java
@@ -24,10 +24,9 @@ public abstract class CliIntegrationTestCase extends ESRestTestCase {
     /**
      * Read an address for Elasticsearch suitable for the CLI from the system properties.
      */
-    public static String elasticsearchAddress() {
-        String cluster = System.getProperty("tests.rest.cluster");
+    public String elasticsearchAddress() {
         // CLI only supports a single node at a time so we just give it one.
-        return cluster.split(",")[0];
+        return getTestRestCluster().split(",")[0];
     }
 
     private EmbeddedCli cli;
@@ -37,7 +36,7 @@ public static String elasticsearchAddress() {
      */
     @Before
     public void startCli() throws IOException {
-        cli = new EmbeddedCli(CliIntegrationTestCase.elasticsearchAddress(), true, securityConfig());
+        cli = new EmbeddedCli(elasticsearchAddress(), true, securityConfig());
     }
 
     @After
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DatabaseMetaDataTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DatabaseMetaDataTestCase.java
index e2fd7659fc7e7..0b2effe6e3e87 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DatabaseMetaDataTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DatabaseMetaDataTestCase.java
@@ -129,7 +129,7 @@ public void testGetDataStreamViewByName() throws IOException, SQLException {
     private void expectDataStreamTable(String dataStreamName, String tableNamePattern, String[] types) throws SQLException, IOException {
         try {
-            createDataStream(dataStreamName);
+            createDataStream(dataStreamName, provisioningClient());
             try (Connection es = esJdbc(); ResultSet rs = es.getMetaData().getTables("%", "%", tableNamePattern, types)) {
                 assertTrue(rs.next());
                 assertEquals(dataStreamName, rs.getString(3));
@@ -137,7 +137,7 @@ private void expectDataStreamTable(String dataStreamName, String tableNamePatter
                 assertFalse(rs.next());
             }
         } finally {
-            deleteDataStream(dataStreamName);
+            deleteDataStream(dataStreamName, provisioningClient());
         }
     }
 
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java
index 34383404544a5..e5a77c0630575 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java
@@ -8,6 +8,7 @@
 
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.CheckedConsumer;
@@ -38,8 +39,8 @@ public void checkSearchContent() throws Exception {
     /**
      * Read an address for Elasticsearch suitable for the JDBC driver from the system properties.
      */
-    public static String elasticsearchAddress() {
-        String cluster = System.getProperty("tests.rest.cluster");
+    public String elasticsearchAddress() {
+        String cluster = getTestRestCluster();
         // JDBC only supports a single node at a time so we just give it one.
         return cluster.split(",")[0];
         /* This doesn't include "jdbc:es://" because we want the example in
@@ -72,21 +73,35 @@ protected Connection createConnection(Properties connectionProperties) throws SQ
         return connection;
     }
 
-    public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
-        index(index, "1", body);
+    public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body, RestClient provisioningClient)
+        throws IOException {
+        index(index, "1", body, provisioningClient);
     }
 
-    public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
+    public void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
+        index(index, body, provisioningClient());
+    }
+
+    public static void index(
+        String index,
+        String documentId,
+        CheckedConsumer<XContentBuilder, IOException> body,
+        RestClient provisioningClient
+    ) throws IOException {
         Request request = new Request("PUT", "/" + index + "/_doc/" + documentId);
         request.addParameter("refresh", "true");
         XContentBuilder builder = JsonXContent.contentBuilder().startObject();
         body.accept(builder);
         builder.endObject();
         request.setJsonEntity(Strings.toString(builder));
-        provisioningClient().performRequest(request);
+        provisioningClient.performRequest(request);
+    }
+
+    public void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
+        index(index, documentId, body, provisioningClient());
     }
 
-    public static void delete(String index, String documentId) throws IOException {
+    public void delete(String index, String documentId) throws IOException {
         Request request = new Request("DELETE", "/" + index + "/_doc/" + documentId);
         request.addParameter("refresh", "true");
         provisioningClient().performRequest(request);
@@ -116,7 +131,7 @@ protected Properties connectionProperties() {
         return connectionProperties;
     }
 
-    protected static void createIndexWithSettingsAndMappings(String index) throws IOException {
+    protected void createIndexWithSettingsAndMappings(String index) throws IOException {
         Request request = new Request("PUT", "/" + index);
         XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
         createIndex.startObject("settings");
@@ -135,7 +150,7 @@ protected static void createIndexWithSettingsAndMappings(String index) throws IO
         provisioningClient().performRequest(request);
     }
 
-    protected static void updateMapping(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
+    protected void updateMapping(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
         Request request = new Request("PUT", "/" + index + "/_mapping");
         XContentBuilder updateMapping = JsonXContent.contentBuilder().startObject();
         updateMapping.startObject("properties");
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SysColumnsTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SysColumnsTestCase.java
index 8239f9d2fc148..e2e3b1fe45af8 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SysColumnsTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SysColumnsTestCase.java
@@ -419,7 +419,7 @@ public void testMultiIndicesMultiAlias() throws Exception {
         );
     }
 
-    private static void createIndexWithMapping(String indexName, CheckedConsumer<XContentBuilder, IOException> mapping) throws Exception {
+    private void createIndexWithMapping(String indexName, CheckedConsumer<XContentBuilder, IOException> mapping) throws Exception {
         createIndexWithSettingsAndMappings(indexName);
         updateMapping(indexName, mapping);
     }
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java
index d6fd9b23860fc..bd43d3d651e52 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Tuple;
@@ -221,24 +222,32 @@ protected void deleteTestIndex() throws IOException {
         deleteIndexWithProvisioningClient(TEST_INDEX);
     }
 
-    protected static void deleteIndexWithProvisioningClient(String name) throws IOException {
+    protected void deleteIndexWithProvisioningClient(String name) throws IOException {
         deleteIndex(provisioningClient(), name);
     }
 
-    public static void createDataStream(String dataStreamName) throws IOException {
+    public static void createDataStream(String dataStreamName, RestClient provisioningClient) throws IOException {
         Request request = new Request("PUT", "/_index_template/" + DATA_STREAM_TEMPLATE + "-" + dataStreamName);
         request.setJsonEntity("{\"index_patterns\": [\"" + dataStreamName + "*\"], \"data_stream\": {}}");
-        assertOK(provisioningClient().performRequest(request));
+        assertOK(provisioningClient.performRequest(request));
 
         request = new Request("PUT", "/_data_stream/" + dataStreamName);
-        assertOK(provisioningClient().performRequest(request));
+        assertOK(provisioningClient.performRequest(request));
     }
 
-    public static void deleteDataStream(String dataStreamName) throws IOException {
+    public void createDataStream(String dataStreamName) throws IOException {
+        createDataStream(dataStreamName, provisioningClient());
+    }
+
+    public static void deleteDataStream(String dataStreamName, RestClient provisioningClient) throws IOException {
         Request request = new Request("DELETE", "_data_stream/" + dataStreamName);
-        provisioningClient().performRequest(request);
+        provisioningClient.performRequest(request);
 
         request = new Request("DELETE", "/_index_template/" + DATA_STREAM_TEMPLATE + "-" + dataStreamName);
-        provisioningClient().performRequest(request);
+        provisioningClient.performRequest(request);
+    }
+
+    public void deleteDataStream(String dataStreamName) throws IOException {
+        deleteDataStream(dataStreamName, provisioningClient());
     }
 
     public static RequestObjectBuilder query(String query) {
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RemoteClusterAwareSqlRestTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RemoteClusterAwareSqlRestTestCase.java
index 1dfbe6ef34cce..c81fe83a96f66 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RemoteClusterAwareSqlRestTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RemoteClusterAwareSqlRestTestCase.java
@@ -30,7 +30,7 @@ public abstract class RemoteClusterAwareSqlRestTestCase extends ESRestTestCase {
     // client used for loading data on a remote cluster only.
     private static RestClient remoteClient;
 
-    // gradle defines
+    // gradle defines when using legacy-java-rest-test
     public static final String AUTH_USER = System.getProperty("tests.rest.cluster.multi.user");
     public static final String AUTH_PASS = System.getProperty("tests.rest.cluster.multi.password");
 
@@ -59,7 +59,7 @@ public static void closeRemoteClients() throws IOException {
         }
     }
 
-    protected static RestClient clientBuilder(Settings settings, HttpHost[] hosts) throws IOException {
+    public static RestClient clientBuilder(Settings settings, HttpHost[] hosts) throws IOException {
         RestClientBuilder builder = RestClient.builder(hosts);
         doConfigureClient(builder, settings);
 
@@ -77,12 +77,22 @@ protected static TimeValue timeout() {
         return TimeValue.timeValueSeconds(CLIENT_TIMEOUT);
     }
 
-    // returned client is used to load the test data, either in the local cluster (for rest/javaRestTests) or a remote one (for
-    // multi-cluster). note: the client()/adminClient() will always connect to the local cluster.
-    protected static RestClient provisioningClient() {
+    /**
+     * Use this when using the {@code legacy-java-rest-test} plugin.
+     * @return a client to the remote cluster if it exists, otherwise a client to the local cluster
+     */
+    public static RestClient defaultProvisioningClient() {
         return remoteClient == null ? client() : remoteClient;
     }
 
+    /**
+     * Override if the test data must be provisioned on a remote cluster while not using the {@code legacy-java-rest-test} plugin.
+     * @return client to use for loading test data
+     */
+    protected RestClient provisioningClient() {
+        return defaultProvisioningClient();
+    }
+
     @Override
     protected Settings restClientSettings() {
         return secureRemoteClientSettings();
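Making provisioningClient() an overridable instance method lets a multi-cluster suite route data loading to a remote cluster without the legacy-java-rest-test system properties. A sketch of such an override, assuming a hypothetical suite and a remote client built elsewhere (neither is part of this diff):

    public class MyMultiClusterSqlIT extends RemoteClusterAwareSqlRestTestCase { // hypothetical
        private static RestClient remoteProvisioningClient; // built elsewhere from the remote cluster's addresses

        @Override
        protected RestClient provisioningClient() {
            // Load test data on the remote cluster; client()/adminClient() still talk to the local one.
            return remoteProvisioningClient;
        }
    }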
diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java
index 81cc54db19669..fb92ac096fc36 100644
--- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java
+++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java
@@ -1434,7 +1434,7 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri
         assertEquals(0, getNumberOfSearchContexts(provisioningClient(), "test"));
     }
 
-    private static void bulkLoadTestData(int count) throws IOException {
+    private void bulkLoadTestData(int count) throws IOException {
         Request request = new Request("POST", "/test/_bulk");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
@@ -1801,7 +1801,7 @@ public void testDataStreamInShowTablesFiltered() throws IOException {
         expectDataStreamInShowTables(dataStreamName, "SHOW TABLES \\\"" + dataStreamName + "*\\\"");
     }
 
-    private static void expectDataStreamInShowTables(String dataStreamName, String sql) throws IOException {
+    private void expectDataStreamInShowTables(String dataStreamName, String sql) throws IOException {
         try {
             createDataStream(dataStreamName);
 
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml
new file mode 100644
index 0000000000000..d0f7c7636582f
--- /dev/null
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml
@@ -0,0 +1,51 @@
+setup:
+
+  - skip:
+      version: " - 8.11.99"
+      reason: "counted_keyword was added in 8.12"
+
+  - do:
+      indices.create:
+        index: test-events
+        body:
+          mappings:
+            properties:
+              events:
+                type: counted_keyword
+
+
+  - do:
+      index:
+        index: test-events
+        id: "1"
+        body: { "events": [ "a", "a", "b", "c" ] }
+
+  - do:
+      index:
+        index: test-events
+        id: "2"
+        body: { "events": [ "a", "b", "b", "b", "c" ] }
+
+  - do:
+      indices.refresh: { }
+
+---
+"Counted Terms agg":
+
+  - do:
+      search:
+        index: test-events
+        body:
+          size: 0
+          aggs:
+            event_terms:
+              counted_terms:
+                field: events
+
+  - match: { aggregations.event_terms.buckets.0.key: "b" }
+  - match: { aggregations.event_terms.buckets.0.doc_count: 4 }
+  - match: { aggregations.event_terms.buckets.1.key: "a" }
+  - match: { aggregations.event_terms.buckets.1.doc_count: 3 }
+  - match: { aggregations.event_terms.buckets.2.key: "c" }
+  - match: { aggregations.event_terms.buckets.2.doc_count: 2 }
+  - length: { aggregations.event_terms.buckets: 3 }
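The expected buckets follow from counted_keyword counting every occurrence of a value within a document, rather than counting each document once. Worked out from the two documents indexed above:

    // "a": 2 occurrences (doc 1) + 1 (doc 2) = 3
    // "b": 1 occurrence  (doc 1) + 3 (doc 2) = 4  -> highest count, so it sorts first
    // "c": 1 occurrence  (doc 1) + 1 (doc 2) = 2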
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml
index 03cbe665a0d5d..1fa675ff4284f 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml
@@ -695,3 +695,61 @@ setup:
       }
     }
   }
+
+---
+"Test put model config with prefix strings":
+  - do:
+      ml.put_trained_model:
+        model_id: model_with_prefixes
+        body: >
+          {
+            "model_type": "pytorch",
+            "inference_config": {
+              "text_embedding": { }
+            },
+            "prefix_strings": {
+              "search": "this is a query",
+              "ingest": "this is a passage"
+            }
+          }
+  - match: { prefix_strings.search: "this is a query" }
+  - match: { prefix_strings.ingest: "this is a passage" }
+
+  - do:
+      ml.get_trained_models:
+        model_id: model_with_prefixes
+  - match: { trained_model_configs.0.prefix_strings.search: "this is a query" }
+  - match: { trained_model_configs.0.prefix_strings.ingest: "this is a passage" }
+
+
+  - do:
+      ml.put_trained_model:
+        model_id: model_with_search_prefix
+        body: >
+          {
+            "model_type": "pytorch",
+            "inference_config": {
+              "text_embedding": { }
+            },
+            "prefix_strings": {
+              "search": "this is a query"
+            }
+          }
+  - match: { prefix_strings.search: "this is a query" }
+  - is_false: prefix_strings.ingest
+
+  - do:
+      ml.put_trained_model:
+        model_id: model_with_ingest_prefix
+        body: >
+          {
+            "model_type": "pytorch",
+            "inference_config": {
+              "text_embedding": { }
+            },
+            "prefix_strings": {
+              "ingest": "this is a passage"
+            }
+          }
+  - is_false: prefix_strings.search
+  - match: { prefix_strings.ingest: "this is a passage" }
diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java
index f892e8dc57bf1..4ac7d404c49ed 100644
--- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java
+++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java
@@ -47,7 +47,7 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry {
 
     // The stack template registry version. This number must be incremented when we make changes
     // to built-in templates.
-    public static final int REGISTRY_VERSION = 3;
+    public static final int REGISTRY_VERSION = 4;
 
     public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version";
 
@@ -55,6 +55,8 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry {
     private final FeatureService featureService;
     private volatile boolean stackTemplateEnabled;
 
+    private static final Map<String, String> ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "true");
+
     // General mappings conventions for any data that ends up in a data stream
     public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams-mappings";
 
@@ -126,14 +128,14 @@ private void updateEnabledSetting(boolean newValue) {
     }
 
     private static final List<LifecyclePolicyConfig> LIFECYCLE_POLICY_CONFIGS = List.of(
-        new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json"),
-        new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json"),
-        new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json")
+        new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES)
     );
 
     @Override
@@ -159,55 +161,64 @@ protected List<LifecyclePolicy> getLifecyclePolicies() {
                 DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/data-streams@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/logs@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/ecs@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/logs@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@tsdb-settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/synthetics@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
            ),
             new IndexTemplateConfig(
                 SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/synthetics@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             )
         )) {
             try {
@@ -237,8 +248,22 @@ protected Map<String, ComposableIndexTemplate> getComposableTemplateConfigs() {
     }
 
     private static final List<IngestPipelineConfig> INGEST_PIPELINE_CONFIGS = List.of(
-        new JsonIngestPipelineConfig("logs@json-message", "/logs@json-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE),
-        new JsonIngestPipelineConfig("logs-default-pipeline", "/logs@default-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE)
+        new JsonIngestPipelineConfig(
+            "logs@json-message",
+            "/logs@json-pipeline.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            List.of(),
+            ADDITIONAL_TEMPLATE_VARIABLES
+        ),
+        new JsonIngestPipelineConfig(
+            "logs-default-pipeline",
+            "/logs@default-pipeline.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            List.of(),
+            ADDITIONAL_TEMPLATE_VARIABLES
+        )
     );
 
     @Override
diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
index 354c0e9981921..36da14680c66a 100644
--- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
+++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java
@@ -43,7 +43,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry {
     // The stack template registry version. This number must be incremented when we make changes
     // to built-in templates.
-    public static final int REGISTRY_VERSION = 3;
+    public static final int REGISTRY_VERSION = 4;
 
     public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version";
 
     public static final Setting<Boolean> STACK_TEMPLATES_ENABLED = Setting.boolSetting(
@@ -57,6 +57,8 @@ public class StackTemplateRegistry extends IndexTemplateRegistry {
     private final FeatureService featureService;
     private volatile boolean stackTemplateEnabled;
 
+    private static final Map<String, String> ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "false");
+
     // General mappings conventions for any data that ends up in a data stream
     public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams@mappings";
 
@@ -136,14 +138,14 @@ private void updateEnabledSetting(boolean newValue) {
     }
 
     private static final List<LifecyclePolicyConfig> LIFECYCLE_POLICY_CONFIGS = List.of(
-        new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json"),
-        new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json"),
-        new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json"),
-        new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json")
+        new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES),
+        new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json", ADDITIONAL_TEMPLATE_VARIABLES)
     );
 
     @Override
@@ -165,55 +167,64 @@ protected List<LifecyclePolicy> getLifecyclePolicies() {
                 DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/data-streams@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/logs@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/ecs@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/logs@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/metrics@tsdb-settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
                 "/synthetics@mappings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             ),
             new IndexTemplateConfig(
                 SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
                 "/synthetics@settings.json",
                 REGISTRY_VERSION,
-                TEMPLATE_VERSION_VARIABLE
+                TEMPLATE_VERSION_VARIABLE,
+                ADDITIONAL_TEMPLATE_VARIABLES
             )
         )) {
             try {
@@ -234,14 +245,33 @@ protected Map<String, ComponentTemplate> getComponentTemplateConfigs() {
     }
 
     private static final Map<String, ComposableIndexTemplate> COMPOSABLE_INDEX_TEMPLATE_CONFIGS = parseComposableTemplates(
-        new IndexTemplateConfig(LOGS_INDEX_TEMPLATE_NAME, "/logs@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE),
-        new IndexTemplateConfig(METRICS_INDEX_TEMPLATE_NAME, "/metrics@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE),
-        new IndexTemplateConfig(SYNTHETICS_INDEX_TEMPLATE_NAME, "/synthetics@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE),
+        new IndexTemplateConfig(
+            LOGS_INDEX_TEMPLATE_NAME,
+            "/logs@template.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            ADDITIONAL_TEMPLATE_VARIABLES
+        ),
+        new IndexTemplateConfig(
+            METRICS_INDEX_TEMPLATE_NAME,
+            "/metrics@template.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            ADDITIONAL_TEMPLATE_VARIABLES
+        ),
+        new IndexTemplateConfig(
+            SYNTHETICS_INDEX_TEMPLATE_NAME,
+            "/synthetics@template.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            ADDITIONAL_TEMPLATE_VARIABLES
+        ),
         new IndexTemplateConfig(
             KIBANA_REPORTING_INDEX_TEMPLATE_NAME,
             "/kibana-reporting@template.json",
             REGISTRY_VERSION,
-            TEMPLATE_VERSION_VARIABLE
+            TEMPLATE_VERSION_VARIABLE,
+            ADDITIONAL_TEMPLATE_VARIABLES
         )
     );
 
@@ -255,8 +285,22 @@ protected Map<String, ComposableIndexTemplate> getComposableTemplateConfigs() {
     }
 
     private static final List<IngestPipelineConfig> INGEST_PIPELINE_CONFIGS = List.of(
-        new JsonIngestPipelineConfig("logs@json-pipeline", "/logs@json-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE),
-        new JsonIngestPipelineConfig("logs@default-pipeline", "/logs@default-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE)
+        new JsonIngestPipelineConfig(
+            "logs@json-pipeline",
+            "/logs@json-pipeline.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            List.of(),
+            ADDITIONAL_TEMPLATE_VARIABLES
+        ),
+        new JsonIngestPipelineConfig(
+            "logs@default-pipeline",
+            "/logs@default-pipeline.json",
+            REGISTRY_VERSION,
+            TEMPLATE_VERSION_VARIABLE,
+            List.of(),
+            ADDITIONAL_TEMPLATE_VARIABLES
+        )
     );
 
     @Override
diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java
new file mode 100644
index 0000000000000..39f58e638aa68
--- /dev/null
+++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.stack;
+
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.features.FeatureService;
+import org.elasticsearch.ingest.PipelineConfiguration;
+import org.elasticsearch.test.ClusterServiceUtils;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.client.NoOpClient;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.core.ilm.LifecyclePolicy;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.List;
+
+public class LegacyStackTemplateRegistryTests extends ESTestCase {
+    private LegacyStackTemplateRegistry registry;
+    private ThreadPool threadPool;
+
+    @Before
+    public void createRegistryAndClient() {
+        threadPool = new TestThreadPool(this.getClass().getName());
+        Client client = new NoOpClient(threadPool);
+        ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
+        var featureService = new FeatureService(List.of(new StackTemplatesFeatures()));
+        registry = new LegacyStackTemplateRegistry(
+            Settings.EMPTY,
+            clusterService,
+            threadPool,
+            client,
+            NamedXContentRegistry.EMPTY,
+            featureService
+        );
+    }
+
+    @After
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        threadPool.shutdownNow();
+    }
+
+    public void testThatTemplatesAreDeprecated() {
+        for (ComposableIndexTemplate it : registry.getComposableTemplateConfigs().values()) {
+            assertTrue(it.isDeprecated());
+        }
+        for (LifecyclePolicy ilm : registry.getLifecyclePolicies()) {
+            assertTrue(ilm.isDeprecated());
+        }
+        for (ComponentTemplate ct : registry.getComponentTemplateConfigs().values()) {
+            assertTrue(ct.deprecated());
+        }
+        registry.getIngestPipelines()
+            .stream()
+            .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON))
+            .map(PipelineConfiguration::getConfigAsMap)
+            .forEach(p -> assertTrue((Boolean) p.get("deprecated")));
+    }
+
+}
diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
index 4fbbd920bef6c..b6fd2e8dd1a53 100644
--- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
+++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
@@ -520,6 +520,23 @@ public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() {
         registry.clusterChanged(event);
     }
 
+    public void testThatTemplatesAreNotDeprecated() {
+        for (ComposableIndexTemplate it : registry.getComposableTemplateConfigs().values()) {
+            assertFalse(it.isDeprecated());
+        }
+        for (LifecyclePolicy ilm : registry.getLifecyclePolicies()) {
+            assertFalse(ilm.isDeprecated());
+        }
+        for (ComponentTemplate ct : registry.getComponentTemplateConfigs().values()) {
+            assertFalse(ct.deprecated());
+        }
+        registry.getIngestPipelines()
+            .stream()
+            .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON))
+            .map(PipelineConfiguration::getConfigAsMap)
+            .forEach(p -> assertFalse((Boolean) p.get("deprecated")));
+    }
+
     // -------------
 
     /**
diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java
index 04193ef270403..7b8355ec41e90 100644
--- a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java
+++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java
@@ -150,15 +150,15 @@ public boolean satisfiesAdditionalQuorumConstraints(
         }
 
         private static Predicate<Join> fullMasterWithSameState(long localAcceptedTerm, long localAcceptedVersion) {
-            return join -> isFullMasterNode(join.getVotingNode())
-                && join.getLastAcceptedTerm() == localAcceptedTerm
-                && join.getLastAcceptedVersion() == localAcceptedVersion;
+            return join -> isFullMasterNode(join.votingNode())
+                && join.lastAcceptedTerm() == localAcceptedTerm
+                && join.lastAcceptedVersion() == localAcceptedVersion;
         }
 
         private static Predicate<Join> fullMasterWithOlderState(long localAcceptedTerm, long localAcceptedVersion) {
-            return join -> isFullMasterNode(join.getVotingNode())
-                && (join.getLastAcceptedTerm() < localAcceptedTerm
-                    || (join.getLastAcceptedTerm() == localAcceptedTerm && join.getLastAcceptedVersion() < localAcceptedVersion));
+            return join -> isFullMasterNode(join.votingNode())
+                && (join.lastAcceptedTerm() < localAcceptedTerm
+                    || (join.lastAcceptedTerm() == localAcceptedTerm && join.lastAcceptedVersion() < localAcceptedVersion));
         }
     }
 }
diff --git a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java
index 479f9b3662c05..d983747571b34 100644
--- a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java
+++ b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java
@@ -153,16 +153,11 @@ private void setUpDataStreamWriteDocsAndRollover(String dataStreamName, Settings
         client().execute(
             PutComposableIndexTemplateAction.INSTANCE,
             new PutComposableIndexTemplateAction.Request("my-template").indexTemplate(
-                new ComposableIndexTemplate(
-                    List.of("logs-*"),
-                    new Template(indexSettings, null, null),
-                    null,
-                    null,
-                    null,
-                    null,
-                    new ComposableIndexTemplate.DataStreamTemplate(),
-                    null
-                )
+                ComposableIndexTemplate.builder()
+                    .indexPatterns(List.of("logs-*"))
+                    .template(new Template(indexSettings, null, null))
+                    .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                    .build()
             )
         )
     );
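The getter-to-accessor renames in VotingOnlyNodePlugin track Join's conversion to a record. Keeping only the components evidenced by this hunk (the real record lives in org.elasticsearch.cluster.coordination and carries additional state omitted here), the implied shape is roughly:

    import org.elasticsearch.cluster.node.DiscoveryNode;

    // sketch only: records auto-generate votingNode(), lastAcceptedTerm() and
    // lastAcceptedVersion() accessors, which is why the getXxx()-style calls went away
    public record Join(DiscoveryNode votingNode, long lastAcceptedTerm, long lastAcceptedVersion) {}

The WriteLoadForecasterIT change reflects the same cleanup theme: the positional eight-argument ComposableIndexTemplate constructor, where most callers passed null, gives way to a builder that names only the parts the test actually sets.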