Merge branch 'main' into allowing-update-of-non-dynamic-settings
masseyke committed Nov 9, 2023
2 parents 237eea2 + 9fdc67c commit 5f964de
Showing 57 changed files with 2,069 additions and 522 deletions.
29 changes: 29 additions & 0 deletions docs/changelog/101333.yaml
@@ -0,0 +1,29 @@
pr: 101333
summary: Fixed JWT principal from claims
area: Authorization
type: breaking
issues: []
breaking:
title: Fixed JWT principal from claims
area: Authorization
details: "This changes the format of a JWT's principal before the JWT is actually\
\ validated by any JWT realm. The JWT's principal is a convenient way to refer\
\ to a JWT that has not yet been verified by a JWT realm. The JWT's principal\
\ is printed in the audit and regular logs (notably for auditing authn failures)\
    \ and is also used by the smart realm chain reordering optimization. The JWT principal\
\ is NOT required to be identical to the JWT-authenticated user's principal, but\
\ in general, they should be similar. Previously, the JWT's principal was built\
\ by individual realms in the same way the realms built the authenticated user's\
    \ principal. This had the advantage that, in simpler JWT realm configurations\
\ (e.g. a single JWT realm in the chain), the JWT principal and the authenticated\
    \ user's principal are very similar. However, the drawback is that, in general,\
\ the JWT principal and the user principal can be very different (i.e. in the\
\ case where one JWT realm builds the JWT principal and a different one builds\
\ the user principal). Another downside is that the (unauthenticated) JWT principal\
    \ depended on realm ordering, which made identifying the JWT from its principal\
    \ dependent on the ES authn realm configuration. This PR implements consistent,\
    \ fixed logic to build the JWT principal, which depends only on the JWT's claims\
    \ and not on any ES configuration."
impact: "Users will observe changed format and values for the `user.name` attribute\
\ of `authentication_failed` audit log events, in the JWT (failed) authn case."
notable: false
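
For orientation, the fixed, configuration-independent principal described above can be pictured with a small sketch like the one below. It is illustrative only; the claims consulted and their precedence are assumptions, not the exact logic shipped by the JWT realm.

import java.util.Map;

/**
 * Illustrative sketch only: derives a principal string from JWT claims alone,
 * independent of realm ordering or any other Elasticsearch configuration.
 * The claims consulted here and their precedence are assumptions, not the
 * exact logic shipped by the JWT realm.
 */
final class JwtPrincipalSketch {

    static String principalFromClaims(Map<String, Object> claims) {
        Object issuer = claims.get("iss");
        Object subject = claims.get("sub");
        if (subject != null) {
            // Qualify the subject with the issuer so principals from different
            // identity providers cannot collide.
            return issuer + "/" + subject;
        }
        // Fall back to other commonly used identity claims when "sub" is absent.
        for (String fallback : new String[] { "oid", "client_id", "azp" }) {
            Object value = claims.get(fallback);
            if (value != null) {
                return issuer + "/" + value;
            }
        }
        return "<unrecognized jwt>";
    }

    public static void main(String[] args) {
        // Prints "https://idp.example.org/jane.doe"
        System.out.println(principalFromClaims(Map.of("iss", "https://idp.example.org", "sub", "jane.doe")));
    }
}
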
6 changes: 6 additions & 0 deletions docs/changelog/101907.yaml
@@ -0,0 +1,6 @@
pr: 101907
summary: Fail listener on exception in `TcpTransport#openConnection`
area: Network
type: bug
issues:
- 100510
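
The fix named in this changelog entry delivers failures to the waiting listener rather than letting them escape `TcpTransport#openConnection`. A minimal sketch of that pattern follows; `Connection` and `openConnectionInternal` are placeholders, not the real transport internals.

import org.elasticsearch.action.ActionListener;

// Illustrative pattern only: a synchronous failure while opening a connection is
// routed into the listener so the caller is always completed. "Connection" and
// "openConnectionInternal" are placeholders, not the actual TcpTransport API.
final class OpenConnectionSketch {

    interface Connection {}

    void openConnection(String nodeName, ActionListener<Connection> listener) {
        try {
            openConnectionInternal(nodeName, listener);
        } catch (Exception e) {
            // Before the fix, an exception thrown on this path could propagate to
            // the caller and leave the listener waiting forever; failing the
            // listener explicitly avoids that.
            listener.onFailure(e);
        }
    }

    private void openConnectionInternal(String nodeName, ActionListener<Connection> listener) {
        throw new UnsupportedOperationException("placeholder for the real connection logic");
    }
}
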
@@ -68,7 +68,7 @@ POST /_search
--------------------------------------------------
// TEST[setup:sales]

<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-day intervals
<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-month intervals
<2> A `sum` metric is used to calculate the sum of a field. This could be any numeric metric (sum, min, max, etc.)
<3> Finally, we specify a `moving_fn` aggregation which uses "the_sum" metric as its input.
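
The request body that these callouts annotate is collapsed in this view. As a rough illustration of such a pipeline, a request along the following lines could be sent with the low-level Java REST client; the metric field (`price`), the window size, and the aggregation name `the_movfn` are assumptions, not taken from the collapsed snippet.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Illustrative request only: a one-month date_histogram ("my_date_histo") on
// "timestamp", a "the_sum" sum metric, and a moving_fn that reads "the_sum".
// The metric field ("price"), window size, and the "the_movfn" name are
// assumptions, not taken from the collapsed snippet above.
public class MovingFnRequestSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("POST", "/_search");
            request.setJsonEntity("""
                {
                  "size": 0,
                  "aggs": {
                    "my_date_histo": {
                      "date_histogram": { "field": "timestamp", "calendar_interval": "1M" },
                      "aggs": {
                        "the_sum": { "sum": { "field": "price" } },
                        "the_movfn": {
                          "moving_fn": {
                            "buckets_path": "the_sum",
                            "window": 10,
                            "script": "MovingFunctions.unweightedAvg(values)"
                          }
                        }
                      }
                    }
                  }
                }
                """);
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
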

9 changes: 9 additions & 0 deletions docs/reference/esql/esql-limitations.asciidoc
@@ -136,6 +136,15 @@ now() - 2023-10-26

include::esql-enrich-data.asciidoc[tag=limitations]

[discrete]
[[esql-limitations-mv]]
=== Multivalue limitations

{esql} <<esql-multivalued-fields,supports multivalued fields>>, but functions
return `null` when applied to a multivalued field, unless documented otherwise.
Work around this limitation by converting the field to a single value with one
of the <<esql-mv-functions,multivalue functions>>.
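
As a concrete illustration of the suggested workaround, the sketch below collapses a multivalued field with `MV_MIN` before using it, sending the query through the `_query` endpoint with the low-level Java REST client. The index name `logs` and the field name `tags` are assumptions.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Illustrative only: MV_MIN collapses the multivalued "tags" field to a single
// value so later functions do not return null. The index name "logs" and the
// field names are assumptions.
public class EsqlMultivalueWorkaroundSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("POST", "/_query");
            request.setJsonEntity("""
                { "query": "FROM logs | EVAL first_tag = MV_MIN(tags) | KEEP first_tag | LIMIT 10" }
                """);
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
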

[discrete]
[[esql-limitations-kibana]]
=== Kibana limitations
@@ -8,6 +8,9 @@

package org.elasticsearch.script.mustache;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
@@ -31,6 +34,8 @@

public class TransportMultiSearchTemplateAction extends HandledTransportAction<MultiSearchTemplateRequest, MultiSearchTemplateResponse> {

private static final Logger logger = LogManager.getLogger(TransportMultiSearchTemplateAction.class);

private final ScriptService scriptService;
private final NamedXContentRegistry xContentRegistry;
private final NodeClient client;
@@ -76,6 +81,9 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi
searchRequest = convert(searchTemplateRequest, searchTemplateResponse, scriptService, xContentRegistry, searchUsageHolder);
} catch (Exception e) {
items[i] = new MultiSearchTemplateResponse.Item(null, e);
if (ExceptionsHelper.status(e).getStatus() >= 500 && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false) {
logger.warn("MultiSearchTemplate convert failure", e);
}
continue;
}
items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
@@ -806,8 +806,10 @@ public void testHasChildInnerHitsHighlighting() throws Exception {
assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits();
assertThat(searchHits.length, equalTo(1));
assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments().length, equalTo(1));
assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments()[0].string(), equalTo("foo <em>bar</em>"));
HighlightField highlightField1 = searchHits[0].getHighlightFields().get("c_field");
assertThat(highlightField1.fragments().length, equalTo(1));
HighlightField highlightField = searchHits[0].getHighlightFields().get("c_field");
assertThat(highlightField.fragments()[0].string(), equalTo("foo <em>bar</em>"));
}
);
}
@@ -1786,7 +1788,7 @@ public void testHighlightersIgnoreParentChild() throws IOException {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("parent-id"));
HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown <em>fox</em>"));
assertThat(highlightField.fragments()[0].string(), equalTo("quick brown <em>fox</em>"));
}
);

@@ -1799,7 +1801,7 @@ public void testHighlightersIgnoreParentChild() throws IOException {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("child-id"));
HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown <em>fox</em>"));
assertThat(highlightField.fragments()[0].string(), equalTo("quick brown <em>fox</em>"));
}
);
}
@@ -26,6 +26,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@@ -183,10 +184,8 @@ public void testSimpleParentChild() throws Exception {
response -> {
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(
innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(),
equalTo("<em>fox</em> eat quick")
);
HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("message");
assertThat(highlightField.fragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("fox eat quick"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
@@ -56,7 +56,7 @@ protected Analyzer wrapAnalyzer(Analyzer analyzer, Integer maxAnalyzedOffset) {
}

@Override
protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) {
protected PassageFormatter getPassageFormatter(SearchHighlightContext.Field field, Encoder encoder) {
return new AnnotatedPassageFormatter(encoder);
}

@@ -206,7 +206,6 @@ public void testInvalidShardCountSettingsWithoutPrefix() throws Exception {
}
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/96578")
public void testCreateAndDeleteIndexConcurrently() throws InterruptedException {
createIndex("test");
final AtomicInteger indexVersion = new AtomicInteger(0);
@@ -377,7 +377,6 @@ private void assertEngineTypes() {
}
}

@AwaitsFix(bugUrl = "ES-4677")
public void testRelocation() {
var routingTableWatcher = new RoutingTableWatcher();

@@ -630,8 +630,8 @@ public void testFetchFeatures() {

SearchHit hit = hits.getAt(0);
HighlightField highlightField = hit.getHighlightFields().get("text");
assertThat(highlightField.getFragments().length, equalTo(1));
assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em> to entertain"));
assertThat(highlightField.fragments().length, equalTo(1));
assertThat(highlightField.fragments()[0].string(), equalTo("some <em>text</em> to entertain"));

Explanation explanation = hit.getExplanation();
assertThat(explanation.toString(), containsString("text:text"));
@@ -903,8 +903,8 @@ public void testNestedFetchFeatures() {
assertThat(searchHit.getNestedIdentity().getOffset(), equalTo(0));

HighlightField highlightField = searchHit.getHighlightFields().get("comments.message");
assertThat(highlightField.getFragments().length, equalTo(1));
assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>comment</em>"));
assertThat(highlightField.fragments().length, equalTo(1));
assertThat(highlightField.fragments()[0].string(), equalTo("some <em>comment</em>"));

// Can't explain nested hit with the main query, since both are in a different scopes, also the nested doc may not
// even have matched with the main query.
@@ -961,8 +961,8 @@ public void testTopHitsInNested() throws Exception {
assertThat(extractValue("id", searchHits.getAt(j).getSourceAsMap()), equalTo(0));

HighlightField highlightField = searchHits.getAt(j).getHighlightFields().get("comments.message");
assertThat(highlightField.getFragments().length, equalTo(1));
assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em>"));
assertThat(highlightField.fragments().length, equalTo(1));
assertThat(highlightField.fragments()[0].string(), equalTo("some <em>text</em>"));
}
}
}
@@ -27,6 +27,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
@@ -193,10 +194,8 @@ public void testSimpleNested() throws Exception {
innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
assertThat(innerHits.getTotalHits().value, equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(
innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(),
equalTo("<em>fox</em> eat quick")
);
HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message");
assertThat(highlightField.fragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in"));
assertThat(
innerHits.getAt(0).getFields().get("comments").getValue(),
@@ -3264,10 +3264,8 @@ public void testKeywordFieldHighlighting() throws IOException {
).get();
assertNoFailures(search);
assertThat(search.getHits().getTotalHits().value, equalTo(1L));
assertThat(
search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(),
equalTo("<em>some text</em>")
);
HighlightField highlightField = search.getHits().getAt(0).getHighlightFields().get("keyword_field");
assertThat(highlightField.fragments()[0].string(), equalTo("<em>some text</em>"));
}

public void testCopyToFields() throws Exception {
@@ -3294,8 +3292,8 @@ public void testCopyToFields() throws Exception {

assertHitCount(response, 1);
HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("how now <em>brown</em> cow"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("how now <em>brown</em> cow"));
}

public void testACopyFieldWithNestedQuery() throws Exception {
Expand Down Expand Up @@ -3343,9 +3341,9 @@ public void testACopyFieldWithNestedQuery() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo_text");
assertThat(field.getFragments().length, equalTo(2));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em>"));
assertThat(field.getFragments()[1].string(), equalTo("<em>cow</em>"));
assertThat(field.fragments().length, equalTo(2));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em>"));
assertThat(field.fragments()[1].string(), equalTo("<em>cow</em>"));
}

public void testFunctionScoreQueryHighlight() throws Exception {
Expand All @@ -3360,8 +3358,8 @@ public void testFunctionScoreQueryHighlight() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em>"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em>"));
}

public void testFiltersFunctionScoreQueryHighlight() throws Exception {
Expand All @@ -3383,8 +3381,8 @@ public void testFiltersFunctionScoreQueryHighlight() throws Exception {
).highlighter(new HighlightBuilder().field(new Field("text"))).get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em>"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em>"));
}

public void testHighlightQueryRewriteDatesWithNow() throws Exception {
@@ -3465,33 +3463,33 @@ public void testWithNestedQuery() throws Exception {
).highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))).get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
assertThat(field.getFragments().length, equalTo(2));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em> shoes"));
assertThat(field.getFragments()[1].string(), equalTo("<em>cow</em>"));
assertThat(field.fragments().length, equalTo(2));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em> shoes"));
assertThat(field.fragments()[1].string(), equalTo("<em>cow</em>"));

searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em> shoes"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em> shoes"));

searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em> <em>shoes</em>"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em> <em>shoes</em>"));

searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em> shoes"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em> shoes"));
}

// For unified and fvh highlighters we just check that the nested query is correctly extracted
Expand All @@ -3503,8 +3501,8 @@ public void testWithNestedQuery() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>brown</em>"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>brown</em>"));
}
}

@@ -3526,8 +3524,8 @@ public void testWithNormalizer() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("keyword");
assertThat(field.getFragments().length, equalTo(1));
assertThat(field.getFragments()[0].string(), equalTo("<em>hello world</em>"));
assertThat(field.fragments().length, equalTo(1));
assertThat(field.fragments()[0].string(), equalTo("<em>hello world</em>"));
}
}
