Commit 59d0293

Merge branch 'main' into dependabot/gradle/plugins/repository-azure/com.nimbusds-nimbus-jose-jwt-10.0.2
Signed-off-by: gaobinlong <[email protected]>
2 parents: 8a73280 + d9a9274

60 files changed: +977, -494 lines

.ci/bwcVersions (-1)

@@ -45,4 +45,3 @@ BWC_VERSION:
   - "2.19.0"
   - "2.19.1"
   - "2.19.2"
-  - "2.20.0"

.github/workflows/changelog_verifier.yml (-37)

@@ -14,42 +14,5 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
           ref: ${{ github.event.pull_request.head.sha }}
       - uses: dangoslen/changelog-enforcer@v3
-        id: verify-changelog-3x
         with:
           skipLabels: "autocut, skip-changelog"
-          changeLogPath: 'CHANGELOG-3.0.md'
-        continue-on-error: true
-      - uses: dangoslen/changelog-enforcer@v3
-        id: verify-changelog
-        with:
-          skipLabels: "autocut, skip-changelog"
-          changeLogPath: 'CHANGELOG.md'
-        continue-on-error: true
-      - run: |
-          # The check was possibly skipped leading to success for both the jobs
-          has_backport_label=${{ contains(join(github.event.pull_request.labels.*.name, ', '), 'backport')}}
-          has_breaking_label=${{ contains(join(github.event.pull_request.labels.*.name, ', '), '>breaking')}}
-          if [[ $has_breaking_label == true && $has_backport_label == true ]]; then
-            echo "error: Please make sure that the PR does not have a backport label associated with it when making breaking changes"
-            exit 1
-          fi
-
-          if [[ ${{ steps.verify-changelog-3x.outcome }} == 'success' && ${{ steps.verify-changelog.outcome }} == 'success' ]]; then
-            exit 0
-          fi
-
-          if [[ ${{ steps.verify-changelog-3x.outcome }} == 'failure' && ${{ steps.verify-changelog.outcome }} == 'failure' ]]; then
-            echo "error: Please ensure a changelog entry exists in CHANGELOG.md or CHANGELOG-3.0.md"
-            exit 1
-          fi
-
-          # Concatenates the labels and checks if the string contains "backport"
-          if [[ ${{ steps.verify-changelog.outcome }} == 'success' && $has_backport_label == false ]]; then
-            echo "error: Please make sure that the PR has a backport label associated with it when making an entry to the CHANGELOG.md file"
-            exit 1
-          fi
-
-          if [[ ${{ steps.verify-changelog-3x.outcome }} == 'success' && $has_backport_label == true ]]; then
-            echo "error: Please make sure that the PR does not have a backport label associated with it when making an entry to the CHANGELOG-3.0.md file"
-            exit 1
-          fi

CHANGELOG.md (+7 -34)

@@ -3,52 +3,25 @@ All notable changes to this project are documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). See the [CONTRIBUTING guide](./CONTRIBUTING.md#Changelog) for instructions on how to add changelog entries.
 
-## [Unreleased 2.x]
+## [Unreleased 3.x]
 ### Added
-- Latency and Memory allocation improvements to Multi Term Aggregation queries ([#14993](https://github.com/opensearch-project/OpenSearch/pull/14993))
-- Add logic in master service to optimize performance and retain detailed logging for critical cluster operations. ([#14795](https://github.com/opensearch-project/OpenSearch/pull/14795))
-- Add Setting to adjust the primary constraint weights ([#16471](https://github.com/opensearch-project/OpenSearch/pull/16471))
-- Introduce a setting to disable download of full cluster state from remote on term mismatch([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/))
-- Added ability to retrieve value from DocValues in a flat_object filed([#16802](https://github.com/opensearch-project/OpenSearch/pull/16802))
-- Improve performace of NumericTermAggregation by avoiding unnecessary sorting([#17252](https://github.com/opensearch-project/OpenSearch/pull/17252))
-- Fix Bug - Handle unsigned long in sorting order assertion of LongHashSet ([#17207](https://github.com/opensearch-project/OpenSearch/pull/17207))
-- Implemented computation of segment replication stats at shard level ([#17055](https://github.com/opensearch-project/OpenSearch/pull/17055))
-- [Rule Based Auto-tagging] Add in-memory attribute value store ([#17342](https://github.com/opensearch-project/OpenSearch/pull/17342))
+- Change priority for scheduling reroute during timeout([#16445](https://github.com/opensearch-project/OpenSearch/pull/16445))
+- Renaming the node role search to warm ([#17573](https://github.com/opensearch-project/OpenSearch/pull/17573))
+- Introduce a new search node role to hold search only shards ([#17620](https://github.com/opensearch-project/OpenSearch/pull/17620))
 
 ### Dependencies
-- Bump `org.awaitility:awaitility` from 4.2.0 to 4.3.0 ([#17230](https://github.com/opensearch-project/OpenSearch/pull/17230), [#17439](https://github.com/opensearch-project/OpenSearch/pull/17439))
-- Bump `dnsjava:dnsjava` from 3.6.2 to 3.6.3 ([#17231](https://github.com/opensearch-project/OpenSearch/pull/17231))
-- Bump `com.google.code.gson:gson` from 2.11.0 to 2.12.1 ([#17229](https://github.com/opensearch-project/OpenSearch/pull/17229))
-- Bump `org.jruby.joni:joni` from 2.2.1 to 2.2.3 ([#17136](https://github.com/opensearch-project/OpenSearch/pull/17136))
-- Bump `org.apache.ant:ant` from 1.10.14 to 1.10.15 ([#17288](https://github.com/opensearch-project/OpenSearch/pull/17288))
-- Bump `reactor_netty` from 1.1.26 to 1.1.27 ([#17322](https://github.com/opensearch-project/OpenSearch/pull/17322))
-- Bump `me.champeau.gradle.japicmp` from 0.4.5 to 0.4.6 ([#17375](https://github.com/opensearch-project/OpenSearch/pull/17375))
-- Bump `com.google.api.grpc:proto-google-common-protos` from 2.37.1 to 2.52.0 ([#17379](https://github.com/opensearch-project/OpenSearch/pull/17379))
-- Bump `net.minidev:json-smart` from 2.5.1 to 2.5.2 ([#17378](https://github.com/opensearch-project/OpenSearch/pull/17378))
-- Bump `com.netflix.nebula.ospackage-base` from 11.10.1 to 11.11.1 ([#17374](https://github.com/opensearch-project/OpenSearch/pull/17374))
-- Bump `ch.qos.logback:logback-classic` from 1.5.16 to 1.5.17 ([#17497](https://github.com/opensearch-project/OpenSearch/pull/17497))
-- Bump `software.amazon.awssdk` from 2.20.86 to 2.30.31 ([17396](https://github.com/opensearch-project/OpenSearch/pull/17396))
-- Bump `org.jruby.jcodings:jcodings` from 1.0.61 to 1.0.63 ([#17560](https://github.com/opensearch-project/OpenSearch/pull/17560))
-- Bump `com.azure:azure-storage-blob` from 12.28.1 to 12.29.1 ([#17562](https://github.com/opensearch-project/OpenSearch/pull/17562))
 - Bump `com.nimbusds:nimbus-jose-jwt` from 9.41.1 to 10.0.2 ([#17607](https://github.com/opensearch-project/OpenSearch/pull/17607))
+- Bump `ch.qos.logback:logback-core` from 1.5.16 to 1.5.17 ([#17609](https://github.com/opensearch-project/OpenSearch/pull/17609))
+- Bump `org.jruby.joni:joni` from 2.2.3 to 2.2.5 ([#17608](https://github.com/opensearch-project/OpenSearch/pull/17608))
 
 ### Changed
-- Convert transport-reactor-netty4 to use gradle version catalog [#17233](https://github.com/opensearch-project/OpenSearch/pull/17233)
-- Increase force merge threads to 1/8th of cores [#17255](https://github.com/opensearch-project/OpenSearch/pull/17255)
-- TieredSpilloverCache took-time threshold now guards heap tier as well as disk tier [#17190](https://github.com/opensearch-project/OpenSearch/pull/17190)
 
 ### Deprecated
 
 ### Removed
 
 ### Fixed
-- Fix visit of inner query for FunctionScoreQueryBuilder ([#16776](https://github.com/opensearch-project/OpenSearch/pull/16776))
-- Fix case insensitive and escaped query on wildcard ([#16827](https://github.com/opensearch-project/OpenSearch/pull/16827))
-- Fix exists queries on nested flat_object fields throws exception ([#16803](https://github.com/opensearch-project/OpenSearch/pull/16803))
-- Add highlighting for wildcard search on `match_only_text` field ([#17101](https://github.com/opensearch-project/OpenSearch/pull/17101))
-- Fix illegal argument exception when creating a PIT ([#16781](https://github.com/opensearch-project/OpenSearch/pull/16781))
-- Fix NPE in node stats due to QueryGroupTasks ([#17576](https://github.com/opensearch-project/OpenSearch/pull/17576))
 
 ### Security
 
-[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.19...2.x
+[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/f58d846f...main

CONTRIBUTING.md (-9)

@@ -146,15 +146,6 @@ Adding in the change is two step process:
 1. Add your changes to the corresponding section within the CHANGELOG file with dummy pull request information, publish the PR
 2. Update the entry for your change in [`CHANGELOG.md`](CHANGELOG.md) and make sure that you reference the pull request there.
 
-### Where should I put my CHANGELOG entry?
-Please review the [branching strategy](https://github.com/opensearch-project/.github/blob/main/RELEASING.md#opensearch-branching) document. The changelog on the `main` branch will contain **two files**: `CHANGELOG.md` which corresponds to unreleased changes intended for the _next minor_ release and `CHANGELOG-3.0.md` which correspond to unreleased changes intended for the _next major_ release. Your entry should go into file corresponding to the version it is intended to be released in. In practice, most changes to `main` will be backported to the next minor release so most entries will be in the `CHANGELOG.md` file.
-
-The following examples assume the _next major_ release on main is 3.0, then _next minor_ release is 2.5, and the _current_ release is 2.4.
-
-- **Add a new feature to release in next minor:** Add a changelog entry to `[Unreleased 2.x]` in CHANGELOG.md on main, then backport to 2.x (including the changelog entry).
-- **Introduce a breaking API change to release in next major:** Add a changelog entry to `[Unreleased 3.0]` to CHANGELOG-3.0.md on main, do not backport.
-- **Upgrade a dependency to fix a CVE:** Add a changelog entry to `[Unreleased 2.x]` on main, then backport to 2.x (including the changelog entry), then backport to 2.4 and ensure the changelog entry is added to `[Unreleased 2.4.1]`.
-
 ## Review Process
 
 We deeply appreciate everyone who takes the time to make a contribution. We will review all contributions as quickly as possible. As a reminder, [opening an issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose) discussing your change before you make it is the best way to smooth the PR process. This will prevent a rejection because someone else is already working on the problem, or because the solution is incompatible with the architectural direction.

libs/core/src/main/java/org/opensearch/Version.java (-1)

@@ -116,7 +116,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_1);
     public static final Version V_2_19_1 = new Version(2190199, org.apache.lucene.util.Version.LUCENE_9_12_1);
     public static final Version V_2_19_2 = new Version(2190299, org.apache.lucene.util.Version.LUCENE_9_12_1);
-    public static final Version V_2_20_0 = new Version(2200099, org.apache.lucene.util.Version.LUCENE_9_12_1);
     public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_10_1_0);
     public static final Version CURRENT = V_3_0_0;
 
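The constants above encode each release as a single integer id (2190299 for 2.19.2, 3000099 for 3.0.0, and the removed 2200099 for 2.20.0). Below is a minimal sketch of that layout, assuming the conventional major/minor/revision/build packing suggested by these values; `versionId` is a hypothetical helper for illustration, not part of the actual Version API:

    public final class VersionIdSketch {
        // Assumed layout: major * 1_000_000 + minor * 10_000 + revision * 100 + build (99 for releases).
        static int versionId(int major, int minor, int revision, int build) {
            return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
        }

        public static void main(String[] args) {
            System.out.println(versionId(2, 19, 2, 99)); // 2190299, matches V_2_19_2
            System.out.println(versionId(2, 20, 0, 99)); // 2200099, the removed V_2_20_0
            System.out.println(versionId(3, 0, 0, 99));  // 3000099, matches V_3_0_0
        }
    }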

libs/grok/build.gradle (+1 -1)

@@ -29,7 +29,7 @@
  */
 
 dependencies {
-  api 'org.jruby.joni:joni:2.2.3'
+  api 'org.jruby.joni:joni:2.2.5'
   // joni dependencies:
   api 'org.jruby.jcodings:jcodings:1.0.63'
 

libs/grok/licenses/joni-2.2.3.jar.sha1 (-1)

This file was deleted.

libs/grok/licenses/joni-2.2.5.jar.sha1 (+1)

@@ -0,0 +1 @@
+4ebafe67efa7395678a34d07e7585bed5ef0cc72

plugins/ingestion-kafka/src/internalClusterTest/java/org/opensearch/plugin/kafka/IngestFromKafkaIT.java (+12 -18)

@@ -19,6 +19,7 @@
 import org.opensearch.indices.pollingingest.PollingIngestStats;
 import org.opensearch.plugins.PluginInfo;
 import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.transport.client.Requests;
 import org.junit.Assert;
 
 import java.util.List;
@@ -56,27 +57,14 @@ public void testPluginsAreInstalled() {
     public void testKafkaIngestion() {
         produceData("1", "name1", "24");
         produceData("2", "name2", "20");
-
-        createIndex(
-            "test",
-            Settings.builder()
-                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
-                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
-                .put("ingestion_source.type", "kafka")
-                .put("ingestion_source.pointer.init.reset", "earliest")
-                .put("ingestion_source.param.topic", "test")
-                .put("ingestion_source.param.bootstrap_servers", kafka.getBootstrapServers())
-                .put("index.replication.type", "SEGMENT")
-                .build(),
-            "{\"properties\":{\"name\":{\"type\": \"text\"},\"age\":{\"type\": \"integer\"}}}}"
-        );
+        createIndexWithDefaultSettings(1, 0);
 
         RangeQueryBuilder query = new RangeQueryBuilder("age").gte(21);
         await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> {
-            refresh("test");
-            SearchResponse response = client().prepareSearch("test").setQuery(query).get();
+            refresh(indexName);
+            SearchResponse response = client().prepareSearch(indexName).setQuery(query).get();
             assertThat(response.getHits().getTotalHits().value(), is(1L));
-            PollingIngestStats stats = client().admin().indices().prepareStats("test").get().getIndex("test").getShards()[0]
+            PollingIngestStats stats = client().admin().indices().prepareStats(indexName).get().getIndex(indexName).getShards()[0]
                 .getPollingIngestStats();
             assertNotNull(stats);
             assertThat(stats.getMessageProcessorStats().getTotalProcessedCount(), is(2L));
@@ -135,10 +123,16 @@ public void testKafkaIngestion_RewindByOffset() {
         );
 
         RangeQueryBuilder query = new RangeQueryBuilder("age").gte(0);
-        await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> {
+        await().atMost(1, TimeUnit.MINUTES).untilAsserted(() -> {
             refresh("test_rewind_by_offset");
             SearchResponse response = client().prepareSearch("test_rewind_by_offset").setQuery(query).get();
             assertThat(response.getHits().getTotalHits().value(), is(1L));
         });
     }
+
+    public void testCloseIndex() throws Exception {
+        createIndexWithDefaultSettings(1, 0);
+        ensureGreen(indexName);
+        client().admin().indices().close(Requests.closeIndexRequest(indexName)).get();
+    }
 }
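
The rewind test's polling budget is raised here from 10 seconds to 1 minute. These assertions rely on Awaitility: await(...).untilAsserted(...) re-runs the lambda until it stops throwing or the deadline is hit, which absorbs the variable lag between producing to Kafka and the documents becoming searchable. A self-contained sketch of that idiom; the counter and expected value are placeholders, not taken from this commit:

    import static org.awaitility.Awaitility.await;

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;

    public class PollingAssertionSketch {
        public static void main(String[] args) {
            // Stands in for documents arriving asynchronously from a Kafka consumer.
            AtomicLong searchableDocs = new AtomicLong();
            new Thread(() -> searchableDocs.set(2L)).start();

            // Re-evaluates the block until it passes or the one-minute deadline expires.
            await().atMost(1, TimeUnit.MINUTES).untilAsserted(() -> {
                if (searchableDocs.get() != 2L) {
                    throw new AssertionError("expected 2 searchable docs, saw " + searchableDocs.get());
                }
            });
        }
    }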

plugins/ingestion-kafka/src/internalClusterTest/java/org/opensearch/plugin/kafka/KafkaIngestionBaseIT.java (+35 -1)

@@ -15,6 +15,8 @@
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.opensearch.action.search.SearchResponse;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.junit.After;
@@ -25,13 +27,14 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.Properties;
+import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.utility.DockerImageName;
 
 /**
- * Base test class for Kafka ingestion tests
+ * Base test class for Kafka ingestion tests.
  */
 @ThreadLeakFilters(filters = TestContainerThreadLeakFilter.class)
 public class KafkaIngestionBaseIT extends OpenSearchIntegTestCase {
@@ -108,4 +111,35 @@ protected void waitForSearchableDocs(long docCount, List<String> nodes) throws E
             }
         }, 1, TimeUnit.MINUTES);
     }
+
+    protected void waitForState(Callable<Boolean> checkState) throws Exception {
+        assertBusy(() -> {
+            if (checkState.call() == false) {
+                fail("Provided state requirements not met");
+            }
+        }, 1, TimeUnit.MINUTES);
+    }
+
+    protected String getSettings(String indexName, String setting) {
+        return client().admin().indices().prepareGetSettings(indexName).get().getSetting(indexName, setting);
+    }
+
+    protected void createIndexWithDefaultSettings(int numShards, int numReplicas) {
+        createIndex(
+            indexName,
+            Settings.builder()
+                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards)
+                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas)
+                .put("ingestion_source.type", "kafka")
+                .put("ingestion_source.pointer.init.reset", "earliest")
+                .put("ingestion_source.param.topic", topicName)
+                .put("ingestion_source.param.bootstrap_servers", kafka.getBootstrapServers())
+                .put("index.replication.type", "SEGMENT")
+                // set custom kafka consumer properties
+                .put("ingestion_source.param.fetch.min.bytes", 30000)
+                .put("ingestion_source.param.enable.auto.commit", false)
+                .build(),
+            "{\"properties\":{\"name\":{\"type\": \"text\"},\"age\":{\"type\": \"integer\"}}}}"
+        );
+    }
 }
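
The new helpers centralize index creation (including the Kafka ingestion-source settings and consumer overrides) and generic condition polling for the ingestion tests. A brief sketch of how a subclass might combine them, using the base class's indexName field and the produceData helper seen elsewhere in this commit; the test name, document values, and the checked setting are illustrative only:

    public class KafkaIngestionHelpersSketchIT extends KafkaIngestionBaseIT {

        public void testHelpersSketch() throws Exception {
            // Shard/replica counts are parameters; the Kafka source settings come from the base class.
            createIndexWithDefaultSettings(1, 0);
            ensureGreen(indexName);

            produceData("1", "name1", "24");
            produceData("2", "name2", "20");

            // waitForState polls an arbitrary condition (here, a plain index setting) for up to a minute.
            waitForState(() -> "0".equals(getSettings(indexName, "index.number_of_replicas")));
        }
    }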
