diff --git a/.asf.yaml b/.asf.yaml
index 57999445552..9aa6153333c 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -29,6 +29,7 @@ github:
branch_9_5: {}
branch_9_6: {}
branch_9_7: {}
+ branch_9_8: {}
branch_9x: {}
protected_tags:
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 793957db16d..a3b7d5fe64c 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -2,7 +2,8 @@
dependencies:
- changed-files:
- any-glob-to-any-file:
- - versions.props
+ - gradle/libs.versions.toml # Solr 10+
+ - versions.props # Solr < v10
- versions.lock
- solr/licenses/**
@@ -122,11 +123,6 @@ module:gcs-repository:
- any-glob-to-any-file:
- solr/modules/gcs-repository/**
-module:hadoop-auth:
- - changed-files:
- - any-glob-to-any-file:
- - solr/modules/hadoop-auth/**
-
module:hdfs:
- changed-files:
- any-glob-to-any-file:
diff --git a/.github/renovate.json b/.github/renovate.json
index 5cf53a4c5fd..94220882331 100644
--- a/.github/renovate.json
+++ b/.github/renovate.json
@@ -4,9 +4,9 @@
"enabled": true,
"dependencyDashboard": false,
"enabledManagers": ["gradle", "github-actions"],
- "includePaths": ["versions.*", "build.gradle", ".github/workflows/*"],
+ "includePaths": ["gradle/libs.versions.toml", "versions.*", "build.gradle", ".github/workflows/*"],
"postUpgradeTasks": {
- "commands": ["./gradlew updateLicenses"],
+ "commands": ["./gradlew writeLocks", "./gradlew updateLicenses"],
"fileFilters": ["solr/licenses/*.sha1"],
"executionMode": "branch"
},
diff --git a/.github/workflows/bin-solr-test.yml b/.github/workflows/bin-solr-test.yml
index a0a33ccc51a..126e8d62293 100644
--- a/.github/workflows/bin-solr-test.yml
+++ b/.github/workflows/bin-solr-test.yml
@@ -24,11 +24,11 @@ jobs:
steps:
# Setup
- uses: actions/checkout@v4
- - name: Set up JDK 11
+ - name: Set up JDK
uses: actions/setup-java@v4
with:
distribution: 'temurin'
- java-version: 11
+ java-version: 21
java-package: jdk
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v4
diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml
index 0b00a6ab308..d8593613ca6 100644
--- a/.github/workflows/docker-test.yml
+++ b/.github/workflows/docker-test.yml
@@ -26,11 +26,11 @@ jobs:
steps:
# Setup
- uses: actions/checkout@v4
- - name: Set up JDK 11
+ - name: Set up JDK 21
uses: actions/setup-java@v4
with:
distribution: 'temurin'
- java-version: 11
+ java-version: 21
java-package: jdk
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v4
diff --git a/.github/workflows/gradle-precommit.yml b/.github/workflows/gradle-precommit.yml
index dcc55ead323..67c1506ad0a 100644
--- a/.github/workflows/gradle-precommit.yml
+++ b/.github/workflows/gradle-precommit.yml
@@ -8,7 +8,7 @@ on:
jobs:
test:
- name: gradle check w/ Java 11
+ name: gradle check
runs-on: ubuntu-latest
@@ -19,11 +19,11 @@ jobs:
# Setup
- uses: actions/checkout@v4
- - name: Set up JDK 11
+ - name: Set up JDK
uses: actions/setup-java@v4
with:
distribution: 'temurin'
- java-version: 11
+ java-version: 21
java-package: jdk
- name: Setup Gradle
diff --git a/.github/workflows/solrj-test.yml b/.github/workflows/solrj-test.yml
index 1a0f6bfebde..3eb9bb4f5f6 100644
--- a/.github/workflows/solrj-test.yml
+++ b/.github/workflows/solrj-test.yml
@@ -21,11 +21,11 @@ jobs:
steps:
# Setup
- uses: actions/checkout@v4
- - name: Set up JDK 11
+ - name: Set up JDK 21
uses: actions/setup-java@v4
with:
distribution: 'temurin'
- java-version: 11
+ java-version: 21
java-package: jdk
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v4
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index f8e30632059..8580134b58b 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -24,14 +24,22 @@ jobs:
days-before-pr-stale: 60
days-before-issue-stale: -1 # we don't use issues
- days-before-close: -1 # don't close stale PRs/issues
+ days-before-close: 60 # Close PRs marked as stale after 60 days
exempt-draft-pr: true # don't mark draft PRs as stale
+ exempt-pr-labels: "exempt-stale" # don't mark PRs with these labels as stale
stale-pr-label: "stale" # label to use when marking as stale
+ close-pr-label: "closed-stale" # label to use when closing a stale PR
stale-pr-message: >
This PR has had no activity for 60 days and is now labeled as stale.
- Any new activity or converting it to draft will remove the stale label.
- To attract more reviewers, please tag people who might be familiar with the code area and/or notify the dev@solr.apache.org mailing list.
+ Any new activity will remove the stale label.
+ To attract more reviewers, please tag people who might be familiar with the code area and/or notify the dev@solr.apache.org mailing list.
+ To exempt this PR from being marked as stale, make it a draft PR or add the label "exempt-stale".
+ If left unattended, this PR will be closed after another 60 days of inactivity.
Thank you for your contribution!
+ close-pr-message: >
+ This PR is now closed due to 60 days of inactivity after being marked as stale.
+ Re-opening this PR is still possible, in which case it will be marked as active again.
+
operations-per-run: 100 # operations budget
diff --git a/NOTICE.txt b/NOTICE.txt
index 49724175b40..ce7c667fc29 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -1,6 +1,6 @@
==============================================================
Apache Solr
- Copyright 2006-2024 The Apache Software Foundation
+ Copyright 2006-2025 The Apache Software Foundation
==============================================================
This product includes software developed at
diff --git a/README.md b/README.md
index 920a4fa3005..24d419bd472 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ Solr is the blazing-fast, open source, multi-modal search platform built on [Apa
It powers full-text, vector, and geospatial search at many of the world's largest organizations.
[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/badge/icon?subject=Solr%20Artifacts)](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/)
-[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/badge/icon?subject=Solr%20Check)](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/)
+[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/badge/icon?subject=Solr%20Lint)](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/)
For a complete description of the Solr project, team composition, source
code repositories, and other details, please see the Solr web site at
@@ -94,7 +94,7 @@ Solr uses [Gradle](https://gradle.org/) for its build system. Here are some usef
```
cd ./solr/packaging/build/dev
-bin/solr start -c
+bin/solr start
```
- Open a web browser and go to http://localhost:8983/solr/ to access the Solr Admin interface. You can also use the `bin/solr` script to create and manage Solr collections. For example use the `bin/solr post` tool to index some sample data.
@@ -108,4 +108,3 @@ To get involved in the developer community:
- Slack: `#solr-dev` in the `the-asf` organization. Sign up at https://the-asf.slack.com/messages/CE70MDPMF
- [Issue Tracker (JIRA)](https://issues.apache.org/jira/browse/SOLR)
- IRC: `#solr-dev` on [libera.chat](https://web.libera.chat/?channels=#solr-dev)
-
diff --git a/build-tools/build-infra/build.gradle b/build-tools/build-infra/build.gradle
new file mode 100644
index 00000000000..9b5ff387488
--- /dev/null
+++ b/build-tools/build-infra/build.gradle
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+plugins {
+ id "java-gradle-plugin"
+ alias(libs.plugins.diffplug.spotless) apply false
+}
+
+repositories {
+ mavenCentral()
+}
+
+group = "org.apache"
+
+// Make sure the build environment is consistent.
+apply from: file('../../gradle/conventions.gradle')
+apply from: file('../../gradle/validation/check-environment.gradle')
+
+// Add spotless/ tidy.
+tasks.register("checkJdkInternalsExportedToGradle") {}
+apply from: file('../../gradle/validation/spotless.gradle')
+
+java {
+ sourceCompatibility = JavaVersion.toVersion(libs.versions.java.min.get())
+ targetCompatibility = JavaVersion.toVersion(libs.versions.java.min.get())
+}
+
+gradlePlugin {
+ automatedPublishing = false
+
+ plugins {
+ buildInfra {
+ id = 'solr.build-infra'
+ implementationClass = 'org.apache.lucene.gradle.buildinfra.BuildInfraPlugin'
+ }
+ }
+}
+
+dependencies {
+ implementation gradleApi()
+ implementation localGroovy()
+
+ implementation libs.commonscodec.commonscodec
+}
diff --git a/build-tools/build-infra/settings.gradle b/build-tools/build-infra/settings.gradle
new file mode 100644
index 00000000000..7a55021b366
--- /dev/null
+++ b/build-tools/build-infra/settings.gradle
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+rootProject.name = 'build-infra'
+
+// Use project's version catalog for centralized dependency management
+dependencyResolutionManagement {
+ versionCatalogs {
+ libs {
+ from(files("../../gradle/libs.versions.toml"))
+ }
+ }
+}
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java
similarity index 99%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java
index 0dab9dc7f05..a1d5c09586f 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java
@@ -27,6 +27,11 @@
package org.apache.lucene.gradle;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.util.Locale;
import org.apache.commons.codec.digest.DigestUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
@@ -39,16 +44,10 @@
import org.gradle.work.Incremental;
import org.gradle.work.InputChanges;
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.util.Locale;
-
public class Checksum extends DefaultTask {
private FileCollection files;
private File outputDir;
- private Algorithm algorithm;
+ private Algorithm algorithm = Checksum.Algorithm.SHA512;
public enum Algorithm {
MD5(new DigestUtils(DigestUtils.getMd5Digest())),
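The hunk above gives the Checksum task a SHA-512 default algorithm. As a rough illustration only, and not part of the patch, the commons-codec DigestUtils helper wrapped by that enum can produce a file's hex digest like this; the file name is hypothetical.

```java
import java.io.File;
import java.io.IOException;
import org.apache.commons.codec.digest.DigestUtils;

class ChecksumSketch {
  public static void main(String[] args) throws IOException {
    // DigestUtils wraps a MessageDigest; SHA-512 mirrors the task's new default algorithm.
    DigestUtils sha512 = new DigestUtils(DigestUtils.getSha512Digest());
    String hex = sha512.digestAsHex(new File("some-artifact.jar")); // hypothetical input file
    System.out.println(hex + "  some-artifact.jar");
  }
}
```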
diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java
new file mode 100644
index 00000000000..c1fb7b83983
--- /dev/null
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.gradle;
+
+import java.io.BufferedReader;
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Pattern;
+import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter;
+import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter;
+import org.gradle.api.logging.Logger;
+import org.gradle.api.logging.Logging;
+import org.gradle.api.tasks.testing.TestDescriptor;
+import org.gradle.api.tasks.testing.TestListener;
+import org.gradle.api.tasks.testing.TestOutputEvent;
+import org.gradle.api.tasks.testing.TestOutputListener;
+import org.gradle.api.tasks.testing.TestResult;
+import org.gradle.api.tasks.testing.logging.TestLogging;
+
+/**
+ * An error reporting listener that queues test output streams and displays them on failure.
+ *
+ *
+ * <p>Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed).
+ */
+public class ErrorReportingTestListener implements TestOutputListener, TestListener {
+ private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class);
+
+ private final TestExceptionFormatter formatter;
+ private final Map<TestKey, OutputHandler> outputHandlers = new ConcurrentHashMap<>();
+ private final Path spillDir;
+ private final Path outputsDir;
+ private final boolean verboseMode;
+
+ public ErrorReportingTestListener(
+ TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
+ this.formatter = new FullExceptionFormatter(testLogging);
+ this.spillDir = spillDir;
+ this.outputsDir = outputsDir;
+ this.verboseMode = verboseMode;
+ }
+
+ @Override
+ public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) {
+ handlerFor(testDescriptor).write(outputEvent);
+ }
+
+ @Override
+ public void beforeSuite(TestDescriptor suite) {
+ // noop.
+ }
+
+ @Override
+ public void beforeTest(TestDescriptor testDescriptor) {
+ // Noop.
+ }
+
+ @Override
+ public void afterSuite(final TestDescriptor suite, TestResult result) {
+ if (suite.getParent() == null || suite.getName().startsWith("Gradle")) {
+ return;
+ }
+
+ TestKey key = TestKey.of(suite);
+ try {
+ OutputHandler outputHandler = outputHandlers.get(key);
+ if (outputHandler != null) {
+ long length = outputHandler.length();
+ if (length > 1024 * 1024 * 10) {
+ LOGGER.warn(
+ String.format(
+ Locale.ROOT,
+ "WARNING: Test %s wrote %,d bytes of output.",
+ suite.getName(),
+ length));
+ }
+ }
+
+ boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE);
+ boolean dumpOutput = echoOutput;
+
+ // If the test suite failed, report output.
+ if (dumpOutput || echoOutput) {
+ Files.createDirectories(outputsDir);
+ Path outputLog = outputsDir.resolve(getOutputLogName(suite));
+
+ // Save the output of a failing test to disk.
+ try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) {
+ if (outputHandler != null) {
+ outputHandler.copyTo(w);
+ }
+ }
+
+ if (echoOutput && !verboseMode) {
+ synchronized (this) {
+ System.out.println("");
+ System.out.println(
+ suite.getClassName()
+ + " > test suite's output saved to "
+ + outputLog
+ + ", copied below:");
+ try (BufferedReader reader =
+ Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) {
+ char[] buf = new char[1024];
+ int len;
+ while ((len = reader.read(buf)) >= 0) {
+ System.out.print(new String(buf, 0, len));
+ }
+ System.out.println();
+ }
+ }
+ }
+ }
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ } finally {
+ OutputHandler handler = outputHandlers.remove(key);
+ if (handler != null) {
+ try {
+ handler.close();
+ } catch (IOException e) {
+ LOGGER.error("Failed to close output handler for: " + key, e);
+ }
+ }
+ }
+ }
+
+ private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");
+
+ public static String getOutputLogName(TestDescriptor suite) {
+ return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_");
+ }
+
+ @Override
+ public void afterTest(TestDescriptor testDescriptor, TestResult result) {
+ // Include test failure exception stacktrace(s) in test output log.
+ if (result.getResultType() == TestResult.ResultType.FAILURE) {
+ if (result.getExceptions().size() > 0) {
+ String message = formatter.format(testDescriptor, result.getExceptions());
+ handlerFor(testDescriptor).write(message);
+ }
+ }
+ }
+
+ private OutputHandler handlerFor(TestDescriptor descriptor) {
+ // Attach output of leaves (individual tests) to their parent.
+ if (!descriptor.isComposite()) {
+ descriptor = descriptor.getParent();
+ }
+ return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler());
+ }
+
+ public static class TestKey {
+ private final String key;
+
+ private TestKey(String key) {
+ this.key = key;
+ }
+
+ public static TestKey of(TestDescriptor d) {
+ StringBuilder key = new StringBuilder();
+ key.append(d.getClassName());
+ key.append("::");
+ key.append(d.getName());
+ key.append("::");
+ key.append(d.getParent() == null ? "-" : d.getParent().toString());
+ return new TestKey(key.toString());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ return o != null && o.getClass() == this.getClass() && Objects.equals(((TestKey) o).key, key);
+ }
+
+ @Override
+ public int hashCode() {
+ return key.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return key;
+ }
+ }
+
+ private class OutputHandler implements Closeable {
+ // Max single-line buffer before automatic wrap occurs.
+ private static final int MAX_LINE_WIDTH = 1024 * 4;
+
+ private final SpillWriter buffer;
+
+ // internal stream.
+ private final PrefixedWriter sint;
+ // stdout
+ private final PrefixedWriter sout;
+ // stderr
+ private final PrefixedWriter serr;
+
+ // last used stream (so that we can flush it properly and prefixes are not screwed up).
+ private PrefixedWriter last;
+
+ public OutputHandler() {
+ buffer =
+ new SpillWriter(
+ () -> {
+ try {
+ return Files.createTempFile(spillDir, "spill-", ".tmp");
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ });
+
+ Writer sink = buffer;
+ if (verboseMode) {
+ sink = new StdOutTeeWriter(buffer);
+ }
+
+ sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH);
+ sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH);
+ serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH);
+ last = sint;
+ }
+
+ public void write(TestOutputEvent event) {
+ write(
+ (event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr),
+ event.getMessage());
+ }
+
+ public void write(String message) {
+ write(sint, message);
+ }
+
+ public long length() throws IOException {
+ return buffer.length();
+ }
+
+ private void write(PrefixedWriter out, String message) {
+ try {
+ if (out != last) {
+ last.completeLine();
+ last = out;
+ }
+ out.write(message);
+ } catch (IOException e) {
+ throw new UncheckedIOException("Unable to write to test output.", e);
+ }
+ }
+
+ public void copyTo(Writer out) throws IOException {
+ flush();
+ buffer.copyTo(out);
+ }
+
+ public void flush() throws IOException {
+ sout.completeLine();
+ serr.completeLine();
+ buffer.flush();
+ }
+
+ @Override
+ public void close() throws IOException {
+ buffer.close();
+ }
+ }
+}
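The new ErrorReportingTestListener buffers per-suite output and echoes it only when a suite fails. Below is a rough sketch, not part of the patch, of how a Gradle Test task could register it; the directory choices are hypothetical and the real wiring lives in the project's Gradle test scripts.

```java
import java.nio.file.Path;
import org.apache.lucene.gradle.ErrorReportingTestListener;
import org.gradle.api.tasks.testing.Test;

class TestListenerWiringSketch {
  static void attach(Test testTask) {
    // Hypothetical locations; the real build picks its own spill/output directories.
    Path spillDir = testTask.getTemporaryDir().toPath();
    Path outputsDir = spillDir.resolve("test-outputs");
    ErrorReportingTestListener listener =
        new ErrorReportingTestListener(
            testTask.getTestLogging(), spillDir, outputsDir, /* verboseMode= */ false);
    // The listener implements both TestListener and TestOutputListener, so register it twice.
    testTask.addTestListener(listener);
    testTask.addTestOutputListener(listener);
  }
}
```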
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java
similarity index 96%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java
index db4f804f12e..5436afe70f8 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java
@@ -67,6 +67,6 @@ public void run(Path source, Path destination) throws IOException {
fileContent = fileContent.replace(entry.getKey(), String.valueOf(entry.getValue()));
}
Files.writeString(
- destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
+ destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
}
}
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java
similarity index 91%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java
index 7281d496001..3dc663e8332 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java
@@ -20,12 +20,13 @@
import java.io.Writer;
/**
- * Prefixes every new line with a given string, synchronizing multiple streams to emit consistent lines.
+ * Prefixes every new line with a given string, synchronizing multiple streams to emit consistent
+ * lines.
*/
public class PrefixedWriter extends Writer {
Writer sink;
- private final static char LF = '\n';
+ private static final char LF = '\n';
private final String prefix;
private final StringBuilder lineBuffer = new StringBuilder();
private final int maxLineLength;
@@ -45,7 +46,7 @@ public void write(int c) throws IOException {
sink.write(LF);
lineBuffer.setLength(0);
- if (c != LF) {
+ if (c != LF) {
lineBuffer.append((char) c);
}
} else {
@@ -70,9 +71,7 @@ public void close() throws IOException {
throw new UnsupportedOperationException();
}
- /**
- * Complete the current line (emit LF if not at the start of the line already).
- */
+ /** Complete the current line (emit LF if not at the start of the line already). */
public void completeLine() throws IOException {
if (lineBuffer.length() > 0) {
write(LF);
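PrefixedWriter re-emits whatever it receives with the constructor's prefix at the start of every line, which is how the " 1> " and " 2> " markers in saved test output are produced. A small sketch follows, assuming the constructor shape seen in OutputHandler above; the prefix and the expected output are illustrative.

```java
import java.io.IOException;
import java.io.StringWriter;
import org.apache.lucene.gradle.PrefixedWriter;

class PrefixedWriterSketch {
  public static void main(String[] args) throws IOException {
    StringWriter sink = new StringWriter();
    // Arguments mirror the call in OutputHandler: prefix, sink, max line width.
    PrefixedWriter out = new PrefixedWriter("  1> ", sink, 1024);
    out.write("first line\nsecond line");
    out.completeLine(); // terminate the last, unfinished line
    // Roughly expected: "  1> first line\n  1> second line\n"
    System.out.print(sink);
  }
}
```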
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java
similarity index 79%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java
index 60def1a89d1..15e0f11c56e 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java
@@ -20,13 +20,12 @@
import java.io.IOException;
import java.nio.file.Paths;
import java.util.AbstractMap.SimpleEntry;
-import java.util.Arrays;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-
import jdk.jfr.consumer.RecordedClass;
import jdk.jfr.consumer.RecordedEvent;
import jdk.jfr.consumer.RecordedFrame;
@@ -36,15 +35,12 @@
import jdk.jfr.consumer.RecordingFile;
/**
- * Processes an array of recording files (from tests), and prints a simple histogram.
- * Inspired by the JFR example code.
- * Whole stacks are deduplicated (with the default stacksize being 1): you can drill deeper
- * by adjusting the parameters.
+ * Processes an array of recording files (from tests), and prints a simple histogram. Inspired by
+ * the JFR example code. Whole stacks are deduplicated (with the default stacksize being 1): you can
+ * drill deeper by adjusting the parameters.
*/
public class ProfileResults {
- /**
- * Formats a frame to a formatted line. This is deduplicated on!
- */
+ /** Formats a frame to a formatted line. This is deduplicated on! */
static String frameToString(RecordedFrame frame, boolean lineNumbers) {
StringBuilder builder = new StringBuilder();
RecordedMethod method = frame.getMethod();
@@ -84,29 +80,32 @@ static String frameToString(RecordedFrame frame, boolean lineNumbers) {
/**
* Driver method, for testing standalone.
+ *
*
*/
public static void main(String[] args) throws IOException {
- printReport(Arrays.asList(args),
- System.getProperty(MODE_KEY, MODE_DEFAULT),
- Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)),
- Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)),
- Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT)));
+ printReport(
+ Arrays.asList(args),
+ System.getProperty(MODE_KEY, MODE_DEFAULT),
+ Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)),
+ Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)),
+ Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT)));
}
/** true if we care about this event */
static boolean isInteresting(String mode, RecordedEvent event) {
String name = event.getEventType().getName();
- switch(mode) {
+ switch (mode) {
case "cpu":
- return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample")) &&
- !isGradlePollThread(event.getThread("sampledThread"));
+ return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample"))
+ && !isGradlePollThread(event.getThread("sampledThread"));
case "heap":
- return (name.equals("jdk.ObjectAllocationInNewTLAB") || name.equals("jdk.ObjectAllocationOutsideTLAB")) &&
- !isGradlePollThread(event.getThread("eventThread"));
+ return (name.equals("jdk.ObjectAllocationInNewTLAB")
+ || name.equals("jdk.ObjectAllocationOutsideTLAB"))
+ && !isGradlePollThread(event.getThread("eventThread"));
default:
throw new UnsupportedOperationException(event.toString());
}
@@ -119,7 +118,7 @@ static boolean isGradlePollThread(RecordedThread thread) {
/** value we accumulate for this event */
static long getValue(RecordedEvent event) {
- switch(event.getEventType().getName()) {
+ switch (event.getEventType().getName()) {
case "jdk.ObjectAllocationInNewTLAB":
return event.getLong("tlabSize");
case "jdk.ObjectAllocationOutsideTLAB":
@@ -144,15 +143,17 @@ static String formatValue(long value) {
/** fixed width used for printing the different columns */
private static final int COLUMN_SIZE = 14;
+
private static final String COLUMN_PAD = "%-" + COLUMN_SIZE + "s";
+
private static String pad(String input) {
return String.format(Locale.ROOT, COLUMN_PAD, input);
}
- /**
- * Process all the JFR files passed in args and print a merged summary.
- */
- public static void printReport(List<String> files, String mode, int stacksize, int count, boolean lineNumbers) throws IOException {
+ /** Process all the JFR files passed in args and print a merged summary. */
+ public static void printReport(
+ List<String> files, String mode, int stacksize, int count, boolean lineNumbers)
+ throws IOException {
if (!"cpu".equals(mode) && !"heap".equals(mode)) {
throw new IllegalArgumentException("tests.profile.mode must be one of (cpu,heap)");
}
@@ -178,14 +179,13 @@ public static void printReport(List files, String mode, int stacksize, i
StringBuilder stack = new StringBuilder();
for (int i = 0; i < Math.min(stacksize, trace.getFrames().size()); i++) {
if (stack.length() > 0) {
- stack.append("\n")
- .append(framePadding)
- .append(" at ");
+ stack.append("\n").append(framePadding).append(" at ");
}
stack.append(frameToString(trace.getFrames().get(i), lineNumbers));
}
String line = stack.toString();
- SimpleEntry<String, Long> entry = histogram.computeIfAbsent(line, u -> new SimpleEntry<>(line, 0L));
+ SimpleEntry<String, Long> entry =
+ histogram.computeIfAbsent(line, u -> new SimpleEntry<>(line, 0L));
long value = getValue(event);
entry.setValue(entry.getValue() + value);
totalEvents++;
@@ -195,12 +195,20 @@ public static void printReport(List files, String mode, int stacksize, i
}
}
// print summary from histogram
- System.out.printf(Locale.ROOT, "PROFILE SUMMARY from %d events (total: %s)\n", totalEvents, formatValue(sumValues));
+ System.out.printf(
+ Locale.ROOT,
+ "PROFILE SUMMARY from %d events (total: %s)\n",
+ totalEvents,
+ formatValue(sumValues));
System.out.printf(Locale.ROOT, " tests.profile.mode=%s\n", mode);
System.out.printf(Locale.ROOT, " tests.profile.count=%d\n", count);
System.out.printf(Locale.ROOT, " tests.profile.stacksize=%d\n", stacksize);
System.out.printf(Locale.ROOT, " tests.profile.linenumbers=%b\n", lineNumbers);
- System.out.printf(Locale.ROOT, "%s%sSTACK\n", pad("PERCENT"), pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES"));
+ System.out.printf(
+ Locale.ROOT,
+ "%s%sSTACK\n",
+ pad("PERCENT"),
+ pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES"));
List<SimpleEntry<String, Long>> entries = new ArrayList<>(histogram.values());
entries.sort((u, v) -> v.getValue().compareTo(u.getValue()));
int seen = 0;
@@ -209,7 +217,8 @@ public static void printReport(List files, String mode, int stacksize, i
break;
}
String percent = String.format("%2.2f%%", 100 * (c.getValue() / (float) sumValues));
- System.out.printf(Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey());
+ System.out.printf(
+ Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey());
}
}
}
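The reformatted printReport signature above can also be driven directly. Here is a minimal sketch, not part of the patch; the recording path is hypothetical and the arguments mirror the tests.profile.* system properties read by main().

```java
import java.io.IOException;
import java.util.List;
import org.apache.lucene.gradle.ProfileResults;

class ProfileReportSketch {
  public static void main(String[] args) throws IOException {
    // Prints a merged histogram of CPU samples, deduplicated on single-frame stacks.
    ProfileResults.printReport(
        List.of("build/tmp/tests-profile/test-run.jfr"), // hypothetical recording file
        "cpu",   // tests.profile.mode: "cpu" or "heap"
        1,       // tests.profile.stacksize
        10,      // tests.profile.count
        false);  // tests.profile.linenumbers
  }
}
```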
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java
similarity index 92%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java
index f89977c2503..e9783d070b5 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java
@@ -24,9 +24,10 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Supplier;
+import org.jetbrains.annotations.NotNull;
public class SpillWriter extends Writer {
- private final static int MAX_BUFFERED = 2 * 1024;
+ private static final int MAX_BUFFERED = 2 * 1024;
private final StringWriter buffer = new StringWriter(MAX_BUFFERED);
private final Supplier<Path> spillPathSupplier;
@@ -38,7 +39,7 @@ public SpillWriter(Supplier spillPathSupplier) {
}
@Override
- public void write(char[] cbuf, int off, int len) throws IOException {
+ public void write(char @NotNull [] cbuf, int off, int len) throws IOException {
getSink(len).write(cbuf, off, len);
}
@@ -58,7 +59,7 @@ public void write(String str) throws IOException {
}
@Override
- public void write(String str, int off, int len) throws IOException {
+ public void write(@NotNull String str, int off, int len) throws IOException {
getSink(len).write(str, off, len);
}
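SpillWriter keeps test output in memory and spills to a temp file from the supplied path supplier once its internal buffer fills. A short sketch of that contract follows, assuming only the constructor and the copyTo/close calls visible in this diff; the spill directory is hypothetical.

```java
import java.io.IOException;
import java.io.StringWriter;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.lucene.gradle.SpillWriter;

class SpillWriterSketch {
  public static void main(String[] args) throws IOException {
    Path spillDir = Files.createTempDirectory("spill"); // hypothetical spill directory
    SpillWriter writer =
        new SpillWriter(
            () -> {
              try {
                return Files.createTempFile(spillDir, "spill-", ".tmp");
              } catch (IOException e) {
                throw new UncheckedIOException(e);
              }
            });
    writer.write("large test output would go here...");
    StringWriter copy = new StringWriter();
    writer.copyTo(copy); // copyTo() is what OutputHandler uses when saving failed-test output
    writer.close();
    System.out.println(copy);
  }
}
```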
diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java
new file mode 100644
index 00000000000..71901259ea2
--- /dev/null
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.gradle;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.Writer;
+import org.jetbrains.annotations.NotNull;
+
+class StdOutTeeWriter extends Writer {
+ private final Writer delegate;
+ private final PrintStream out = System.out;
+
+ public StdOutTeeWriter(Writer delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public void write(int c) throws IOException {
+ delegate.write(c);
+ out.write(c);
+ }
+
+ @Override
+ public void write(char @NotNull [] cbuf) throws IOException {
+ delegate.write(cbuf);
+ out.print(cbuf);
+ }
+
+ @Override
+ public void write(@NotNull String str) throws IOException {
+ delegate.write(str);
+ out.print(str);
+ }
+
+ @Override
+ public void write(@NotNull String str, int off, int len) throws IOException {
+ delegate.write(str, off, len);
+ out.append(str, off, len);
+ }
+
+ @Override
+ public Writer append(CharSequence csq) throws IOException {
+ delegate.append(csq);
+ out.append(csq);
+ return this;
+ }
+
+ @Override
+ public Writer append(CharSequence csq, int start, int end) throws IOException {
+ delegate.append(csq, start, end);
+ out.append(csq, start, end);
+ return this;
+ }
+
+ @Override
+ public Writer append(char c) throws IOException {
+ delegate.append(c);
+ out.append(c);
+ return this;
+ }
+
+ @Override
+ public void write(char @NotNull [] cbuf, int off, int len) throws IOException {
+ delegate.write(cbuf, off, len);
+ out.print(new String(cbuf, off, len));
+ }
+
+ @Override
+ public void flush() throws IOException {
+ delegate.flush();
+ out.flush();
+ }
+
+ @Override
+ public void close() throws IOException {
+ delegate.close();
+ // Don't close the actual output.
+ }
+}
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
similarity index 75%
rename from buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
index e6930af7c74..adb8f3eaf07 100644
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
@@ -16,32 +16,26 @@
*/
package org.apache.lucene.gradle;
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLConnection;
-import java.nio.channels.Channels;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
+import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
-import java.util.EnumSet;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
-import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
-import static java.nio.file.StandardOpenOption.APPEND;
-
/**
* Standalone class that can be used to download a gradle-wrapper.jar
- *
- * Has no dependencies outside of standard java libraries
+ *
+ * <p>Has no dependencies outside of standard java libraries
*/
public class WrapperDownloader {
public static void main(String[] args) {
@@ -60,20 +54,23 @@ public static void main(String[] args) {
}
public static void checkVersion() {
- int major = Runtime.getRuntime().version().feature();
- if (major < 11 || major > 21) {
- throw new IllegalStateException("java version must be between 11 and 21, your version: " + major);
+ int major = Runtime.version().feature();
+ if (major < 21 || major > 23) {
+ throw new IllegalStateException(
+ "java version must be between 21 and 23, your version: " + major);
}
}
public void run(Path destination) throws IOException, NoSuchAlgorithmException {
- Path checksumPath = destination.resolveSibling(destination.getFileName().toString() + ".sha256");
+ Path checksumPath =
+ destination.resolveSibling(destination.getFileName().toString() + ".sha256");
if (!Files.exists(checksumPath)) {
throw new IOException("Checksum file not found: " + checksumPath);
}
String expectedChecksum = Files.readString(checksumPath, StandardCharsets.UTF_8).trim();
- Path versionPath = destination.resolveSibling(destination.getFileName().toString() + ".version");
+ Path versionPath =
+ destination.resolveSibling(destination.getFileName().toString() + ".version");
if (!Files.exists(versionPath)) {
throw new IOException("Wrapper version file not found: " + versionPath);
}
@@ -92,8 +89,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
}
}
- URL url = new URL("https://raw.githubusercontent.com/gradle/gradle/v" + wrapperVersion + "/gradle/wrapper/gradle-wrapper.jar");
- System.err.println("Downloading gradle-wrapper.jar from " + url);
+ URI uri =
+ URI.create(
+ "https://raw.githubusercontent.com/gradle/gradle/v"
+ + wrapperVersion
+ + "/gradle/wrapper/gradle-wrapper.jar");
+ System.err.println("Downloading gradle-wrapper.jar from " + uri);
// Zero-copy save the jar to a temp file
Path temp = Files.createTempFile(destination.getParent(), ".gradle-wrapper", ".tmp");
@@ -102,13 +103,14 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
int retryDelay = 30;
HttpURLConnection connection;
while (true) {
- connection = (HttpURLConnection) url.openConnection();
+ connection = (HttpURLConnection) uri.toURL().openConnection();
try {
connection.connect();
} catch (IOException e) {
if (retries-- > 0) {
// Retry after a short delay
- System.err.println("Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds.");
+ System.err.println(
+ "Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds.");
Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay));
continue;
}
@@ -120,7 +122,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
case HttpURLConnection.HTTP_BAD_GATEWAY:
if (retries-- > 0) {
// Retry after a short delay.
- System.err.println("Server returned HTTP " + connection.getResponseCode() + ", will retry in " + retryDelay + " seconds.");
+ System.err.println(
+ "Server returned HTTP "
+ + connection.getResponseCode()
+ + ", will retry in "
+ + retryDelay
+ + " seconds.");
Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay));
continue;
}
@@ -131,13 +138,15 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
}
try (InputStream is = connection.getInputStream();
- OutputStream out = Files.newOutputStream(temp)){
+ OutputStream out = Files.newOutputStream(temp)) {
is.transferTo(out);
}
String checksum = checksum(digest, temp);
if (!checksum.equalsIgnoreCase(expectedChecksum)) {
- throw new IOException(String.format(Locale.ROOT,
+ throw new IOException(
+ String.format(
+ Locale.ROOT,
"Checksum mismatch on downloaded gradle-wrapper.jar (was: %s, expected: %s).",
checksum,
expectedChecksum));
@@ -146,8 +155,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
Files.move(temp, destination, REPLACE_EXISTING);
temp = null;
} catch (IOException | InterruptedException e) {
- throw new IOException("Could not download gradle-wrapper.jar (" +
- e.getClass().getSimpleName() + ": " + e.getMessage() + ").");
+ throw new IOException(
+ "Could not download gradle-wrapper.jar ("
+ + e.getClass().getSimpleName()
+ + ": "
+ + e.getMessage()
+ + ").");
} finally {
if (temp != null) {
Files.deleteIfExists(temp);
@@ -165,7 +178,8 @@ private String checksum(MessageDigest messageDigest, Path path) throws IOExcepti
}
return sb.toString();
} catch (IOException e) {
- throw new IOException("Could not compute digest of file: " + path + " (" + e.getMessage() + ")");
+ throw new IOException(
+ "Could not compute digest of file: " + path + " (" + e.getMessage() + ")");
}
}
}
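WrapperDownloader now requires Java 21 to 23 and still verifies the downloaded jar against the committed .sha256 file. Below is a rough standalone sketch of that verification step, not part of the patch; the paths are hypothetical and HexFormat is used here purely for illustration.

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HexFormat;

class WrapperChecksumSketch {
  public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
    Path jar = Path.of("gradle/wrapper/gradle-wrapper.jar");        // hypothetical path
    Path sha = Path.of("gradle/wrapper/gradle-wrapper.jar.sha256"); // hypothetical path
    String expected = Files.readString(sha, StandardCharsets.UTF_8).trim();
    byte[] digest = MessageDigest.getInstance("SHA-256").digest(Files.readAllBytes(jar));
    String actual = HexFormat.of().formatHex(digest);
    if (!actual.equalsIgnoreCase(expected)) {
      throw new IOException("Checksum mismatch on gradle-wrapper.jar");
    }
  }
}
```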
diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java
new file mode 100644
index 00000000000..415922a1916
--- /dev/null
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.gradle.buildinfra;
+
+import java.nio.file.Path;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.lucene.gradle.Checksum;
+import org.apache.lucene.gradle.ErrorReportingTestListener;
+import org.apache.lucene.gradle.ProfileResults;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.tasks.testing.TestDescriptor;
+import org.gradle.api.tasks.testing.logging.TestLogging;
+
+public class BuildInfraPlugin implements Plugin<Project> {
+ @Override
+ public void apply(Project project) {
+ project.getExtensions().create(BuildInfraExtension.NAME, BuildInfraExtension.class);
+ }
+
+ public static class BuildInfraExtension {
+ public static final String NAME = "buildinfra";
+
+ public ErrorReportingTestListener newErrorReportingTestListener(
+ TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
+ return new ErrorReportingTestListener(testLogging, spillDir, outputsDir, verboseMode);
+ }
+
+ public DigestUtils sha1Digest() {
+ return new DigestUtils(DigestUtils.getSha1Digest());
+ }
+
+ public String getOutputLogName(TestDescriptor suite) {
+ return ErrorReportingTestListener.getOutputLogName(suite);
+ }
+
+ public Class<?> checksumClass() {
+ return Checksum.class;
+ }
+
+ public Class<?> profileResultsClass() {
+ return ProfileResults.class;
+ }
+ }
+}
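BuildInfraPlugin registers a "buildinfra" extension that build scripts call for checksum digests and class lookups. A minimal sketch, not part of the patch, of reaching that extension through the Gradle API from Java; the real call sites are the project's Groovy build scripts.

```java
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.lucene.gradle.buildinfra.BuildInfraPlugin;
import org.gradle.api.Project;

class BuildInfraUsageSketch {
  static void demo(Project project) {
    // Assumes the 'solr.build-infra' plugin has already been applied to this project.
    BuildInfraPlugin.BuildInfraExtension buildinfra =
        project.getExtensions().getByType(BuildInfraPlugin.BuildInfraExtension.class);

    DigestUtils sha1 = buildinfra.sha1Digest();         // SHA-1 helper used for license checksums
    Class<?> checksumTask = buildinfra.checksumClass(); // task type for registering checksum tasks
    project.getLogger().lifecycle("Checksum task type: " + checksumTask.getName() + ", " + sha1);
  }
}
```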
diff --git a/build-tools/missing-doclet/build.gradle b/build-tools/missing-doclet/build.gradle
new file mode 100644
index 00000000000..11a7fc6e2a8
--- /dev/null
+++ b/build-tools/missing-doclet/build.gradle
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+plugins {
+ id 'java-library'
+ alias(libs.plugins.diffplug.spotless) apply false
+}
+
+repositories {
+ mavenCentral()
+}
+
+group = "org.apache.solr.tools"
+description = 'Doclet-based javadoc validation'
+
+// Make sure the build environment is consistent.
+apply from: file('../../gradle/conventions.gradle')
+apply from: file('../../gradle/validation/check-environment.gradle')
+
+// Add spotless/ tidy.
+tasks.register("checkJdkInternalsExportedToGradle") {}
+apply from: file('../../gradle/validation/spotless.gradle')
+
+java {
+ sourceCompatibility = JavaVersion.toVersion(libs.versions.java.min.get())
+ targetCompatibility = JavaVersion.toVersion(libs.versions.java.min.get())
+}
+
+tasks.withType(JavaCompile).configureEach {
+ options.compilerArgs += ["--release", targetCompatibility.toString()]
+ options.encoding = "UTF-8"
+}
diff --git a/build-tools/missing-doclet/settings.gradle b/build-tools/missing-doclet/settings.gradle
new file mode 100644
index 00000000000..c39219472be
--- /dev/null
+++ b/build-tools/missing-doclet/settings.gradle
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+rootProject.name = "missing-doclet"
+
+// Use project's version catalog for centralized dependency management
+dependencyResolutionManagement {
+ versionCatalogs {
+ libs {
+ from(files("../../gradle/libs.versions.toml"))
+ }
+ }
+}
diff --git a/dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
similarity index 60%
rename from dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
rename to build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
index 36c0e4fe4c8..bb0b0fb3ea0 100644
--- a/dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
+++ b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
@@ -16,6 +16,9 @@
*/
package org.apache.lucene.missingdoclet;
+import com.sun.source.doctree.DocCommentTree;
+import com.sun.source.doctree.ParamTree;
+import com.sun.source.util.DocTrees;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@@ -24,7 +27,6 @@
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
@@ -35,24 +37,19 @@
import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Elements;
import javax.tools.Diagnostic;
-
-import com.sun.source.doctree.DocCommentTree;
-import com.sun.source.doctree.ParamTree;
-import com.sun.source.util.DocTrees;
-
import jdk.javadoc.doclet.Doclet;
import jdk.javadoc.doclet.DocletEnvironment;
import jdk.javadoc.doclet.Reporter;
import jdk.javadoc.doclet.StandardDoclet;
/**
- * Checks for missing javadocs, where missing also means "only whitespace" or "license header".
- * Has option --missing-level (package, class, method, parameter) so that we can improve over time.
- * Has option --missing-ignore to ignore individual elements (such as split packages).
- * It isn't recursive, just ignores exactly the elements you tell it.
- * This should be removed when packaging is fixed to no longer be split across JARs.
- * Has option --missing-method to apply "method" level to selected packages (fix one at a time).
- * Matches package names exactly: so you'll need to list subpackages separately.
+ * Checks for missing javadocs, where missing also means "only whitespace" or "license header". Has
+ * option --missing-level (package, class, method, parameter) so that we can improve over time. Has
+ * option --missing-ignore to ignore individual elements (such as split packages). It isn't
+ * recursive, just ignores exactly the elements you tell it. This should be removed when packaging
+ * is fixed to no longer be split across JARs. Has option --missing-method to apply "method" level
+ * to selected packages (fix one at a time). Matches package names exactly: so you'll need to list
+ * subpackages separately.
*/
public class MissingDoclet extends StandardDoclet {
// checks that modules and packages have documentation
@@ -70,121 +67,124 @@ public class MissingDoclet extends StandardDoclet {
Elements elementUtils;
Set<String> ignored = Collections.emptySet();
Set<String> methodPackages = Collections.emptySet();
-
+
@Override
public Set getSupportedOptions() {
Set<Doclet.Option> options = new HashSet<>();
options.addAll(super.getSupportedOptions());
- options.add(new Doclet.Option() {
- @Override
- public int getArgumentCount() {
- return 1;
- }
+ options.add(
+ new Doclet.Option() {
+ @Override
+ public int getArgumentCount() {
+ return 1;
+ }
- @Override
- public String getDescription() {
- return "level to enforce for missing javadocs: [package, class, method, parameter]";
- }
+ @Override
+ public String getDescription() {
+ return "level to enforce for missing javadocs: [package, class, method, parameter]";
+ }
- @Override
- public Kind getKind() {
- return Option.Kind.STANDARD;
- }
+ @Override
+ public Kind getKind() {
+ return Option.Kind.STANDARD;
+ }
- @Override
- public List<String> getNames() {
- return Collections.singletonList("--missing-level");
- }
+ @Override
+ public List<String> getNames() {
+ return Collections.singletonList("--missing-level");
+ }
- @Override
- public String getParameters() {
- return "level";
- }
+ @Override
+ public String getParameters() {
+ return "level";
+ }
- @Override
- public boolean process(String option, List<String> arguments) {
- switch (arguments.get(0)) {
- case "package":
- level = PACKAGE;
- return true;
- case "class":
- level = CLASS;
- return true;
- case "method":
- level = METHOD;
- return true;
- case "parameter":
- level = PARAMETER;
- return true;
- default:
- return false;
- }
- }
- });
- options.add(new Doclet.Option() {
- @Override
- public int getArgumentCount() {
- return 1;
- }
+ @Override
+ public boolean process(String option, List<String> arguments) {
+ switch (arguments.get(0)) {
+ case "package":
+ level = PACKAGE;
+ return true;
+ case "class":
+ level = CLASS;
+ return true;
+ case "method":
+ level = METHOD;
+ return true;
+ case "parameter":
+ level = PARAMETER;
+ return true;
+ default:
+ return false;
+ }
+ }
+ });
+ options.add(
+ new Doclet.Option() {
+ @Override
+ public int getArgumentCount() {
+ return 1;
+ }
- @Override
- public String getDescription() {
- return "comma separated list of element names to ignore (e.g. as a workaround for split packages)";
- }
+ @Override
+ public String getDescription() {
+ return "comma separated list of element names to ignore (e.g. as a workaround for split packages)";
+ }
- @Override
- public Kind getKind() {
- return Option.Kind.STANDARD;
- }
+ @Override
+ public Kind getKind() {
+ return Option.Kind.STANDARD;
+ }
- @Override
- public List<String> getNames() {
- return Collections.singletonList("--missing-ignore");
- }
+ @Override
+ public List<String> getNames() {
+ return Collections.singletonList("--missing-ignore");
+ }
- @Override
- public String getParameters() {
- return "ignoredNames";
- }
+ @Override
+ public String getParameters() {
+ return "ignoredNames";
+ }
- @Override
- public boolean process(String option, List<String> arguments) {
- ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
- return true;
- }
- });
- options.add(new Doclet.Option() {
- @Override
- public int getArgumentCount() {
- return 1;
- }
+ @Override
+ public boolean process(String option, List<String> arguments) {
+ ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
+ return true;
+ }
+ });
+ options.add(
+ new Doclet.Option() {
+ @Override
+ public int getArgumentCount() {
+ return 1;
+ }
- @Override
- public String getDescription() {
- return "comma separated list of packages to check at 'method' level";
- }
+ @Override
+ public String getDescription() {
+ return "comma separated list of packages to check at 'method' level";
+ }
- @Override
- public Kind getKind() {
- return Option.Kind.STANDARD;
- }
+ @Override
+ public Kind getKind() {
+ return Option.Kind.STANDARD;
+ }
- @Override
- public List<String> getNames() {
- return Collections.singletonList("--missing-method");
- }
+ @Override
+ public List<String> getNames() {
+ return Collections.singletonList("--missing-method");
+ }
- @Override
- public String getParameters() {
- return "packages";
- }
+ @Override
+ public String getParameters() {
+ return "packages";
+ }
- @Override
- public boolean process(String option, List<String> arguments) {
- methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
- return true;
- }
- });
+ @Override
+ public boolean process(String option, List<String> arguments) {
+ methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
+ return true;
+ }
+ });
return options;
}
@@ -205,10 +205,8 @@ public boolean run(DocletEnvironment docEnv) {
return super.run(docEnv);
}
-
- /**
- * Returns effective check level for this element
- */
+
+ /** Returns effective check level for this element */
private int level(Element element) {
String pkg = elementUtils.getPackageOf(element).getQualifiedName().toString();
if (methodPackages.contains(pkg)) {
@@ -217,24 +215,24 @@ private int level(Element element) {
return level;
}
}
-
- /**
- * Check an individual element.
- * This checks packages and types from the doctrees.
- * It will recursively check methods/fields from encountered types when the level is "method"
+
+ /**
+ * Check an individual element. This checks packages and types from the doctrees. It will
+ * recursively check methods/fields from encountered types when the level is "method"
*/
private void check(Element element) {
- switch(element.getKind()) {
+ switch (element.getKind()) {
case MODULE:
// don't check the unnamed module, it won't have javadocs
- if (!((ModuleElement)element).isUnnamed()) {
+ if (!((ModuleElement) element).isUnnamed()) {
checkComment(element);
}
break;
case PACKAGE:
checkComment(element);
break;
- // class-like elements, check them, then recursively check their children (fields and methods)
+ // class-like elements, check them, then recursively check their children (fields and
+ // methods)
case CLASS:
case INTERFACE:
case ENUM:
@@ -242,17 +240,18 @@ private void check(Element element) {
if (level(element) >= CLASS) {
checkComment(element);
for (var subElement : element.getEnclosedElements()) {
- // don't recurse into enclosed types, otherwise we'll double-check since they are already in the included docTree
- if (subElement.getKind() == ElementKind.METHOD ||
- subElement.getKind() == ElementKind.CONSTRUCTOR ||
- subElement.getKind() == ElementKind.FIELD ||
- subElement.getKind() == ElementKind.ENUM_CONSTANT) {
+ // don't recurse into enclosed types, otherwise we'll double-check since they are
+ // already in the included docTree
+ if (subElement.getKind() == ElementKind.METHOD
+ || subElement.getKind() == ElementKind.CONSTRUCTOR
+ || subElement.getKind() == ElementKind.FIELD
+ || subElement.getKind() == ElementKind.ENUM_CONSTANT) {
check(subElement);
}
}
}
break;
- // method-like elements, check them if we are configured to do so
+ // method-like elements, check them if we are configured to do so
case METHOD:
case CONSTRUCTOR:
case FIELD:
@@ -267,9 +266,9 @@ private void check(Element element) {
}
/**
- * Return true if the method is synthetic enum method (values/valueOf).
- * According to the doctree documentation, the "included" set never includes synthetic elements.
- * UweSays: It should not happen but it happens!
+ * Return true if the method is synthetic enum method (values/valueOf). According to the doctree
+ * documentation, the "included" set never includes synthetic elements. UweSays: It should not
+ * happen but it happens!
*/
private boolean isSyntheticEnumMethod(Element element) {
String simpleName = element.getSimpleName().toString();
@@ -280,20 +279,23 @@ private boolean isSyntheticEnumMethod(Element element) {
}
return false;
}
-
+
/**
- * Checks that an element doesn't have missing javadocs.
- * In addition to truly "missing", check that comments aren't solely whitespace (generated by some IDEs),
- * that they aren't a license header masquerading as a javadoc comment.
+ * Checks that an element doesn't have missing javadocs. In addition to truly "missing", check
+ * that comments aren't solely whitespace (generated by some IDEs), that they aren't a license
+ * header masquerading as a javadoc comment.
*/
private void checkComment(Element element) {
// sanity check that the element is really "included", because we do some recursion into types
if (!docEnv.isIncluded(element)) {
return;
}
- // check that this element isn't on our ignore list. This is only used as a workaround for "split packages".
- // ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside it.
- // we just need to cope with the fact package-info.java isn't there because it is split across multiple jars.
+ // check that this element isn't on our ignore list. This is only used as a workaround for
+ // "split packages".
+ // ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside
+ // it.
+ // we just need to cope with the fact package-info.java isn't there because it is split across
+ // multiple jars.
if (ignored.contains(element.toString())) {
return;
}
@@ -306,14 +308,17 @@ private void checkComment(Element element) {
error(element, "javadocs are missing");
}
} else {
- var normalized = tree.getFirstSentence().get(0).toString()
- .replace('\u00A0', ' ')
- .trim()
- .toLowerCase(Locale.ROOT);
+ var normalized =
+ tree.getFirstSentence()
+ .get(0)
+ .toString()
+ .replace('\u00A0', ' ')
+ .trim()
+ .toLowerCase(Locale.ROOT);
if (normalized.isEmpty()) {
error(element, "blank javadoc comment");
- } else if (normalized.startsWith("licensed to the apache software foundation") ||
- normalized.startsWith("copyright 2004 the apache software foundation")) {
+ } else if (normalized.startsWith("licensed to the apache software foundation")
+ || normalized.startsWith("copyright 2004 the apache software foundation")) {
error(element, "comment is really a license");
}
}
@@ -323,19 +328,20 @@ private void checkComment(Element element) {
}
private boolean hasInheritedJavadocs(Element element) {
- boolean hasOverrides = element.getAnnotationMirrors().stream()
- .anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName()));
+ boolean hasOverrides =
+ element.getAnnotationMirrors().stream()
+ .anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName()));
if (hasOverrides) {
// If an element has explicit @Overrides annotation, assume it does
// have inherited javadocs somewhere.
- reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared, skipping.");
+ reporter.print(
+ Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared, skipping.");
return true;
}
// Check for methods up the types tree.
- if (element instanceof ExecutableElement) {
- ExecutableElement thisMethod = (ExecutableElement) element;
+ if (element instanceof ExecutableElement thisMethod) {
Iterable<Element> superTypes =
() -> superTypeForInheritDoc(thisMethod.getEnclosingElement()).iterator();
@@ -346,7 +352,10 @@ private boolean hasInheritedJavadocs(Element element) {
// We could check supMethod for non-empty javadoc here. Don't know if this makes
// sense though as all methods will be verified in the end so it'd fail on the
// top of the hierarchy (if empty) anyway.
- reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but method overrides another, skipping.");
+ reporter.print(
+ Diagnostic.Kind.NOTE,
+ element,
+ "javadoc empty but method overrides another, skipping.");
return true;
}
}
@@ -356,15 +365,14 @@ private boolean hasInheritedJavadocs(Element element) {
return false;
}
-
/* Find types from which methods in type may inherit javadoc, in the proper order.*/
private Stream<Element> superTypeForInheritDoc(Element type) {
TypeElement clazz = (TypeElement) type;
- List<Element> interfaces = clazz.getInterfaces()
- .stream()
- .filter(tm -> tm.getKind() == TypeKind.DECLARED)
- .map(tm -> ((DeclaredType) tm).asElement())
- .collect(Collectors.toList());
+ List<Element> interfaces =
+ clazz.getInterfaces().stream()
+ .filter(tm -> tm.getKind() == TypeKind.DECLARED)
+ .map(tm -> ((DeclaredType) tm).asElement())
+ .collect(Collectors.toList());
Stream<Element> result = interfaces.stream();
result = Stream.concat(result, interfaces.stream().flatMap(this::superTypeForInheritDoc));
@@ -386,13 +394,13 @@ private void checkParameters(Element element, DocCommentTree tree) {
if (tree != null) {
for (var tag : tree.getBlockTags()) {
if (tag instanceof ParamTree) {
- var name = ((ParamTree)tag).getName().getName().toString();
+ var name = ((ParamTree) tag).getName().getName().toString();
seenParameters.add(name);
}
}
}
// now compare the method's formal parameter list against it
- for (var param : ((ExecutableElement)element).getParameters()) {
+ for (var param : ((ExecutableElement) element).getParameters()) {
var name = param.getSimpleName().toString();
if (!seenParameters.contains(name)) {
error(element, "missing javadoc @param for parameter '" + name + "'");
@@ -400,7 +408,7 @@ private void checkParameters(Element element, DocCommentTree tree) {
}
}
}
-
+
/** logs a new error for the particular element */
private void error(Element element, String message) {
var fullMessage = new StringBuilder();
@@ -430,13 +438,6 @@ private void error(Element element, String message) {
fullMessage.append("): ");
fullMessage.append(message);
- if (Runtime.version().feature() == 11 && element.getKind() == ElementKind.PACKAGE) {
- // Avoid JDK 11 bug:
- // https://issues.apache.org/jira/browse/LUCENE-9747
- // https://bugs.openjdk.java.net/browse/JDK-8224082
- reporter.print(Diagnostic.Kind.ERROR, fullMessage.toString());
- } else {
- reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString());
- }
+ reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString());
}
}
diff --git a/build.gradle b/build.gradle
index 3b3c0e0999e..9439438f9a8 100644
--- a/build.gradle
+++ b/build.gradle
@@ -20,19 +20,24 @@ import java.time.format.DateTimeFormatter
plugins {
id 'base'
- id 'com.palantir.consistent-versions' version '2.16.0'
- id 'org.owasp.dependencycheck' version '9.0.8'
- id 'ca.cutterslade.analyze' version '1.10.0'
- id 'de.thetaphi.forbiddenapis' version '3.7' apply false
- id 'de.undercouch.download' version '5.5.0' apply false
- id 'net.ltgt.errorprone' version '3.1.0' apply false
- id 'com.diffplug.spotless' version '6.5.2' apply false
- id 'com.github.node-gradle.node' version '7.0.1' apply false
+ id 'solr.build-infra'
+
+ alias(libs.plugins.carrotsearch.dependencychecks)
+ alias(libs.plugins.owasp.dependencycheck)
+ alias(libs.plugins.cutterslade.analyze)
+ alias(libs.plugins.benmanes.versions)
+ alias(libs.plugins.littlerobots.versioncatalogupdate) apply false
+ alias(libs.plugins.thetaphi.forbiddenapis) apply false
+ alias(libs.plugins.undercouch.download) apply false
+ alias(libs.plugins.ltgt.errorprone) apply false
+ alias(libs.plugins.diffplug.spotless) apply false
+ alias(libs.plugins.nodegradle.node) apply false
+ alias(libs.plugins.openapi.generator) apply false
}
// Declare default Java versions for the entire project and for SolrJ separately
-rootProject.ext.minJavaVersionDefault = JavaVersion.VERSION_11
-rootProject.ext.minJavaVersionSolrJ = JavaVersion.VERSION_11
+rootProject.ext.minJavaVersionDefault = JavaVersion.toVersion(libs.versions.java.min.get())
+rootProject.ext.minJavaVersionSolrJ = JavaVersion.toVersion(libs.versions.java.solrj.get())
apply from: file('gradle/globals.gradle')
@@ -97,7 +102,7 @@ ext {
}
luceneBaseVersionProvider = project.provider {
- def luceneVersion = getVersion('org.apache.lucene:lucene-core')
+ def luceneVersion = libs.versions.apache.lucene.get()
def m = (luceneVersion =~ /^\d+\.\d+\.\d+\b/)
if (!m) {
throw GradleException("Can't strip base version from " + luceneVersion)
@@ -108,14 +113,13 @@ ext {
}
}
-apply from: file('buildSrc/scriptDepVersions.gradle')
-
// Include smaller chunks configuring dedicated build areas.
// Some of these intersect or add additional functionality.
// The order of inclusion of these files shouldn't matter (but may
// if the build file is incorrectly written and evaluates something
// eagerly).
+apply from: file('gradle/conventions.gradle')
apply from: file('gradle/generation/local-settings.gradle')
// Ant-compatibility layer: apply folder layout early so that
@@ -129,7 +133,6 @@ apply from: file('gradle/java/javac.gradle')
apply from: file('gradle/testing/defaults-tests.gradle')
apply from: file('gradle/testing/randomization.gradle')
apply from: file('gradle/testing/fail-on-no-tests.gradle')
-apply from: file('gradle/testing/fail-on-unsupported-jdk.gradle')
apply from: file('gradle/testing/alternative-jdk-support.gradle')
apply from: file('gradle/java/jar-manifest.gradle')
apply from: file('gradle/testing/retry-test.gradle')
@@ -148,7 +151,6 @@ apply from: file('gradle/validation/precommit.gradle')
apply from: file('gradle/validation/forbidden-apis.gradle')
apply from: file('gradle/validation/jar-checks.gradle')
apply from: file('gradle/validation/git-status.gradle')
-apply from: file('gradle/validation/versions-props-sorted.gradle')
apply from: file('gradle/validation/validate-source-patterns.gradle')
apply from: file('gradle/validation/rat-sources.gradle')
apply from: file('gradle/validation/owasp-dependency-check.gradle')
@@ -159,9 +161,17 @@ apply from: file('gradle/validation/validate-log-calls.gradle')
apply from: file('gradle/validation/check-broken-links.gradle')
apply from: file('gradle/validation/solr.config-file-sanity.gradle')
-
+apply from: file('gradle/validation/dependencies.gradle')
apply from: file('gradle/validation/spotless.gradle')
+// Wire up included builds to some validation tasks.
+rootProject.tasks.named("tidy").configure {
+ dependsOn gradle.includedBuilds*.task(":tidy")
+}
+rootProject.tasks.named("clean").configure {
+ dependsOn gradle.includedBuilds*.task(":clean")
+}
+
// Source or data regeneration tasks
apply from: file('gradle/generation/regenerate.gradle')
apply from: file('gradle/generation/javacc.gradle')
@@ -195,7 +205,6 @@ apply from: file('gradle/hacks/global-exclude-dependencies.gradle')
apply from: file('gradle/hacks/gradle-archives.gradle')
apply from: file('gradle/hacks/wipe-temp.gradle')
-apply from: file('gradle/hacks/hashmapAssertions.gradle')
apply from: file('gradle/hacks/turbocharge-jvm-opts.gradle')
apply from: file('gradle/hacks/dummy-outputs.gradle')
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java b/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java
deleted file mode 100644
index 44cd09b33f4..00000000000
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.gradle;
-
-import java.io.*;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Pattern;
-
-import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter;
-import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter;
-import org.gradle.api.logging.Logger;
-import org.gradle.api.logging.Logging;
-import org.gradle.api.tasks.testing.TestDescriptor;
-import org.gradle.api.tasks.testing.TestListener;
-import org.gradle.api.tasks.testing.TestOutputEvent;
-import org.gradle.api.tasks.testing.TestOutputListener;
-import org.gradle.api.tasks.testing.TestResult;
-import org.gradle.api.tasks.testing.logging.TestLogging;
-
-/**
- * An error reporting listener that queues test output streams and displays them
- * on failure.
- *
- * Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed).
- */
-public class ErrorReportingTestListener implements TestOutputListener, TestListener {
- private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class);
-
- private final TestExceptionFormatter formatter;
- private final Map<TestKey, OutputHandler> outputHandlers = new ConcurrentHashMap<>();
- private final Path spillDir;
- private final Path outputsDir;
- private final boolean verboseMode;
-
- public ErrorReportingTestListener(TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
- this.formatter = new FullExceptionFormatter(testLogging);
- this.spillDir = spillDir;
- this.outputsDir = outputsDir;
- this.verboseMode = verboseMode;
- }
-
- @Override
- public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) {
- handlerFor(testDescriptor).write(outputEvent);
- }
-
- @Override
- public void beforeSuite(TestDescriptor suite) {
- // noop.
- }
-
- @Override
- public void beforeTest(TestDescriptor testDescriptor) {
- // Noop.
- }
-
- @Override
- public void afterSuite(final TestDescriptor suite, TestResult result) {
- if (suite.getParent() == null || suite.getName().startsWith("Gradle")) {
- return;
- }
-
- TestKey key = TestKey.of(suite);
- try {
- OutputHandler outputHandler = outputHandlers.get(key);
- if (outputHandler != null) {
- long length = outputHandler.length();
- if (length > 1024 * 1024 * 10) {
- LOGGER.warn(String.format(Locale.ROOT, "WARNING: Test %s wrote %,d bytes of output.",
- suite.getName(),
- length));
- }
- }
-
- boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE);
- boolean dumpOutput = echoOutput;
-
- // If the test suite failed, report output.
- if (dumpOutput || echoOutput) {
- Files.createDirectories(outputsDir);
- Path outputLog = outputsDir.resolve(getOutputLogName(suite));
-
- // Save the output of a failing test to disk.
- try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) {
- if (outputHandler != null) {
- outputHandler.copyTo(w);
- }
- }
-
- if (echoOutput && !verboseMode) {
- synchronized (this) {
- System.out.println("");
- System.out.println(suite.getClassName() + " > test suite's output saved to " + outputLog + ", copied below:");
- try (BufferedReader reader = Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) {
- char[] buf = new char[1024];
- int len;
- while ((len = reader.read(buf)) >= 0) {
- System.out.print(new String(buf, 0, len));
- }
- System.out.println();
- }
- }
- }
- }
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- } finally {
- OutputHandler handler = outputHandlers.remove(key);
- if (handler != null) {
- try {
- handler.close();
- } catch (IOException e) {
- LOGGER.error("Failed to close output handler for: " + key, e);
- }
- }
- }
- }
-
- private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");
-
- public static String getOutputLogName(TestDescriptor suite) {
- return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_");
- }
-
- @Override
- public void afterTest(TestDescriptor testDescriptor, TestResult result) {
- // Include test failure exception stacktrace(s) in test output log.
- if (result.getResultType() == TestResult.ResultType.FAILURE) {
- if (result.getExceptions().size() > 0) {
- String message = formatter.format(testDescriptor, result.getExceptions());
- handlerFor(testDescriptor).write(message);
- }
- }
- }
-
- private OutputHandler handlerFor(TestDescriptor descriptor) {
- // Attach output of leaves (individual tests) to their parent.
- if (!descriptor.isComposite()) {
- descriptor = descriptor.getParent();
- }
- return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler());
- }
-
- public static class TestKey {
- private final String key;
-
- private TestKey(String key) {
- this.key = key;
- }
-
- public static TestKey of(TestDescriptor d) {
- StringBuilder key = new StringBuilder();
- key.append(d.getClassName());
- key.append("::");
- key.append(d.getName());
- key.append("::");
- key.append(d.getParent() == null ? "-" : d.getParent().toString());
- return new TestKey(key.toString());
- }
-
- @Override
- public boolean equals(Object o) {
- return o != null &&
- o.getClass() == this.getClass() &&
- Objects.equals(((TestKey) o).key, key);
- }
-
- @Override
- public int hashCode() {
- return key.hashCode();
- }
-
- @Override
- public String toString() {
- return key;
- }
- }
-
- private class OutputHandler implements Closeable {
- // Max single-line buffer before automatic wrap occurs.
- private static final int MAX_LINE_WIDTH = 1024 * 4;
-
- private final SpillWriter buffer;
-
- // internal stream.
- private final PrefixedWriter sint;
- // stdout
- private final PrefixedWriter sout;
- // stderr
- private final PrefixedWriter serr;
-
- // last used stream (so that we can flush it properly and prefixes are not screwed up).
- private PrefixedWriter last;
-
- public OutputHandler() {
- buffer = new SpillWriter(() -> {
- try {
- return Files.createTempFile(spillDir, "spill-", ".tmp");
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- });
-
- Writer sink = buffer;
- if (verboseMode) {
- sink = new StdOutTeeWriter(buffer);
- }
-
- sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH);
- sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH);
- serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH);
- last = sint;
- }
-
- public void write(TestOutputEvent event) {
- write((event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr), event.getMessage());
- }
-
- public void write(String message) {
- write(sint, message);
- }
-
- public long length() throws IOException {
- return buffer.length();
- }
-
- private void write(PrefixedWriter out, String message) {
- try {
- if (out != last) {
- last.completeLine();
- last = out;
- }
- out.write(message);
- } catch (IOException e) {
- throw new UncheckedIOException("Unable to write to test output.", e);
- }
- }
-
- public void copyTo(Writer out) throws IOException {
- flush();
- buffer.copyTo(out);
- }
-
- public void flush() throws IOException {
- sout.completeLine();
- serr.completeLine();
- buffer.flush();
- }
-
- @Override
- public void close() throws IOException {
- buffer.close();
- }
- }
-}
diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java b/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java
deleted file mode 100644
index 20a4c8524f6..00000000000
--- a/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.gradle;
-
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.Writer;
-
-class StdOutTeeWriter extends Writer {
- private final Writer delegate;
- private final PrintStream out = System.out;
-
- public StdOutTeeWriter(Writer delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public void write(int c) throws IOException {
- delegate.write(c);
- out.write(c);
- }
-
- @Override
- public void write(char[] cbuf) throws IOException {
- delegate.write(cbuf);
- out.print(cbuf);
- }
-
- @Override
- public void write(String str) throws IOException {
- delegate.write(str);
- out.print(str);
- }
-
- @Override
- public void write(String str, int off, int len) throws IOException {
- delegate.write(str, off, len);
- out.append(str, off, len);
- }
-
- @Override
- public Writer append(CharSequence csq) throws IOException {
- delegate.append(csq);
- out.append(csq);
- return this;
- }
-
- @Override
- public Writer append(CharSequence csq, int start, int end) throws IOException {
- delegate.append(csq, start, end);
- out.append(csq, start, end);
- return this;
- }
-
- @Override
- public Writer append(char c) throws IOException {
- delegate.append(c);
- out.append(c);
- return this;
- }
-
- @Override
- public void write(char[] cbuf, int off, int len) throws IOException {
- delegate.write(cbuf, off, len);
- out.print(new String(cbuf, off, len));
- }
-
- @Override
- public void flush() throws IOException {
- delegate.flush();
- out.flush();
- }
-
- @Override
- public void close() throws IOException {
- delegate.close();
- // Don't close the actual output.
- }
-}
diff --git a/dev-docs/FAQ.adoc b/dev-docs/FAQ.adoc
index b25d81bd6a9..820e952b5f3 100644
--- a/dev-docs/FAQ.adoc
+++ b/dev-docs/FAQ.adoc
@@ -97,3 +97,7 @@ If you don't yet have an account, you have to ask for one in the 'users' or 'dev
* http://fucit.org/solr-jenkins-reports/failure-report.html
* https://ge.apache.org/scans/tests?search.relativeStartTime=P90D&search.rootProjectNames=solr*
* https://lists.apache.org[Solr mailing list archives especially builds]
+
+=== How can I build the Javadocs and the Reference Guide?
+
+Run `./gradlew buildLocalSite` to build the Javadocs and the Reference Guide site.
diff --git a/dev-docs/apis.adoc b/dev-docs/apis.adoc
index 9e710f7e0a4..fa0565a14d9 100644
--- a/dev-docs/apis.adoc
+++ b/dev-docs/apis.adoc
@@ -81,4 +81,3 @@ A good example for each of these steps can be seen in Solr's v2 "add-replica-pro
While we've settled on JAX-RS as our framework for defining v2 APIs going forward, Solr still retains many v2 APIs that were written using an older homegrown framework.
This framework defines APIs using annotations (e.g. `@EndPoint`) similar to those used by JAX-RS, but lacks the full range of features and 3rd-party tooling.
We're in the process of migrating these API definitions to JAX-RS and hope to remove all support for this legacy framework in a future release.
-
diff --git a/dev-docs/asf-jenkins.adoc b/dev-docs/asf-jenkins.adoc
new file mode 100644
index 00000000000..b3c59b6fd9e
--- /dev/null
+++ b/dev-docs/asf-jenkins.adoc
@@ -0,0 +1,73 @@
+= ASF Jenkins Setup
+:toc: left
+
+The Solr project uses a Jenkins instance provided by the Apache Software Foundation ("ASF") for running tests, validation, etc.
+
+This file aims to document our https://ci-builds.apache.org/job/Solr/[ASF Jenkins] usage and administration, to prevent it from becoming "tribal knowledge" understood by just a few.
+
+== Jobs
+
+We run a number of jobs on Jenkins, each validating an overlapping set of concerns:
+
+* `Solr-Artifacts-*` - daily jobs that run `./gradlew assemble` to ensure that build artifacts (except docker images) can be created successfully
+* `Solr-Lint-*` - daily jobs that run static analysis (i.e. `precommit` and `check -x test`) on a branch
+* `Solr-Test-*` - "hourly" jobs that run all (non-integration) tests (i.e. `./gradlew test`)
+* `Solr-TestIntegration-*` - daily jobs that run project integration tests (i.e. `./gradlew integrationTests`)
+* `Solr-Docker-Nightly-*` - daily jobs that run `./gradlew testDocker dockerPush` to validate docker image packaging. Snapshot images are pushed to hub.docker.com
+* `Solr-reference-guide-*` - daily jobs that build the Solr reference guide via `./gradlew checkSite` and push the resulting artifact to the staging/preview site `nightlies.apache.org`
+* `Solr-Smoketest-*` - daily jobs that produce a snapshot release (via the `assembleRelease` task) and run the release smoketester
+
+Most jobs that validate particular build artifacts are run "daily", which is sufficient to prevent any large breaks from creeping into the build.
+On the other hand, jobs that run tests are triggered "hourly" in order to squeeze as many test runs as possible out of our Jenkins hardware.
+This is a necessary consequence of Solr's heavy use of randomization in its test-suite.
+"Hourly" scheduling ensures that a test run is either currently running or in the build queue at all times, and enables us to get the maximum data points from our hardware.
+
+== Jenkins Agents
+
+All Solr jobs run on Jenkins agents marked with the 'solr' label.
+Currently, this maps to two Jenkins agents:
+
+* `lucene-solr-1` - available at lucene1-us-west.apache.org
+* `lucene-solr-2` - available (confusingly) at lucene-us-west.apache.org
+
+These agents are "project-specific" VMs shared by the Lucene and Solr projects.
+That is: they are VMs requested by a project for their exclusive use.
+(INFRA policy appears to be that each Apache project may request 1 dedicated VM; it's unclear how Solr ended up with 2.)
+
+Maintenance of these agent VMs falls into a bit of a gray area.
+INFRA will still intervene when asked: to reboot nodes, to deploy OS upgrades, etc.
+But some burden also falls on Lucene and Solr as project teams to monitor the VMs and keep them healthy.
+
+=== Accessing Jenkins Agents
+
+With a few steps, Solr committers can access our project's Jenkins agent VMs via SSH to troubleshoot and resolve issues.
+
+1. Ensure your account on id.apache.org has an SSH key associated with it.
+2. Ask INFRA to give your Apache ID SSH access to these boxes. (See https://issues.apache.org/jira/browse/INFRA-3682[this JIRA ticket] for an example.)
+3. SSH into the desired box with: `ssh <your-apache-id>@$HOSTNAME` (where `$HOSTNAME` is either `lucene1-us-west.apache.org` or `lucene-us-west.apache.org`)
+
+Often, SSH access on the boxes is not sufficient, and administrators require "root" access to diagnose and solve problems.
+Sudo/su privileges can be accessed via a one-time password ("OTP") challenge, managed by the "Orthrus PAM" module.
+Users in need of root access can perform the following steps:
+
+1. Open the ASF's https://selfserve.apache.org/otp-calculator.html[OTP Generator Tool] in your browser of choice
+2. Run `ortpasswd` on the machine. This will print out an OTP "challenge" (e.g. `otp-md5 497 lu6126`) and provide a password prompt. This password prompt should be given an OTP password, generated in steps 3-5 below.
+3. Copy the "challenge" from the previous step into the relevant field on the "OTP Generator Tool" form.
+4. Choose a password to use for OTP Challenges (or recall one you've used in the past), and type this into the relevant field on the "OTP Generator Tool" form.
+5. Click "Compute", and copy the first line from the "Response" box into your SSH session's password prompt. You're now established in the "Orthrus PAM" system.
+6. Run a command requesting `su` escalation (e.g. `sudo su -`). This should print another "challenge" and password prompt. Repeat steps 3-5 (see the sketch below).
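+
+As an illustrative sketch (the challenge strings are examples only; yours will differ), the whole exchange
+looks roughly like this:
+
+[source,bash]
+----
+$ ortpasswd            # prints an OTP challenge, e.g. "otp-md5 497 lu6126"
+Password:              # paste the first "Response" line from the OTP Generator Tool
+$ sudo su -            # requesting root prints another challenge
+Password:              # compute and paste a fresh response for the new challenge
+----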
+
+If this fails at any point, open a ticket with INFRA.
+You may need to be added to the 'sudoers' file for the VM(s) in question.
+
+=== Known Jenkins Issues
+
+One recurring problem with the Jenkins agents is that they periodically run out of disk-space.
+Usually this happens when enough "workspaces" are orphaned or left behind, consuming all of the agent's disk space.
+
+Solr Jenkins jobs are currently configured to clean up the previous workspace at the *start* of the subsequent run.
+This avoids orphans in the common case but leaves workspaces behind any time a job is renamed or deleted (as happens during the Solr release process).
+
+Luckily, this has an easy fix: SSH into the agent VM and delete any workspaces no longer needed in `/home/jenkins/jenkins-slave/workspace/Solr`.
+Any workspace that doesn't correspond to a https://ci-builds.apache.org/job/Solr/[currently existing job] can be safely deleted.
+(It may also be worth comparing the Lucene workspaces in `/home/jenkins/jenkins-slave/workspace/Lucene` to https://ci-builds.apache.org/job/Lucene/[that project's list of jobs].)
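+
+For example, a cleanup session might look roughly like the following sketch (the workspace names shown are
+illustrative):
+
+[source,bash]
+----
+df -h /home                                  # confirm the agent really is low on disk space
+cd /home/jenkins/jenkins-slave/workspace/Solr
+ls -1                                        # compare the workspaces against the current job list
+rm -rf Solr-Test-9.5 Solr-Lint-9.5           # delete workspaces of renamed or deleted jobs
+----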
diff --git a/dev-docs/dependency-upgrades.adoc b/dev-docs/dependency-upgrades.adoc
index 9f7372cc1bd..aa5cd93a2de 100644
--- a/dev-docs/dependency-upgrades.adoc
+++ b/dev-docs/dependency-upgrades.adoc
@@ -16,30 +16,57 @@
// specific language governing permissions and limitations
// under the License.
-Solr has lots of 3rd party dependencies, defined mainly in `versions.props`.
+Solr has lots of 3rd party dependencies, defined in `gradle/libs.versions.toml`.
Keeping them up-to-date is crucial for a number of reasons:
* minimizing the risk of critical CVE vulnerabilities by staying on a recent and supported version
* avoiding "dependency hell", that can arise from falling too far behind
-Read the https://github.com/apache/solr/blob/main/help/dependencies.txt[help/dependencies.txt] file for an in-depth explanation of how gradle is deployed in Solr, using
-https://github.com/palantir/gradle-consistent-versions[Gradle consistent-versions] plugin.
+Read the https://github.com/apache/solr/blob/main/help/dependencies.txt[help/dependencies.txt] file for an in-depth
+explanation of how dependencies are managed.
== Manual dependency upgrades
In order to upgrade a dependency, you need to run through a number of steps:
1. Identify the available versions from e.g. https://search.maven.org[Maven Central]
-2. Update the version in `versions.props` file
-3. Run `./gradlew --write-locks` to re-generate `versions.lock`. Note that this may cause a cascading effect where
+2. Update the version in the `gradle/libs.versions.toml` file (a typical command sequence is sketched after this list)
+3. Run `./gradlew writeLocks` to re-generate `versions.lock`. Note that this may cause a cascading effect where
the locked version of other dependencies also change.
-4. Run `./gradlew updateLicenses` to re-generate SHA1 checksums of the new jar files.
-5. Once in a while, a new version of a dependency will transitively bring in brand-new dependencies.
+4. In case of a conflict, resolve the conflict according to `help/dependencies.txt`
+5. Check if there are any constraints that are obsolete after the dependency update
+6. Update the license and notice files of the changed dependencies. See `help/dependencies.txt` for
+ details.
+7. Run `./gradlew updateLicenses` to re-generate SHA1 checksums of the new jar files.
+8. Once in a while, a new version of a dependency will transitively bring in brand-new dependencies.
You'll need to decide whether to keep or exclude them. See `help/dependencies.txt` for details.
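+
+As a minimal sketch, the command sequence for a single upgrade looks roughly like this (the edited key and
+version are illustrative):
+
+[source,bash]
+----
+# after bumping a version in gradle/libs.versions.toml, e.g. commons-codec = "1.17.1"
+./gradlew writeLocks        # re-generate versions.lock; other locked versions may change as well
+./gradlew updateLicenses    # re-generate the SHA1 checksums of the new jars under solr/licenses/
+./gradlew check -x test     # optional: run static validation before opening a pull request
+----
+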
+=== Reviewing Constraints
+
+Dependency constraints are defined in `gradle/validation/dependencies.gradle`. If the updated dependency is listed
+there, the constraint can be reviewed, updated or removed.
+
+The constraints fall into two groups. The first group contains constraints for dependencies that our project
+includes directly and that require version alignment to keep versions consistent across all transitive
+dependencies. The second group contains dependencies that are only present as transitive dependencies.
+For those, we try to follow the convention of noting which dependencies use which version, so that the next
+person reviewing the constraint does not have to look it up. However, analyzing the dependencies this way is
+quite time-consuming, so the convention is subject to change.
+
+To review a constraint, check whether the updated dependency is mentioned in any of the constraints, either as
+the constrained dependency itself or as the reason given for another dependency's constraint. If you temporarily
+remove a constraint, the `writeLocks` task will fail when the constraint is still required.
+
+This process and the constraints in `dependencies.gradle` are not optimal: they are time-consuming to maintain
+and not obvious at first glance. We just have not yet found a more efficient way to maintain these constraints.
+
== Renovate bot Pull Requests
+
+The Renovate bot may be replaced with Dependabot in the future, so this section may only be relevant for older
+versions (< 10.0). See https://lists.apache.org/thread/1sb9ttv3lp57z2yod1htx1fykp5sj73z for updates.
+
A member of the Solr community operates a Github bot running https://github.com/renovatebot/renovate[Renovate], which
files Pull Requests to Solr with dependency upgrade proposals. The PRs are labeled `dependencies` and do include
-changes resulting from `gradle --write-locks` and `updateLicenses`.
+changes resulting from `./gradlew writeLocks` and `updateLicenses`.
Community members and committers can then review, and if manual changes are needed, help bring the PR to completion.
For many dependencies, a changelog is included in the PR text, which may help guide the upgrade decision.
diff --git a/dev-docs/lucene-upgrade.md b/dev-docs/lucene-upgrade.md
index c9ee1a27802..e5ab91e7c9f 100644
--- a/dev-docs/lucene-upgrade.md
+++ b/dev-docs/lucene-upgrade.md
@@ -27,7 +27,7 @@ Create a new branch locally e.g. `git checkout -b lucene940 -t origin/main` for
## Build
-### `versions.props` update
+### `gradle/libs.versions.toml` update
```
- org.apache.lucene:*=9.3.0
@@ -37,7 +37,7 @@ Create a new branch locally e.g. `git checkout -b lucene940 -t origin/main` for
### `versions.lock` update
```
-gradlew --write-locks
+gradlew :writeLocks
```
### `solr/licenses` update
diff --git a/dev-docs/running-in-docker.adoc b/dev-docs/running-in-docker.adoc
index 202d0379b1e..e1e96dac244 100644
--- a/dev-docs/running-in-docker.adoc
+++ b/dev-docs/running-in-docker.adoc
@@ -8,7 +8,7 @@ To run Solr in a container and expose the Solr port, run:
In order to start Solr in cloud mode, run the following.
-`docker run -p 8983:8983 solr solr-fg -c`
+`docker run -p 8983:8983 solr solr-fg`
For documentation on using the official docker builds, please refer to the https://hub.docker.com/_/solr[DockerHub page].
Up-to-date documentation for running locally built images of this branch can be found in the xref:_running_solr_in_docker[local reference guide].
@@ -30,4 +30,4 @@ For more info on building an image, run:
`./gradlew helpDocker`
== Additional Information
-You can find additional information in the https://solr.apache.org/guide/solr/latest/deployment-guide/solr-in-docker.html[Solr Ref Guide Docker Page]
\ No newline at end of file
+You can find additional information in the https://solr.apache.org/guide/solr/latest/deployment-guide/solr-in-docker.html[Solr Ref Guide Docker Page]
diff --git a/dev-docs/solr-source-code.adoc b/dev-docs/solr-source-code.adoc
index ad73858a478..5874a2e06ec 100644
--- a/dev-docs/solr-source-code.adoc
+++ b/dev-docs/solr-source-code.adoc
@@ -2,10 +2,10 @@
## Building Solr from Source
-Download the Java 11 JDK (Java Development Kit) or later.
+Download the Java 21 JDK (Java Development Kit) or later.
We recommend the OpenJDK distribution Eclipse Temurin available from https://adoptium.net/.
You will need the JDK installed, and the $JAVA_HOME/bin (Windows: %JAVA_HOME%\bin) folder included on your command path.
-To test this, issue a "java -version" command from your shell (command prompt) and verify that the Java version is 11 or later.
+To test this, issue a "java -version" command from your shell (command prompt) and verify that the Java version is 21 or later.
See the xref:jvms.adoc[JVM developer doc] for more information on Gradle and JVMs.
Clone the latest Apache Solr source code directly from the Git repository: .
diff --git a/dev-docs/v2-api-conventions.adoc b/dev-docs/v2-api-conventions.adoc
index d2159239694..74a760c2bfd 100644
--- a/dev-docs/v2-api-conventions.adoc
+++ b/dev-docs/v2-api-conventions.adoc
@@ -66,6 +66,14 @@ For use within the v2 API, the four "popular" HTTP methods have the following se
* `PUT` - used for idempotent resource modifications.
* `DELETE` - Used to delete or cleanup resource
+== Errors
+
+v2 APIs should be consistent in how they report errors. Throwing a `SolrException` will convey
+1. the error code as the HTTP response status code, as `responseHeader.status` and as `error.code`, and
+1. the error message as `error.msg`.
+
+API calls that reference a specific resource (e.g. `specificCollName`, `specificAliasName`, `specificPropertyName` and others per the above list) that do not exist should return `SolrException.ErrorCode.NOT_FOUND` (HTTP 404).
+
== Exceptional Cases - "Command" APIs
The pairing of semantic HTTP verbs and "resource"-based paths gives Solr an intuitive pattern for representing many operations, but not all.
diff --git a/dev-tools/README.txt b/dev-tools/README.txt
index db5a8e179b2..76ae0c52f3a 100644
--- a/dev-tools/README.txt
+++ b/dev-tools/README.txt
@@ -5,7 +5,6 @@ as to the usefulness of the tools.
Description of dev-tools/ contents:
-./missing-doclet -- JavaDoc validation doclet subproject
./doap/ -- Lucene and Solr project descriptors in DOAP RDF format.
./scripts/ -- Odds and ends for building releases, etc.
./test-patch/ -- Scripts for automatically validating patches
diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf
index acfbd01dbf6..c3494226dde 100644
--- a/dev-tools/doap/solr.rdf
+++ b/dev-tools/doap/solr.rdf
@@ -152,6 +152,13 @@
         <revision>9.0.0</revision>
       </Version>
     </release>
+    <release>
+      <Version>
+        <name>solr-8.11.4</name>
+        <created>2024-09-24</created>
+        <revision>8.11.4</revision>
+      </Version>
+    </release>
     <release>
       <Version>
         <name>solr-8.11.3</name>
diff --git a/dev-tools/scripts/cloud.sh b/dev-tools/scripts/cloud.sh
index 3ab979d5ac3..9824fb3eea7 100755
--- a/dev-tools/scripts/cloud.sh
+++ b/dev-tools/scripts/cloud.sh
@@ -334,7 +334,7 @@ start(){
echo "Final NUM_NODES is $NUM_NODES"
for i in `seq 1 $NUM_NODES`; do
mkdir -p "${CLUSTER_WD}/n${i}"
- argsArray=(-c -s $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \
+ argsArray=(-c --solr-home $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \
-a "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=500${i} \
-Dsolr.log.dir=$CLUSTER_WD_FULL/n${i} $JVM_ARGS")
FINAL_COMMAND="${SOLR}/bin/solr ${argsArray[@]}"
diff --git a/dev-tools/scripts/releaseWizard.yaml b/dev-tools/scripts/releaseWizard.yaml
index 5de7067d331..a1ccdc5cf44 100644
--- a/dev-tools/scripts/releaseWizard.yaml
+++ b/dev-tools/scripts/releaseWizard.yaml
@@ -391,7 +391,6 @@ groups:
title: Create a new minor branch off the stable branch
description: |
In our case we'll create {{ release_branch }}.
- Also edit `.asf.yaml` to add the new branch under `protected_branches`.
types:
- major
- minor
@@ -417,15 +416,37 @@ groups:
- !Command
cmd: git push --set-upstream origin {{ release_branch }}
tee: true
- - !Command
- cmd: "{{ editor }} .asf.yaml"
- comment: |
- Add the new branch {{ release_branch }} under `protected_branches` in `.asf.yaml`. An editor will open.
- stdout: true
- - !Command
- cmd: git add .asf.yaml && git commit -m "Add branch protection for {{ release_branch }}" && git push
- logfile: commit-branch-protection.log
- tee: true
+ - !Todo
+ id: protect_branches
+ title: Protect the new git branch(es)
+ vars:
+ branch_names: "{{ release_branch }}{% if release_type == 'major' %} and {{ stable_branch }}{% endif %}"
+ description: >
+ Edit `.asf.yaml` to protect new git branch(es) {{ branch_names }}.
+ types:
+ - major
+ - minor
+ depends: create_minor_branch
+ commands: !Commands
+ root_folder: '{{ git_checkout_folder }}'
+ commands_text: Run these commands to edit `.asf.yaml` and protect the new branch(es)
+ commands:
+ - !Command
+ cmd: git checkout main
+ tee: true
+ - !Command
+ cmd: git pull --ff-only
+ tee: true
+ - !Command
+ cmd: "{{ editor }} .asf.yaml"
+ comment: >
+ Add the newly created branch(es) {{ branch_names }}
+ under `protected_branches` in `.asf.yaml`. An editor will open.
+ stdout: true
+ - !Command
+ cmd: git add .asf.yaml && git commit -m "Add branch protection for {{ branch_names }}" && git push
+ logfile: commit-branch-protection.log
+ tee: true
- !Todo
id: update_minor_branch_prerelease_antora
title: Update Ref Guide Metadata for new Minor Branch
@@ -1843,16 +1864,9 @@ groups:
root_folder: '{{ git_checkout_folder }}'
commands_text: |
Run these commands to delete proposed versions from distribution directory.
- Note, as long as we have some releases (7.x, 8.x) in Lucene dist repo and other
- releases (9.0 ->) in the Solr dist repo, we may need to delete two places.
-
+
WARNING: Validate that the proposal is correct!
commands:
- - !Command
- cmd: |
- svn rm -m "Stop publishing old Solr releases"{% for ver in mirrored_versions_to_delete %} https://dist.apache.org/repos/dist/release/lucene/solr/{{ ver }}{% endfor %}
- logfile: svn-rm-solr.log
- comment: Delete from Lucene dist area
- !Command
cmd: |
svn rm -m "Stop publishing old Solr releases"{% for ver in mirrored_versions_to_delete %} https://dist.apache.org/repos/dist/release/solr/solr/{{ ver }}{% endfor %}
diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py
index 4ab97eb1f09..acfa109f2fc 100755
--- a/dev-tools/scripts/smokeTestRelease.py
+++ b/dev-tools/scripts/smokeTestRelease.py
@@ -628,7 +628,7 @@ def verifyUnpacked(java, artifact, unpackPath, gitRevision, version, testArgs):
expected_src_root_folders = ['buildSrc', 'dev-docs', 'dev-tools', 'gradle', 'help', 'solr']
expected_src_root_files = ['build.gradle', 'gradlew', 'gradlew.bat', 'settings.gradle', 'versions.lock', 'versions.props']
expected_src_solr_files = ['build.gradle']
- expected_src_solr_folders = ['benchmark', 'bin', 'modules', 'api', 'core', 'docker', 'documentation', 'example', 'licenses', 'packaging', 'distribution', 'prometheus-exporter', 'server', 'solr-ref-guide', 'solrj', 'solrj-streaming', 'solrj-zookeeper', 'test-framework', 'webapp', '.gitignore', '.gitattributes']
+ expected_src_solr_folders = ['benchmark', 'bin', 'modules', 'api', 'core', 'cross-dc-manager', 'docker', 'documentation', 'example', 'licenses', 'packaging', 'distribution', 'prometheus-exporter', 'server', 'solr-ref-guide', 'solrj', 'solrj-streaming', 'solrj-zookeeper', 'test-framework', 'webapp', '.gitignore', '.gitattributes']
is_in_list(in_root_folder, expected_src_root_folders)
is_in_list(in_root_folder, expected_src_root_files)
is_in_list(in_solr_folder, expected_src_solr_folders)
@@ -638,7 +638,7 @@ def verifyUnpacked(java, artifact, unpackPath, gitRevision, version, testArgs):
elif isSlim:
is_in_list(in_root_folder, ['bin', 'docker', 'docs', 'example', 'licenses', 'server', 'lib'])
else:
- is_in_list(in_root_folder, ['bin', 'modules', 'docker', 'prometheus-exporter', 'docs', 'example', 'licenses', 'server', 'lib'])
+ is_in_list(in_root_folder, ['bin', 'modules', 'cross-dc-manager', 'docker', 'prometheus-exporter', 'docs', 'example', 'licenses', 'server', 'lib'])
if len(in_root_folder) > 0:
raise RuntimeError('solr: unexpected files/dirs in artifact %s: %s' % (artifact, in_root_folder))
@@ -774,8 +774,6 @@ def testSolrExample(binaryDistPath, javaPath):
raise RuntimeError('Failed to run the techproducts example, check log for previous errors.')
os.chdir('example')
- print(' test utf8...')
- run('sh ./exampledocs/test_utf8.sh http://localhost:8983/solr/techproducts', 'utf8.log')
print(' run query...')
s = load('http://localhost:8983/solr/techproducts/select/?q=video')
if s.find('"numFound":3,') == -1:
diff --git a/gradle/testing/fail-on-unsupported-jdk.gradle b/gradle/conventions.gradle
similarity index 66%
rename from gradle/testing/fail-on-unsupported-jdk.gradle
rename to gradle/conventions.gradle
index 7d94b709764..fabc9b4cc58 100644
--- a/gradle/testing/fail-on-unsupported-jdk.gradle
+++ b/gradle/conventions.gradle
@@ -15,18 +15,19 @@
* limitations under the License.
*/
-configure(rootProject) {
- task ensureJdkSupported() {
- doFirst {
- if (System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("mac") && rootProject.runtimeJavaVersion == JavaVersion.VERSION_20) {
- throw new GradleException("Tests cannot be run with JDK20 on Mac; see SOLR-16733 for more details.")
- }
+configure(allprojects) {
+ tasks.register("tidy").configure {
+ description "Applies formatters and cleanups to sources."
+ group "verification"
}
- }
+}
- allprojects {
- tasks.withType(Test) {
- dependsOn ":ensureJdkSupported"
+// Locate script-relative resource folder. This is context-sensitive so pass
+// the right buildscript (top-level).
+configure(rootProject) {
+ ext {
+ scriptResources = { buildscript ->
+ return file(buildscript.sourceFile.absolutePath.replaceAll('.gradle$', ""))
+ }
}
- }
}
diff --git a/gradle/documentation/changes-to-html.gradle b/gradle/documentation/changes-to-html.gradle
index 3b4ca69bf9d..af9d1b5fa9a 100644
--- a/gradle/documentation/changes-to-html.gradle
+++ b/gradle/documentation/changes-to-html.gradle
@@ -76,6 +76,13 @@ class ChangesToHtmlTask extends DefaultTask {
def toHtml(File versionsFile) {
def output = new ByteArrayOutputStream()
+
+ // Check if the perl executable exists
+ if (!perlExists()) {
+ logger.warn("WARNING: Perl is not installed, skipping generation of Changes.html")
+ return
+ }
+
def result = project.exec {
executable project.externalTool("perl")
standardInput changesFile.newInputStream()
@@ -114,4 +121,14 @@ class ChangesToHtmlTask extends DefaultTask {
throw new GradleException("Changes file ${changesFile} or Doap file ${changesDoapFile} not found.")
}
}
+
+ def perlExists() {
+ try {
+ def process = "perl -v".execute()
+ process.waitFor()
+ return process.exitValue() == 0
+ } catch (Exception e) {
+ return false
+ }
+ }
}
diff --git a/gradle/documentation/markdown.gradle b/gradle/documentation/markdown.gradle
index 29d23d87c75..d9a890d72eb 100644
--- a/gradle/documentation/markdown.gradle
+++ b/gradle/documentation/markdown.gradle
@@ -33,10 +33,10 @@ buildscript {
}
dependencies {
- classpath "com.vladsch.flexmark:flexmark:${scriptDepVersions['flexmark']}"
- classpath "com.vladsch.flexmark:flexmark-ext-abbreviation:${scriptDepVersions['flexmark']}"
- classpath "com.vladsch.flexmark:flexmark-ext-attributes:${scriptDepVersions['flexmark']}"
- classpath "com.vladsch.flexmark:flexmark-ext-autolink:${scriptDepVersions['flexmark']}"
+ classpath libs.flexmark.flexmark
+ classpath libs.flexmark.extensions.abbreviation
+ classpath libs.flexmark.extensions.attributes
+ classpath libs.flexmark.extensions.autolink
}
}
diff --git a/gradle/documentation/pull-lucene-javadocs.gradle b/gradle/documentation/pull-lucene-javadocs.gradle
index 5fdc4a70040..17985d88f1b 100644
--- a/gradle/documentation/pull-lucene-javadocs.gradle
+++ b/gradle/documentation/pull-lucene-javadocs.gradle
@@ -45,11 +45,11 @@ configure(project(":solr:documentation")) {
// from all Solr javadocs?) then perhaps we can find a way to build this list programmatically?
// - If these javadocs are (only every) consumed by the ref guide only, then these deps & associated tasks
// should just be moved to the ref-guide build.gradle
- javadocs group: 'org.apache.lucene', name: 'lucene-core', classifier: 'javadoc'
- javadocs group: 'org.apache.lucene', name: 'lucene-analysis-common', classifier: 'javadoc'
- javadocs group: 'org.apache.lucene', name: 'lucene-analysis-stempel', classifier: 'javadoc'
- javadocs group: 'org.apache.lucene', name: 'lucene-queryparser', classifier: 'javadoc'
- javadocs group: 'org.apache.lucene', name: 'lucene-spatial-extras', classifier: 'javadoc'
+ javadocs variantOf(libs.apache.lucene.core) { classifier 'javadoc' }
+ javadocs variantOf(libs.apache.lucene.analysis.common) { classifier 'javadoc' }
+ javadocs variantOf(libs.apache.lucene.analysis.stempel) { classifier 'javadoc' }
+ javadocs variantOf(libs.apache.lucene.queryparser) { classifier 'javadoc' }
+ javadocs variantOf(libs.apache.lucene.spatialextras) { classifier 'javadoc' }
}
diff --git a/gradle/documentation/render-javadoc.gradle b/gradle/documentation/render-javadoc.gradle
index bd90ad35426..6c637e540df 100644
--- a/gradle/documentation/render-javadoc.gradle
+++ b/gradle/documentation/render-javadoc.gradle
@@ -32,7 +32,7 @@ allprojects {
missingdoclet "org.apache.solr.tools:missing-doclet"
}
- ext {
+ project.ext {
relativeDocPath = project.path.replaceFirst(/:\w+:/, "").replace(':', '/')
}
diff --git a/gradle/generation/javacc.gradle b/gradle/generation/javacc.gradle
index 54fc7e91359..0b70ba656ee 100644
--- a/gradle/generation/javacc.gradle
+++ b/gradle/generation/javacc.gradle
@@ -26,7 +26,7 @@ configure(rootProject) {
}
dependencies {
- javacc "net.java.dev.javacc:javacc:${scriptDepVersions['javacc']}"
+ javacc libs.javacc.javacc
}
task javacc() {
diff --git a/gradle/globals.gradle b/gradle/globals.gradle
index 30eaa0857ab..d8a99de69c2 100644
--- a/gradle/globals.gradle
+++ b/gradle/globals.gradle
@@ -37,7 +37,7 @@ allprojects {
// so :solr:core will have solr-core.jar, etc.
project.archivesBaseName = project.path.replaceAll("^:", "").replace(':', '-')
- ext {
+ project.ext {
// Utility method to support passing overrides via -P or -D.
propertyOrDefault = { propName, defValue ->
def result
@@ -173,5 +173,6 @@ allprojects {
// Assign different java version for client-side modules 'api' and 'solrj*'
var isSolrJ = project.name.matches("^(solrj.*|api)\$")
minJavaVersion = isSolrJ ? rootProject.minJavaVersionSolrJ : rootProject.minJavaVersionDefault
+ minJavaTestVersion = rootProject.minJavaVersionDefault
}
}
diff --git a/gradle/ide/eclipse.gradle b/gradle/ide/eclipse.gradle
index a088c8b87a5..d7d453c39d9 100644
--- a/gradle/ide/eclipse.gradle
+++ b/gradle/ide/eclipse.gradle
@@ -21,65 +21,68 @@ import org.gradle.plugins.ide.eclipse.model.ClasspathEntry
def resources = scriptResources(buildscript)
configure(rootProject) {
- apply plugin: "eclipse"
+ plugins.withType(JavaPlugin) {
+ apply plugin: "eclipse"
- def relativize = { other -> rootProject.rootDir.relativePath(other).toString() }
+ def eclipseJavaVersion = propertyOrDefault("eclipse.javaVersion", libs.versions.java.min.get())
+ def relativize = { other -> rootProject.rootDir.relativePath(other).toString() }
- eclipse {
- project {
- name = "Apache Solr ${version}"
- }
+ eclipse {
+ project {
+ name = "Apache Solr ${version}"
+ }
- classpath {
- downloadSources = true
- downloadJavadoc = true
- defaultOutputDir = file('build/eclipse')
+ classpath {
+ downloadSources = true
+ downloadJavadoc = true
+ defaultOutputDir = file('build/eclipse')
- file {
- beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } }
+ file {
+ beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } }
- whenMerged { classpath ->
- def projects = allprojects.findAll { prj ->
- return prj.plugins.hasPlugin(JavaPlugin) &&
- prj.path != ":solr:solr-ref-guide"
- }
-
- Set sources = []
- Set jars = []
- projects.each { prj ->
- prj.sourceSets.each { sourceSet ->
- sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
- sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
+ whenMerged { classpath ->
+ def projects = allprojects.findAll { prj ->
+ return prj.plugins.hasPlugin(JavaPlugin) &&
+ prj.path != ":solr:solr-ref-guide"
}
- // This is hacky - we take the resolved compile classpath and just
- // include JAR files from there. We should probably make it smarter
- // by looking at real dependencies. But then: this Eclipse configuration
- // doesn't really separate sources anyway so why bother.
- jars += prj.configurations.compileClasspath.resolve()
- jars += prj.configurations.testCompileClasspath.resolve()
- }
+ Set sources = []
+ Set jars = []
+ projects.each { prj ->
+ prj.sourceSets.each { sourceSet ->
+ sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
+ sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
+ }
+
+ // This is hacky - we take the resolved compile classpath and just
+ // include JAR files from there. We should probably make it smarter
+ // by looking at real dependencies. But then: this Eclipse configuration
+ // doesn't really separate sources anyway so why bother.
+ jars += prj.configurations.compileClasspath.resolve()
+ jars += prj.configurations.testCompileClasspath.resolve()
+ }
- classpath.entries += sources.sort().collect {name -> new SourceFolder(name, "build/eclipse/" + name) }
- classpath.entries += jars.unique().findAll { location -> location.isFile() }.collect { location ->
- new LibEntry(location.toString())
+ classpath.entries += sources.sort().collect {name -> new SourceFolder(name, "build/eclipse/" + name) }
+ classpath.entries += jars.unique().findAll { location -> location.isFile() }.collect { location ->
+ new LibEntry(location.toString())
+ }
}
}
}
}
jdt {
- sourceCompatibility = rootProject.minJavaVersionDefault
- targetCompatibility = rootProject.minJavaVersionDefault
- javaRuntimeName = "JavaSE-${rootProject.minJavaVersionDefault}"
+ sourceCompatibility = eclipseJavaVersion
+ targetCompatibility = eclipseJavaVersion
+ javaRuntimeName = "JavaSE-${eclipseJavaVersion}"
}
- }
- eclipseJdt {
- doLast {
- project.sync {
- from rootProject.file("${resources}/dot.settings")
- into rootProject.file(".settings")
+ eclipseJdt {
+ doLast {
+ project.sync {
+ from rootProject.file("${resources}/dot.settings")
+ into rootProject.file(".settings")
+ }
}
}
}
diff --git a/gradle/java/javac.gradle b/gradle/java/javac.gradle
index 53320cc01c0..ec33f977c64 100644
--- a/gradle/java/javac.gradle
+++ b/gradle/java/javac.gradle
@@ -19,14 +19,19 @@
allprojects {
plugins.withType(JavaPlugin) {
- sourceCompatibility = project.minJavaVersion
- targetCompatibility = project.minJavaVersion
-
- // Use 'release' flag instead of 'source' and 'target'
- tasks.withType(JavaCompile) {
- options.compilerArgs += ["--release", project.minJavaVersion.toString()]
- }
-
+ // Use 'release' flag instead of 'source' and 'target'
+ tasks.withType(JavaCompile) {
+ compileTestJava {
+ sourceCompatibility = project.minJavaTestVersion
+ targetCompatibility = project.minJavaTestVersion
+ options.compilerArgs += ["--release", project.minJavaTestVersion.toString()]
+ }
+ compileJava {
+ sourceCompatibility = project.minJavaVersion
+ targetCompatibility = project.minJavaVersion
+ options.compilerArgs += ["--release", project.minJavaVersion.toString()]
+ }
+ }
// Configure warnings.
tasks.withType(JavaCompile) {
options.encoding = "UTF-8"
@@ -51,22 +56,12 @@ allprojects {
"-Xdoclint:all/protected",
"-Xdoclint:-missing",
"-Xdoclint:-accessibility",
+ "-Xlint:synchronization",
+ "-Xlint:text-blocks",
"-proc:none", // proc:none was added because of LOG4J2-1925 / JDK-8186647
+ "-Xlint:removal"
]
- // enable some warnings only relevant to newer language features
- if (rootProject.runtimeJavaVersion >= JavaVersion.VERSION_15) {
- options.compilerArgs += [
- "-Xlint:text-blocks",
- ]
- }
-
- if (rootProject.runtimeJavaVersion >= JavaVersion.VERSION_16) {
- options.compilerArgs += [
- "-Xlint:synchronization",
- ]
- }
-
if (propertyOrDefault("javac.failOnWarnings", true).toBoolean()) {
options.compilerArgs += "-Werror"
}
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
new file mode 100644
index 00000000000..685fe012216
--- /dev/null
+++ b/gradle/libs.versions.toml
@@ -0,0 +1,459 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+[versions]
+adobe-testing-s3mock = "2.17.0"
+amazon-awssdk = "2.26.19"
+# @keep Antora version used in ref-guide
+antora = "3.1.4"
+# @keep Most recent commit as of 2022-06-24, this repo does not have tags
+antora-default-ui = "51ad811622394027afb4e182c2fdabc235ae04dd"
+# @keep Antora Lunr extensions version used in ref-guide
+antora-lunr-extension = "1.0.0-alpha.8"
+apache-calcite = "1.37.0"
+apache-calcite-avatica = "1.25.0"
+apache-commons-collections4 = "4.4"
+apache-commons-compress = "1.26.1"
+apache-commons-configuration2 = "2.11.0"
+apache-commons-exec = "1.4.0"
+apache-commons-lang3 = "3.15.0"
+apache-commons-math3 = "3.6.1"
+# @keep for version alignment
+apache-commons-text = "1.12.0"
+apache-curator = "5.7.1"
+apache-hadoop = "3.4.0"
+apache-hadoop-thirdparty = "1.2.0"
+apache-httpcomponents-httpclient = "4.5.14"
+apache-httpcomponents-httpcore = "4.4.16"
+apache-httpcomponents-httpmime = "4.5.14"
+apache-kafka = "3.7.1"
+apache-log4j = "2.21.0"
+apache-lucene = "9.12.1"
+apache-opennlp = "1.9.4"
+apache-poi = "5.2.2"
+apache-rat = "0.15"
+apache-tika = "1.28.5"
+apache-tomcat = "6.0.53"
+apache-zookeeper = "3.9.2"
+# @keep for version alignment
+apiguardian = "1.1.2"
+aqute-bnd = "6.4.1"
+# @keep Asciidoctor mathjax version used in ref-guide
+asciidoctor-mathjax = "0.0.9"
+# @keep Asciidoctor tabs version used in ref-guide
+asciidoctor-tabs = "1.0.0-beta.6"
+# @keep bats-assert (node) version used in packaging
+bats-assert = "2.0.0"
+# @keep bats-core (node) version used in packaging
+bats-core = "1.8.2"
+# @keep bats-file (node) version used in packaging
+bats-file = "0.3.0"
+bc-jose4j = "0.9.6"
+benmanes-caffeine = "3.1.8"
+benmanes-versions = "0.51.0"
+bouncycastle = "1.78.1"
+# @keep Browserify version used in ref-guide
+browserify = "17.0.0"
+carrot2-core = "4.5.1"
+carrotsearch-dependencychecks = "0.0.9"
+carrotsearch-hppc = "0.10.0"
+carrotsearch-randomizedtesting = "2.8.1"
+# @keep for version alignment
+checkerframework = "3.44.0"
+codehaus-woodstox = "4.2.2"
+commons-cli = "1.9.0"
+commons-codec = "1.17.1"
+commons-collections = "3.2.2"
+commons-io = "2.15.1"
+cutterslade-analyze = "1.10.0"
+cybozulabs-langdetect = "1.1-20120112"
+diffplug-spotless = "6.5.2"
+dropwizard-metrics = "4.2.26"
+eclipse-ecj = "3.39.0"
+eclipse-jetty = "10.0.22"
+eclipse-jettytoolchain = "4.0.6"
+# @keep jgit version used by git-status.gradle
+eclipse-jgit = "6.7.0.202309050840-r"
+fasterxml = "2.18.0"
+fasterxml-woodstox = "7.0.0"
+# @keep Flexmark used in classpath
+flexmark = "0.64.8"
+google-api-gax = "2.33.0"
+# @keep for version alignment
+google-api-grpc-proto = "2.41.0"
+google-auth = "1.19.0"
+# @keep for version alignment
+google-autovalue = "1.10.4"
+google-cloud-bom = "0.224.0"
+google-cloud-core = "2.23.0"
+google-cloud-nio = "0.127.3"
+google-cloud-storage = "2.27.0"
+google-errorprone = "2.31.0"
+# @keep for version alignment
+google-failureaccess = "1.0.2"
+# @keep for version alignment
+google-gson = "2.11.0"
+google-guava = "33.1.0-jre"
+# @keep for version alignment
+google-j2objc = "3.0.0"
+# @keep This is the google-java-format (GJF) version used by spotless / the tidy task.
+google-javaformat = "1.18.1"
+# @keep for version alignment
+google-protobuf = "3.25.3"
+google-re2j = "1.7"
+# @keep Gradle version to run the build
+gradle = "8.10"
+grpc = "1.65.1"
+# @keep Gulp version used in ref-guide
+gulp-cli = "2.3.0"
+hamcrest = "3.0"
+hk2 = "3.1.1"
+hsqldb = "2.7.2"
+ibm-icu = "74.2"
+immutables-valueannotations = "2.10.1"
+j256-simplemagic = "1.17"
+jakarta-annotation = "2.1.1"
+jakarta-inject = "2.0.1"
+jakarta-ws = "3.1.0"
+# @keep This is the minimum required Java version for the project.
+java-min = "21"
+# @keep This is the minimum required Java version for SolrJ.
+java-solrj = "17"
+javacc = "7.0.12"
+# @keep for version alignment
+jaxb = "2.3.8"
+jayway-jsonpath = "2.9.0"
+jctools = "4.0.5"
+jersey = "3.1.9"
+# TODO Sync with jersey versions
+jersey-containers = "2.39.1"
+# @keep for version alignment
+joda-time = "2.8.1"
+junit = "4.13.2"
+langchain4j = "0.35.0"
+# @keep Link checker version used in ref-guide
+link-checker = "1.4.2"
+littlerobots-versioncatalogupdate = "0.8.4"
+lmax-disruptor = "3.4.4"
+ltgt-errorprone = "3.1.0"
+mockito = "5.12.0"
+morethan-jmhreport = "0.9.0"
+navsecurity = "0.5.10"
+netty = "4.1.114.Final"
+# @keep for version alignment
+netty-tcnative = "2.0.66.Final"
+nimbusds-josejwt = "9.30.2"
+nodegradle-node = "7.0.1"
+# @keep Node JS version used in node.gradle (LTS)
+nodejs = "16.20.2"
+openapi = "7.6.0"
+openjdk-jmh = "1.37"
+opentelemetry = "1.40.0"
+osgi-annotation = "8.1.0"
+# @keep for version alignment
+ow2-asm = "9.3"
+owasp-dependencycheck = "9.0.8"
+# @keep for version alignment
+perfmark = "0.27.0"
+prometheus-metrics = "1.1.0"
+prometheus-simpleclient = "0.16.0"
+quicktheories = "0.26"
+semver4j = "5.3.0"
+slf4j = "2.0.13"
+spatial4j = "0.8"
+spotbugs = "4.8.6"
+squareup-okhttp3-mockwebserver = "4.11.0"
+squareup-okhttp3-okhttp = "4.12.0"
+stephenc-jcip = "1.0-1"
+swagger3 = "2.2.22"
+tdunning-tdigest = "3.3"
+thetaphi-forbiddenapis = "3.7"
+thisptr-jacksonjq = "0.0.13"
+threeten-bp = "1.6.8"
+undercouch-download = "5.5.0"
+xerces = "2.12.2"
+xerial-snappy = "1.1.10.5"
+
+[plugins]
+benmanes-versions = { id = "com.github.ben-manes.versions", version.ref = "benmanes-versions" }
+carrotsearch-dependencychecks = { id = "com.carrotsearch.gradle.dependencychecks", version.ref = "carrotsearch-dependencychecks" }
+cutterslade-analyze = { id = "ca.cutterslade.analyze", version.ref = "cutterslade-analyze" }
+diffplug-spotless = { id = "com.diffplug.spotless", version.ref = "diffplug-spotless" }
+littlerobots-versioncatalogupdate = { id = "nl.littlerobots.version-catalog-update", version.ref = "littlerobots-versioncatalogupdate" }
+ltgt-errorprone = { id = "net.ltgt.errorprone", version.ref = "ltgt-errorprone" }
+morethan-jmhreport = { id = "io.morethan.jmhreport", version.ref = "morethan-jmhreport" }
+nodegradle-node = { id = "com.github.node-gradle.node", version.ref = "nodegradle-node" }
+openapi-generator = { id = "org.openapi.generator", version.ref = "openapi" }
+owasp-dependencycheck = { id = "org.owasp.dependencycheck", version.ref = "owasp-dependencycheck" }
+swagger3-core = { id = "io.swagger.core.v3.swagger-gradle-plugin", version.ref = "swagger3" }
+thetaphi-forbiddenapis = { id = "de.thetaphi.forbiddenapis", version.ref = "thetaphi-forbiddenapis" }
+undercouch-download = { id = "de.undercouch.download", version.ref = "undercouch-download" }
+
+[libraries]
+adobe-testing-s3mock-junit4 = { module = "com.adobe.testing:s3mock-junit4", version.ref = "adobe-testing-s3mock" }
+adobe-testing-s3mock-testsupportcommon = { module = "com.adobe.testing:s3mock-testsupport-common", version.ref = "adobe-testing-s3mock" }
+amazon-awssdk-apacheclient = { module = "software.amazon.awssdk:apache-client", version.ref = "amazon-awssdk" }
+amazon-awssdk-auth = { module = "software.amazon.awssdk:auth", version.ref = "amazon-awssdk" }
+amazon-awssdk-awscore = { module = "software.amazon.awssdk:aws-core", version.ref = "amazon-awssdk" }
+amazon-awssdk-bom = { module = "software.amazon.awssdk:bom", version.ref = "amazon-awssdk" }
+amazon-awssdk-httpclient-spi = { module = "software.amazon.awssdk:http-client-spi", version.ref = "amazon-awssdk" }
+amazon-awssdk-profiles = { module = "software.amazon.awssdk:profiles", version.ref = "amazon-awssdk" }
+amazon-awssdk-regions = { module = "software.amazon.awssdk:regions", version.ref = "amazon-awssdk" }
+amazon-awssdk-s3 = { module = "software.amazon.awssdk:s3", version.ref = "amazon-awssdk" }
+amazon-awssdk-sdkcore = { module = "software.amazon.awssdk:sdk-core", version.ref = "amazon-awssdk" }
+amazon-awssdk-sts = { module = "software.amazon.awssdk:sts", version.ref = "amazon-awssdk" }
+apache-calcite-avatica-core = { module = "org.apache.calcite.avatica:avatica-core", version.ref = "apache-calcite-avatica" }
+apache-calcite-core = { module = "org.apache.calcite:calcite-core", version.ref = "apache-calcite" }
+apache-calcite-linq4j = { module = "org.apache.calcite:calcite-linq4j", version.ref = "apache-calcite" }
+apache-commons-collections4 = { module = "org.apache.commons:commons-collections4", version.ref = "apache-commons-collections4" }
+apache-commons-compress = { module = "org.apache.commons:commons-compress", version.ref = "apache-commons-compress" }
+apache-commons-configuration2 = { module = "org.apache.commons:commons-configuration2", version.ref = "apache-commons-configuration2" }
+apache-commons-exec = { module = "org.apache.commons:commons-exec", version.ref = "apache-commons-exec" }
+apache-commons-lang3 = { module = "org.apache.commons:commons-lang3", version.ref = "apache-commons-lang3" }
+apache-commons-math3 = { module = "org.apache.commons:commons-math3", version.ref = "apache-commons-math3" }
+# @keep transitive dependency for version alignment
+apache-commons-text = { module = "org.apache.commons:commons-text", version.ref = "apache-commons-text" }
+apache-curator-client = { module = "org.apache.curator:curator-client", version.ref = "apache-curator" }
+apache-curator-framework = { module = "org.apache.curator:curator-framework", version.ref = "apache-curator" }
+apache-curator-recipes = { module = "org.apache.curator:curator-recipes", version.ref = "apache-curator" }
+apache-curator-test = { module = "org.apache.curator:curator-test", version.ref = "apache-curator" }
+apache-hadoop-client-api = { module = "org.apache.hadoop:hadoop-client-api", version.ref = "apache-hadoop" }
+apache-hadoop-client-minicluster = { module = "org.apache.hadoop:hadoop-client-minicluster", version.ref = "apache-hadoop" }
+apache-hadoop-client-runtime = { module = "org.apache.hadoop:hadoop-client-runtime", version.ref = "apache-hadoop" }
+apache-hadoop-hdfs = { module = "org.apache.hadoop:hadoop-hdfs", version.ref = "apache-hadoop" }
+apache-hadoop-thirdparty-shadedguava = { module = "org.apache.hadoop.thirdparty:hadoop-shaded-guava", version.ref = "apache-hadoop-thirdparty" }
+apache-httpcomponents-httpclient = { module = "org.apache.httpcomponents:httpclient", version.ref = "apache-httpcomponents-httpclient" }
+apache-httpcomponents-httpcore = { module = "org.apache.httpcomponents:httpcore", version.ref = "apache-httpcomponents-httpcore" }
+apache-httpcomponents-httpmime = { module = "org.apache.httpcomponents:httpmime", version.ref = "apache-httpcomponents-httpmime" }
+apache-kafka-clients = { module = "org.apache.kafka:kafka-clients", version.ref = "apache-kafka" }
+apache-kafka-kafka213 = { module = "org.apache.kafka:kafka_2.13", version.ref = "apache-kafka" }
+apache-kafka-server-common = { module = "org.apache.kafka:kafka-server-common", version.ref = "apache-kafka" }
+apache-kafka-streams = { module = "org.apache.kafka:kafka-streams", version.ref = "apache-kafka" }
+apache-log4j-api = { module = "org.apache.logging.log4j:log4j-api", version.ref = "apache-log4j" }
+apache-log4j-core = { module = "org.apache.logging.log4j:log4j-core", version.ref = "apache-log4j" }
+apache-log4j-jul = { module = "org.apache.logging.log4j:log4j-jul", version.ref = "apache-log4j" }
+apache-log4j-layout-templatejson = { module = "org.apache.logging.log4j:log4j-layout-template-json", version.ref = "apache-log4j" }
+apache-log4j-slf4j2impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "apache-log4j" }
+apache-log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "apache-log4j" }
+apache-log4j1-api = { module = "org.apache.logging.log4j:log4j-1.2-api", version.ref = "apache-log4j" }
+apache-lucene-analysis-common = { module = "org.apache.lucene:lucene-analysis-common", version.ref = "apache-lucene" }
+apache-lucene-analysis-icu = { module = "org.apache.lucene:lucene-analysis-icu", version.ref = "apache-lucene" }
+apache-lucene-analysis-kuromoji = { module = "org.apache.lucene:lucene-analysis-kuromoji", version.ref = "apache-lucene" }
+apache-lucene-analysis-morfologik = { module = "org.apache.lucene:lucene-analysis-morfologik", version.ref = "apache-lucene" }
+apache-lucene-analysis-nori = { module = "org.apache.lucene:lucene-analysis-nori", version.ref = "apache-lucene" }
+apache-lucene-analysis-opennlp = { module = "org.apache.lucene:lucene-analysis-opennlp", version.ref = "apache-lucene" }
+apache-lucene-analysis-phonetic = { module = "org.apache.lucene:lucene-analysis-phonetic", version.ref = "apache-lucene" }
+apache-lucene-analysis-smartcn = { module = "org.apache.lucene:lucene-analysis-smartcn", version.ref = "apache-lucene" }
+apache-lucene-analysis-stempel = { module = "org.apache.lucene:lucene-analysis-stempel", version.ref = "apache-lucene" }
+apache-lucene-backward-codecs = { module = "org.apache.lucene:lucene-backward-codecs", version.ref = "apache-lucene" }
+apache-lucene-classification = { module = "org.apache.lucene:lucene-classification", version.ref = "apache-lucene" }
+apache-lucene-codecs = { module = "org.apache.lucene:lucene-codecs", version.ref = "apache-lucene" }
+apache-lucene-core = { module = "org.apache.lucene:lucene-core", version.ref = "apache-lucene" }
+apache-lucene-expressions = { module = "org.apache.lucene:lucene-expressions", version.ref = "apache-lucene" }
+apache-lucene-grouping = { module = "org.apache.lucene:lucene-grouping", version.ref = "apache-lucene" }
+apache-lucene-highlighter = { module = "org.apache.lucene:lucene-highlighter", version.ref = "apache-lucene" }
+apache-lucene-join = { module = "org.apache.lucene:lucene-join", version.ref = "apache-lucene" }
+apache-lucene-misc = { module = "org.apache.lucene:lucene-misc", version.ref = "apache-lucene" }
+apache-lucene-queries = { module = "org.apache.lucene:lucene-queries", version.ref = "apache-lucene" }
+apache-lucene-queryparser = { module = "org.apache.lucene:lucene-queryparser", version.ref = "apache-lucene" }
+apache-lucene-spatialextras = { module = "org.apache.lucene:lucene-spatial-extras", version.ref = "apache-lucene" }
+apache-lucene-suggest = { module = "org.apache.lucene:lucene-suggest", version.ref = "apache-lucene" }
+apache-lucene-testframework = { module = "org.apache.lucene:lucene-test-framework", version.ref = "apache-lucene" }
+apache-opennlp-tools = { module = "org.apache.opennlp:opennlp-tools", version.ref = "apache-opennlp" }
+apache-poi-ooxml = { module = "org.apache.poi:poi-ooxml", version.ref = "apache-poi" }
+apache-poi-poi = { module = "org.apache.poi:poi", version.ref = "apache-poi" }
+apache-rat-rat = { module = "org.apache.rat:apache-rat", version.ref = "apache-rat" }
+apache-tika-core = { module = "org.apache.tika:tika-core", version.ref = "apache-tika" }
+apache-tika-parsers = { module = "org.apache.tika:tika-parsers", version.ref = "apache-tika" }
+apache-tomcat-annotationsapi = { module = "org.apache.tomcat:annotations-api", version.ref = "apache-tomcat" }
+apache-zookeeper-jute = { module = "org.apache.zookeeper:zookeeper-jute", version.ref = "apache-zookeeper" }
+apache-zookeeper-zookeeper = { module = "org.apache.zookeeper:zookeeper", version.ref = "apache-zookeeper" }
+# @keep transitive dependency for version alignment
+apiguardian-api = { module = "org.apiguardian:apiguardian-api", version.ref = "apiguardian" }
+aqute-bnd-annotation = { module = "biz.aQute.bnd:biz.aQute.bnd.annotation", version.ref = "aqute-bnd" }
+bc-jose4j = { module = "org.bitbucket.b_c:jose4j", version.ref = "bc-jose4j" }
+benmanes-caffeine = { module = "com.github.ben-manes.caffeine:caffeine", version.ref = "benmanes-caffeine" }
+bouncycastle-bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version.ref = "bouncycastle" }
+bouncycastle-bcprov = { module = "org.bouncycastle:bcprov-jdk18on", version.ref = "bouncycastle" }
+carrot2-core = { module = "org.carrot2:carrot2-core", version.ref = "carrot2-core" }
+carrotsearch-hppc = { module = "com.carrotsearch:hppc", version.ref = "carrotsearch-hppc" }
+carrotsearch-randomizedtesting-runner = { module = "com.carrotsearch.randomizedtesting:randomizedtesting-runner", version.ref = "carrotsearch-randomizedtesting" }
+# @keep transitive dependency for version alignment
+checkerframework-qual = { module = "org.checkerframework:checker-qual", version.ref = "checkerframework" }
+codehaus-woodstox-stax2api = { module = "org.codehaus.woodstox:stax2-api", version.ref = "codehaus-woodstox" }
+commonscli-commonscli = { module = "commons-cli:commons-cli", version.ref = "commons-cli" }
+commonscodec-commonscodec = { module = "commons-codec:commons-codec", version.ref = "commons-codec" }
+commonscollections-commonscollections = { module = "commons-collections:commons-collections", version.ref = "commons-collections" }
+commonsio-commonsio = { module = "commons-io:commons-io", version.ref = "commons-io" }
+cybozulabs-langdetect = { module = "com.cybozu.labs:langdetect", version.ref = "cybozulabs-langdetect" }
+dropwizard-metrics-core = { module = "io.dropwizard.metrics:metrics-core", version.ref = "dropwizard-metrics" }
+dropwizard-metrics-graphite = { module = "io.dropwizard.metrics:metrics-graphite", version.ref = "dropwizard-metrics" }
+dropwizard-metrics-jetty10 = { module = "io.dropwizard.metrics:metrics-jetty10", version.ref = "dropwizard-metrics" }
+dropwizard-metrics-jmx = { module = "io.dropwizard.metrics:metrics-jmx", version.ref = "dropwizard-metrics" }
+dropwizard-metrics-jvm = { module = "io.dropwizard.metrics:metrics-jvm", version.ref = "dropwizard-metrics" }
+dropwizard-metrics-servlets = { module = "io.dropwizard.metrics:metrics-servlets", version.ref = "dropwizard-metrics" }
+eclipse-jdt-ecj = { module = "org.eclipse.jdt:ecj", version.ref = "eclipse-ecj" }
+eclipse-jetty-alpnjavaclient = { module = "org.eclipse.jetty:jetty-alpn-java-client", version.ref = "eclipse-jetty" }
+eclipse-jetty-alpnjavaserver = { module = "org.eclipse.jetty:jetty-alpn-java-server", version.ref = "eclipse-jetty" }
+eclipse-jetty-alpnserver = { module = "org.eclipse.jetty:jetty-alpn-server", version.ref = "eclipse-jetty" }
+eclipse-jetty-client = { module = "org.eclipse.jetty:jetty-client", version.ref = "eclipse-jetty" }
+eclipse-jetty-deploy = { module = "org.eclipse.jetty:jetty-deploy", version.ref = "eclipse-jetty" }
+eclipse-jetty-http = { module = "org.eclipse.jetty:jetty-http", version.ref = "eclipse-jetty" }
+eclipse-jetty-http2-client = { module = "org.eclipse.jetty.http2:http2-client", version.ref = "eclipse-jetty" }
+eclipse-jetty-http2-common = { module = "org.eclipse.jetty.http2:http2-common", version.ref = "eclipse-jetty" }
+eclipse-jetty-http2-hpack = { module = "org.eclipse.jetty.http2:http2-hpack", version.ref = "eclipse-jetty" }
+eclipse-jetty-http2-httpclienttransport = { module = "org.eclipse.jetty.http2:http2-http-client-transport", version.ref = "eclipse-jetty" }
+eclipse-jetty-http2-server = { module = "org.eclipse.jetty.http2:http2-server", version.ref = "eclipse-jetty" }
+eclipse-jetty-io = { module = "org.eclipse.jetty:jetty-io", version.ref = "eclipse-jetty" }
+eclipse-jetty-jmx = { module = "org.eclipse.jetty:jetty-jmx", version.ref = "eclipse-jetty" }
+eclipse-jetty-rewrite = { module = "org.eclipse.jetty:jetty-rewrite", version.ref = "eclipse-jetty" }
+eclipse-jetty-security = { module = "org.eclipse.jetty:jetty-security", version.ref = "eclipse-jetty" }
+eclipse-jetty-server = { module = "org.eclipse.jetty:jetty-server", version.ref = "eclipse-jetty" }
+eclipse-jetty-servlet = { module = "org.eclipse.jetty:jetty-servlet", version.ref = "eclipse-jetty" }
+eclipse-jetty-servlets = { module = "org.eclipse.jetty:jetty-servlets", version.ref = "eclipse-jetty" }
+eclipse-jetty-start = { module = "org.eclipse.jetty:jetty-start", version.ref = "eclipse-jetty" }
+eclipse-jetty-toolchain-servletapi = { module = "org.eclipse.jetty.toolchain:jetty-servlet-api", version.ref = "eclipse-jettytoolchain" }
+eclipse-jetty-util = { module = "org.eclipse.jetty:jetty-util", version.ref = "eclipse-jetty" }
+eclipse-jetty-webapp = { module = "org.eclipse.jetty:jetty-webapp", version.ref = "eclipse-jetty" }
+eclipse-jetty-xml = { module = "org.eclipse.jetty:jetty-xml", version.ref = "eclipse-jetty" }
+eclipse-jgit-jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version.ref = "eclipse-jgit" }
+fasterxml-jackson-bom = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "fasterxml" }
+fasterxml-jackson-core-annotations = { module = "com.fasterxml.jackson.core:jackson-annotations", version.ref = "fasterxml" }
+fasterxml-jackson-core-core = { module = "com.fasterxml.jackson.core:jackson-core", version.ref = "fasterxml" }
+fasterxml-jackson-core-databind = { module = "com.fasterxml.jackson.core:jackson-databind", version.ref = "fasterxml" }
+fasterxml-jackson-dataformat-cbor = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor", version.ref = "fasterxml" }
+fasterxml-jackson-dataformat-smile = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-smile", version.ref = "fasterxml" }
+fasterxml-woodstox-core = { module = "com.fasterxml.woodstox:woodstox-core", version.ref = "fasterxml-woodstox" }
+flexmark-extensions-abbreviation = { module = "com.vladsch.flexmark:flexmark-ext-abbreviation", version.ref = "flexmark" }
+flexmark-extensions-attributes = { module = "com.vladsch.flexmark:flexmark-ext-attributes", version.ref = "flexmark" }
+flexmark-extensions-autolink = { module = "com.vladsch.flexmark:flexmark-ext-autolink", version.ref = "flexmark" }
+flexmark-flexmark = { module = "com.vladsch.flexmark:flexmark", version.ref = "flexmark" }
+google-api-gax = { module = "com.google.api:gax", version.ref = "google-api-gax" }
+# @keep transitive dependency for version alignment
+google-api-grpc-proto = { module = "com.google.api.grpc:proto-google-common-protos", version.ref = "google-api-grpc-proto" }
+google-auth-credentials = { module = "com.google.auth:google-auth-library-credentials", version.ref = "google-auth" }
+google-auth-oauth2http = { module = "com.google.auth:google-auth-library-oauth2-http", version.ref = "google-auth" }
+# @keep transitive dependency for version alignment
+google-autovalue-annotations = { module = "com.google.auto.value:auto-value-annotations", version.ref = "google-autovalue" }
+google-cloud-bom = { module = "com.google.cloud:google-cloud-bom", version.ref = "google-cloud-bom" }
+google-cloud-core = { module = "com.google.cloud:google-cloud-core", version.ref = "google-cloud-core" }
+google-cloud-corehttp = { module = "com.google.cloud:google-cloud-core-http", version.ref = "google-cloud-core" }
+google-cloud-nio = { module = "com.google.cloud:google-cloud-nio", version.ref = "google-cloud-nio" }
+google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version.ref = "google-cloud-storage" }
+# @keep transitive dependency for version alignment
+google-errorprone-annotations = { module = "com.google.errorprone:error_prone_annotations", version.ref = "google-errorprone" }
+google-errorprone-core = { module = "com.google.errorprone:error_prone_core", version.ref = "google-errorprone" }
+# @keep transitive dependency for version alignment
+google-gson = { module = "com.google.code.gson:gson", version.ref = "google-gson" }
+google-guava = { module = "com.google.guava:guava", version.ref = "google-guava" }
+# @keep transitive dependency for version alignment
+google-j2objc-annotations = { module = "com.google.j2objc:j2objc-annotations", version.ref = "google-j2objc" }
+# @keep transitive dependency for version alignment
+google-protobuf-java = { module = "com.google.protobuf:protobuf-java", version.ref = "google-protobuf" }
+google-protobuf-javautils = { module = "com.google.protobuf:protobuf-java-util", version.ref = "google-protobuf" }
+google-re2j = { module = "com.google.re2j:re2j", version.ref = "google-re2j" }
+# @keep transitive dependency for version alignment
+grpc-api = { module = "io.grpc:grpc-api", version.ref = "grpc" }
+# @keep transitive dependency for version alignment
+grpc-bom = { module = "io.grpc:grpc-bom", version.ref = "grpc" }
+grpc-context = { module = "io.grpc:grpc-context", version.ref = "grpc" }
+# @keep transitive dependency for version alignment
+grpc-core = { module = "io.grpc:grpc-core", version.ref = "grpc" }
+grpc-netty = { module = "io.grpc:grpc-netty", version.ref = "grpc" }
+grpc-protobuf = { module = "io.grpc:grpc-protobuf", version.ref = "grpc" }
+# @keep transitive dependency for version alignment
+grpc-protobuf-lite = { module = "io.grpc:grpc-protobuf-lite", version.ref = "grpc" }
+grpc-stub = { module = "io.grpc:grpc-stub", version.ref = "grpc" }
+# @keep transitive dependency for version alignment
+grpc-util = { module = "io.grpc:grpc-util", version.ref = "grpc" }
+hamcrest-hamcrest = { module = "org.hamcrest:hamcrest", version.ref = "hamcrest" }
+hk2-api = { module = "org.glassfish.hk2:hk2-api", version.ref = "hk2" }
+# @keep transitive dependency for version alignment
+hk2-locator = { module = "org.glassfish.hk2:hk2-locator", version.ref = "hk2" }
+hsqldb-hsqldb = { module = "org.hsqldb:hsqldb", version.ref = "hsqldb" }
+ibm-icu-icu4j = { module = "com.ibm.icu:icu4j", version.ref = "ibm-icu" }
+immutables-valueannotations = { module = "org.immutables:value-annotations", version.ref = "immutables-valueannotations" }
+j256-simplemagic = { module = "com.j256.simplemagic:simplemagic", version.ref = "j256-simplemagic" }
+jakarta-annotation-api = { module = "jakarta.annotation:jakarta.annotation-api", version.ref = "jakarta-annotation" }
+jakarta-inject-api = { module = "jakarta.inject:jakarta.inject-api", version.ref = "jakarta-inject" }
+jakarta-ws-rsapi = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "jakarta-ws" }
+javacc-javacc = { module = "net.java.dev.javacc:javacc", version.ref = "javacc" }
+# @keep transitive dependency for version alignment
+jaxb-runtime = { module = "org.glassfish.jaxb:jaxb-runtime", version.ref = "jaxb" }
+jayway-jsonpath = { module = "com.jayway.jsonpath:json-path", version.ref = "jayway-jsonpath" }
+jctools-core = { module = "org.jctools:jctools-core", version.ref = "jctools" }
+jersey-containers-jettyhttp = { module = "org.glassfish.jersey.containers:jersey-container-jetty-http", version.ref = "jersey-containers" }
+jersey-core-common = { module = "org.glassfish.jersey.core:jersey-common", version.ref = "jersey" }
+jersey-core-server = { module = "org.glassfish.jersey.core:jersey-server", version.ref = "jersey" }
+jersey-inject-hk2 = { module = "org.glassfish.jersey.inject:jersey-hk2", version.ref = "jersey" }
+jersey-media-jsonjackson = { module = "org.glassfish.jersey.media:jersey-media-json-jackson", version.ref = "jersey" }
+# @keep transitive dependency for version alignment
+jodatime-jodatime = { module = "joda-time:joda-time", version.ref = "joda-time" }
+junit-junit = { module = "junit:junit", version.ref = "junit" }
+langchain4j-cohere = { module = "dev.langchain4j:langchain4j-cohere", version.ref = "langchain4j" }
+langchain4j-core = { module = "dev.langchain4j:langchain4j-core", version.ref = "langchain4j" }
+langchain4j-hugging-face = { module = "dev.langchain4j:langchain4j-hugging-face", version.ref = "langchain4j" }
+langchain4j-mistral-ai = { module = "dev.langchain4j:langchain4j-mistral-ai", version.ref = "langchain4j" }
+langchain4j-open-ai = { module = "dev.langchain4j:langchain4j-open-ai", version.ref = "langchain4j" }
+lmax-disruptor = { module = "com.lmax:disruptor", version.ref = "lmax-disruptor" }
+locationtech-spatial4j = { module = "org.locationtech.spatial4j:spatial4j", version.ref = "spatial4j" }
+mockito-core = { module = "org.mockito:mockito-core", version.ref = "mockito" }
+mockito-subclass = { module = "org.mockito:mockito-subclass", version.ref = "mockito" }
+navsecurity-mockoauth2server = { module = "no.nav.security:mock-oauth2-server", version.ref = "navsecurity" }
+netty-bom = { module = "io.netty:netty-bom", version.ref = "netty" }
+netty-codechttp = { module = "io.netty:netty-codec-http", version.ref = "netty" }
+# @keep transitive dependency for version alignment
+netty-handler = { module = "io.netty:netty-handler", version.ref = "netty" }
+# @keep transitive dependency for version alignment
+netty-tcnative-boringssl = { module = "io.netty:netty-tcnative-boringssl-static", version.ref = "netty-tcnative" }
+netty-tcnative-classes = { module = "io.netty:netty-tcnative-classes", version.ref = "netty-tcnative" }
+# @keep transitive dependency for version alignment
+netty-transport-classes-epoll = { module = "io.netty:netty-transport-classes-epoll", version.ref = "netty" }
+netty-transport-native-epoll = { module = "io.netty:netty-transport-native-epoll", version.ref = "netty" }
+nimbusds-josejwt = { module = "com.nimbusds:nimbus-jose-jwt", version.ref = "nimbusds-josejwt" }
+openjdk-jmh-core = { module = "org.openjdk.jmh:jmh-core", version.ref = "openjdk-jmh" }
+openjdk-jmh-generatorannprocess = { module = "org.openjdk.jmh:jmh-generator-annprocess", version.ref = "openjdk-jmh" }
+opentelemetry-api = { module = "io.opentelemetry:opentelemetry-api", version.ref = "opentelemetry" }
+opentelemetry-bom = { module = "io.opentelemetry:opentelemetry-bom", version.ref = "opentelemetry" }
+opentelemetry-context = { module = "io.opentelemetry:opentelemetry-context", version.ref = "opentelemetry" }
+opentelemetry-exporter-otlp = { module = "io.opentelemetry:opentelemetry-exporter-otlp", version.ref = "opentelemetry" }
+opentelemetry-sdk = { module = "io.opentelemetry:opentelemetry-sdk", version.ref = "opentelemetry" }
+opentelemetry-sdkextension-autoconfigure = { module = "io.opentelemetry:opentelemetry-sdk-extension-autoconfigure", version.ref = "opentelemetry" }
+opentelemetry-sdktesting = { module = "io.opentelemetry:opentelemetry-sdk-testing", version.ref = "opentelemetry" }
+opentelemetry-sdktrace = { module = "io.opentelemetry:opentelemetry-sdk-trace", version.ref = "opentelemetry" }
+osgi-annotation = { module = "org.osgi:osgi.annotation", version.ref = "osgi-annotation" }
+# @keep transitive dependency for version alignment
+ow2-asm = { module = "org.ow2.asm:asm", version.ref = "ow2-asm" }
+# @keep transitive dependency for version alignment
+perfmark-api = { module = "io.perfmark:perfmark-api", version.ref = "perfmark" }
+prometheus-metrics-expositionformats = { module = "io.prometheus:prometheus-metrics-exposition-formats", version.ref = "prometheus-metrics" }
+prometheus-metrics-model = { module = "io.prometheus:prometheus-metrics-model", version.ref = "prometheus-metrics" }
+prometheus-simpleclient = { module = "io.prometheus:simpleclient", version.ref = "prometheus-simpleclient" }
+prometheus-simpleclient-httpserver = { module = "io.prometheus:simpleclient_httpserver", version.ref = "prometheus-simpleclient" }
+quicktheories-quicktheories = { module = "org.quicktheories:quicktheories", version.ref = "quicktheories" }
+semver4j-semver4j = { module = "org.semver4j:semver4j", version.ref = "semver4j" }
+slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4j" }
+slf4j-jcloverslf4j = { module = "org.slf4j:jcl-over-slf4j", version.ref = "slf4j" }
+slf4j-jultoslf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" }
+spotbugs-annotations = { module = "com.github.spotbugs:spotbugs-annotations", version.ref = "spotbugs" }
+squareup-okhttp3-mockwebserver = { module = "com.squareup.okhttp3:mockwebserver", version.ref = "squareup-okhttp3-mockwebserver" }
+squareup-okhttp3-okhttp = { module = "com.squareup.okhttp3:okhttp", version.ref = "squareup-okhttp3-okhttp" }
+stephenc-jcip-annotations = { module = "com.github.stephenc.jcip:jcip-annotations", version.ref = "stephenc-jcip" }
+swagger3-annotations-jakarta = { module = "io.swagger.core.v3:swagger-annotations-jakarta", version.ref = "swagger3" }
+swagger3-jaxrs2-jakarta = { module = "io.swagger.core.v3:swagger-jaxrs2-jakarta", version.ref = "swagger3" }
+tdunning-tdigest = { module = "com.tdunning:t-digest", version.ref = "tdunning-tdigest" }
+thisptr-jacksonjq = { module = "net.thisptr:jackson-jq", version.ref = "thisptr-jacksonjq" }
+threeten-bp = { module = "org.threeten:threetenbp", version.ref = "threeten-bp" }
+xerces-impl = { module = "xerces:xercesImpl", version.ref = "xerces" }
+xerial-snappy-java = { module = "org.xerial.snappy:snappy-java", version.ref = "xerial-snappy" }
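For orientation: Gradle generates type-safe accessors from this catalog, with dashes in aliases becoming dots (library "google-guava" becomes libs.google.guava, version "apache-lucene" becomes libs.versions.apache.lucene, plugin "nodegradle-node" becomes libs.plugins.nodegradle.node). A minimal, illustrative sketch of how the entries are consumed from a build script (module and coordinates are examples only, not part of this patch):

    // some module's build.gradle
    dependencies {
        // [libraries] alias "google-guava"
        implementation libs.google.guava
    }
    // [versions] entries are read as strings:
    def luceneVersion = libs.versions.apache.lucene.get()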
diff --git a/gradle/lucene-dev/lucene-dev-repo-composite.gradle b/gradle/lucene-dev/lucene-dev-repo-composite.gradle
index d612b29fe70..62274b32e6c 100644
--- a/gradle/lucene-dev/lucene-dev-repo-composite.gradle
+++ b/gradle/lucene-dev/lucene-dev-repo-composite.gradle
@@ -104,7 +104,7 @@ if (luceneDevRepo != null) {
// We substitute the exact version of Lucene we currently have in versions.props across all the dependencies.
// We can't just substitute all references without looking at the version because
// plugin dependencies then also get substituted and everything crashes.
- String luceneVersion = (file("${rootDir}/versions.props").getText("UTF-8") =~ /org.apache.lucene:\*=(.+)/)[0][1]
+ String luceneVersion = libs.versions.apache.lucene.get()
logger.lifecycle("Local Lucene development repository will be used substituting ${luceneVersion}: ${luceneDevRepo}")
// Include Lucene repository as a composite and substitute module names.
diff --git a/gradle/maven/defaults-maven.gradle b/gradle/maven/defaults-maven.gradle
index 96e82dcc1c4..3239c04acfc 100644
--- a/gradle/maven/defaults-maven.gradle
+++ b/gradle/maven/defaults-maven.gradle
@@ -25,6 +25,7 @@ configure(rootProject) {
ext {
published = [
":solr:api",
+ ":solr:cross-dc-manager",
":solr:core",
":solr:solrj",
":solr:solrj-streaming",
@@ -150,17 +151,6 @@ configure(subprojects.findAll { it.path in rootProject.published }) { prj ->
artifact javadocJar
pom(configurePom)
-
- pom({
- // LUCENE-9561:
- // Remove dependencyManagement section created by a combination of
- // Palantir and the publishing plugin.
- //
- // https://github.com/palantir/gradle-consistent-versions/issues/550
- withXml {
- asNode().dependencyManagement.replaceNode {}
- }
- })
}
}
}
diff --git a/gradle/node.gradle b/gradle/node.gradle
index 3da3a51d40e..d585ab5f871 100644
--- a/gradle/node.gradle
+++ b/gradle/node.gradle
@@ -16,7 +16,7 @@
*/
configure([project(":solr:packaging"), project(":solr:solr-ref-guide"), project(":solr:webapp")]) {
- apply plugin: "com.github.node-gradle.node"
+ apply plugin: libs.plugins.nodegradle.node.get().pluginId
def npmRegistry = "${ -> propertyOrEnvOrDefault("solr.npm.registry", "SOLR_NPM_REGISTRY", '') }"
if (!npmRegistry.isEmpty()) {
@@ -34,14 +34,14 @@ configure([project(":solr:packaging"), project(":solr:solr-ref-guide"), project(
}
}
- ext {
+ project.ext {
rootNodeDir = "$rootDir/.gradle/node"
nodeProjectDir = file("$rootNodeDir/$project.name")
}
node {
download = true
- version = "16.20.2" // LTS
+ version = libs.versions.nodejs.get()
def nodeDistUrl = "${ -> propertyOrEnvOrDefault("solr.node.distUrl", "SOLR_NODE_DIST_URL", '') }"
if (!nodeDistUrl.isEmpty()) {
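The catalog plugin alias is applied here by id (libs.plugins.nodegradle.node.get().pluginId) because script plugins applied with "apply from:" cannot use the plugins {} DSL. In an ordinary project build script the same entry could be applied directly; a minimal, illustrative sketch:

    plugins {
        alias(libs.plugins.nodegradle.node)
    }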
diff --git a/gradle/solr/packaging.gradle b/gradle/solr/packaging.gradle
index bb3fd5703ab..1b5325f908b 100644
--- a/gradle/solr/packaging.gradle
+++ b/gradle/solr/packaging.gradle
@@ -36,11 +36,11 @@
// I don't know how to untie these two cleanly.
//
-configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules:") || project.path.startsWith(":solr:prometheus-exporter") }) {
+configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules:") || project.path == ":solr:prometheus-exporter" || project.path == ":solr:cross-dc-manager" }) {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
packagingDir = file("${buildDir}/packaging")
- if (project.path.startsWith(":solr:prometheus-exporter")) {
+ if (project.path.startsWith(":solr:prometheus-exporter") || project.path.startsWith(":solr:cross-dc-manager")) {
deps = packagingDir
} else {
deps = file("${packagingDir}/${project.name}")
@@ -62,6 +62,12 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules
solrPlatformLibs project(":solr:solrj-zookeeper")
// libExt has logging libs, which we don't want. Lets users decide what they want.
solrPlatformLibs project(path: ":solr:server", configuration: 'libExt')
+
+ // The cross-dc-manager uses the cross-dc Solr module libraries as well as the Jetty server jars
+ if (project.path == ":solr:cross-dc-manager") {
+ solrPlatformLibs project(":solr:modules:cross-dc")
+ solrPlatformLibs project(path: ":solr:server", configuration: 'serverLib')
+ }
}
// An aggregate that configures lib and test-lib in a temporary location.
diff --git a/gradle/template.gradle.properties b/gradle/template.gradle.properties
index c52e3048a77..79b18753f43 100644
--- a/gradle/template.gradle.properties
+++ b/gradle/template.gradle.properties
@@ -49,6 +49,11 @@
# tests.minheapsize=512m
# tests.jvmargs=-XX:+UseParallelGC -XX:TieredStopAtLevel=1 -XX:ActiveProcessorCount=1
#
+# If you want tests to produce an HTML report (which IntelliJ turns into a clickable link
+# at the end of a failed build), set this to true. It defaults to false to save a few seconds.
+#
+# tests.html=false
+#
#################
# Gradle Daemon #
#################
@@ -98,5 +103,8 @@ org.gradle.workers.max=@MAX_WORKERS@
# Maximum number of test JVMs forked per test task.
tests.jvms=@TEST_JVMS@
+# By default, skip HTML report generation
+tests.html=false
+
# Disable auto JVM provisioning (we don't use toolchains yet but want no surprises).
org.gradle.java.installations.auto-download=false
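This default pairs with the change to defaults-tests.gradle later in this patch, where reports.html.required reads the tests.html Gradle property. The report can therefore be re-enabled per run without editing the template, for example:

    ./gradlew test -Ptests.html=true

or by setting tests.html=true in the generated gradle.properties.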
diff --git a/gradle/testing/alternative-jdk-support.gradle b/gradle/testing/alternative-jdk-support.gradle
index 72cdabdab4b..97e5311ee09 100644
--- a/gradle/testing/alternative-jdk-support.gradle
+++ b/gradle/testing/alternative-jdk-support.gradle
@@ -50,7 +50,7 @@ if (jvmGradle != jvmCurrent) {
doFirst {
def jvmInfo = { JavaInfo javaInfo ->
- JvmInstallationMetadata jvmMetadata = jvmDetector.getMetadata(new InstallationLocation(javaInfo.javaHome, "specific path"))
+ JvmInstallationMetadata jvmMetadata = jvmDetector.getMetadata(InstallationLocation.userDefined(javaInfo.javaHome, "specific path"))
return "${jvmMetadata.languageVersion} (${jvmMetadata.displayName} ${jvmMetadata.runtimeVersion}, home at: ${jvmMetadata.javaHome})"
}
@@ -87,6 +87,6 @@ if (jvmGradle != jvmCurrent) {
// Set up root project's properties.
rootProject.ext.runtimeJavaHome = jvmCurrent.javaHome
-rootProject.ext.runtimeJavaVersion = jvmDetector.getMetadata(new InstallationLocation(jvmCurrent.javaHome, "specific path")).getLanguageVersion()
+rootProject.ext.runtimeJavaVersion = jvmDetector.getMetadata(InstallationLocation.userDefined(jvmCurrent.javaHome, "specific path")).getLanguageVersion()
rootProject.ext.usesAltJvm = (jvmGradle != jvmCurrent);
diff --git a/gradle/testing/beasting.gradle b/gradle/testing/beasting.gradle
index 8934100ec10..67c20140ba8 100644
--- a/gradle/testing/beasting.gradle
+++ b/gradle/testing/beasting.gradle
@@ -27,7 +27,7 @@ def beastingMode = gradle.startParameter.taskNames.any{ name -> name == 'beast'
allprojects {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
testOptions += [
[propName: 'tests.dups', value: 0, description: "Reiterate runs of entire test suites ('beast' task)."]
]
diff --git a/gradle/testing/defaults-tests.gradle b/gradle/testing/defaults-tests.gradle
index d291ca85a40..9241720e8c3 100644
--- a/gradle/testing/defaults-tests.gradle
+++ b/gradle/testing/defaults-tests.gradle
@@ -18,7 +18,6 @@
import org.apache.tools.ant.taskdefs.condition.Os
import org.apache.tools.ant.types.Commandline
import org.gradle.api.tasks.testing.logging.*
-import org.apache.lucene.gradle.ErrorReportingTestListener
def resources = scriptResources(buildscript)
def verboseModeHookInstalled = false
@@ -112,12 +111,6 @@ allprojects {
ignoreFailures = resolvedTestOption("tests.haltonfailure").toBoolean() == false
jvmArgs Commandline.translateCommandline(resolvedTestOption("tests.jvmargs"))
-
- // Up to JDK-15 we have to enforce --illegal-access=deny, because we want no code to access
- // JDK internals; JDK-16 and later will default to deny, see https://openjdk.java.net/jeps/396:
- if (rootProject.runtimeJavaVersion < JavaVersion.VERSION_16) {
- jvmArgs '--illegal-access=deny'
- }
def loggingConfigFile = layout.projectDirectory.file("${resources}/logging.properties")
def tempDir = layout.projectDirectory.dir(testsTmpDir.toString())
@@ -154,7 +147,7 @@ allprojects {
}
// Disable HTML report generation. The reports are big and slow to generate.
- reports.html.required = false
+ reports.html.required = Boolean.parseBoolean(providers.gradleProperty("tests.html").getOrElse("false"))
// Set up logging.
testLogging {
@@ -173,7 +166,7 @@ allprojects {
}
def spillDir = getTemporaryDir().toPath()
- def listener = new ErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode)
+ def listener = buildinfra.newErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode)
addTestOutputListener(listener)
addTestListener(listener)
diff --git a/gradle/testing/failed-tests-at-end.gradle b/gradle/testing/failed-tests-at-end.gradle
index 5bffe9c9926..5b3381751d4 100644
--- a/gradle/testing/failed-tests-at-end.gradle
+++ b/gradle/testing/failed-tests-at-end.gradle
@@ -15,8 +15,6 @@
* limitations under the License.
*/
-import org.apache.lucene.gradle.ErrorReportingTestListener
-
// Display all failed tests at the end of the build.
def failedTests = new LinkedHashSet() // for dedupe due to weird afterTest classMethod issue
@@ -29,7 +27,7 @@ def genFailInfo(def task, TestDescriptor desc) {
historyUrl += "&tests.test=$desc.name"
historyUrl += " http://fucit.org/solr-jenkins-reports/history-trend-of-recent-failures.html#series/$name"
}
- def logName = ErrorReportingTestListener.getOutputLogName(desc.parent ?: desc)
+ def logName = buildinfra.getOutputLogName(desc.parent ?: desc)
def output = file("${task.testOutputsDir}/${logName}")
def repro = "./gradlew ${task.project.path}:test --tests \"${name}\" ${task.project.testOptionsForReproduceLine}"
return ["name": name, "project": "${task.project.path}", "historyUrl": historyUrl, "output": output, "reproduce": repro]
diff --git a/gradle/testing/profiling.gradle b/gradle/testing/profiling.gradle
index 34b3efe59fa..8b1e5147efc 100644
--- a/gradle/testing/profiling.gradle
+++ b/gradle/testing/profiling.gradle
@@ -15,13 +15,11 @@
* limitations under the License.
*/
-import org.apache.lucene.gradle.ProfileResults;
-
def recordings = files()
allprojects {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
testOptions += [
[propName: 'tests.profile', value: false, description: "Enable java flight recorder profiling."]
]
@@ -48,7 +46,7 @@ allprojects {
gradle.buildFinished {
if (!recordings.isEmpty()) {
- ProfileResults.printReport(recordings.getFiles().collect { it.toString() },
+ buildinfra.profileResultsClass().printReport(recordings.getFiles().collect { it.toString() },
propertyOrDefault(ProfileResults.MODE_KEY, ProfileResults.MODE_DEFAULT) as String,
Integer.parseInt(propertyOrDefault(ProfileResults.STACKSIZE_KEY, ProfileResults.STACKSIZE_DEFAULT)),
Integer.parseInt(propertyOrDefault(ProfileResults.COUNT_KEY, ProfileResults.COUNT_DEFAULT)),
diff --git a/gradle/testing/randomization.gradle b/gradle/testing/randomization.gradle
index 9c809fc69e9..d3ae962c144 100644
--- a/gradle/testing/randomization.gradle
+++ b/gradle/testing/randomization.gradle
@@ -30,7 +30,7 @@ buildscript {
}
dependencies {
- classpath 'com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.7.9'
+ classpath libs.carrotsearch.randomizedtesting.runner
}
}
@@ -79,7 +79,7 @@ allprojects {
// Configure test property defaults and their descriptions.
allprojects {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
testOptions += [
// seed, repetition and amplification.
[propName: 'tests.seed', value: { -> rootSeed }, description: "Sets the master randomization seed."],
@@ -124,7 +124,7 @@ allprojects {
// Add Solr-specific test configs settings.
configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
testOptions += [
[propName: 'tests.src.home', value: null, description: "See SOLR-14023."],
[propName: 'solr.tests.use.numeric.points', value: null, description: "Point implementation to use (true=numerics, false=trie)."],
@@ -137,14 +137,14 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) {
allprojects {
plugins.withType(JavaPlugin) {
afterEvaluate {
- ext.testOptionsResolved = testOptions.findAll { opt ->
+ project.ext.testOptionsResolved = testOptions.findAll { opt ->
propertyOrDefault(opt.propName, opt.value) != null
}.collectEntries { opt ->
[(opt.propName): Objects.toString(resolvedTestOption(opt.propName))]
}
// Compute the "reproduce with" string.
- ext.testOptionsForReproduceLine = testOptions.findAll { opt ->
+ project.ext.testOptionsForReproduceLine = testOptions.findAll { opt ->
if (opt["includeInReproLine"] == false) {
return false
}
@@ -199,7 +199,12 @@ allprojects {
// Enable security manager, if requested. We could move the selection of security manager and security policy
// to each project's build/ configuration but it seems compact enough to keep it here for now.
- if (Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"])) {
+ def useSecurityManager = Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"]);
+ // Allow the project to override this
+ if (project.ext.has("useSecurityManager")) {
+ useSecurityManager = project.ext.get("useSecurityManager")
+ }
+ if (useSecurityManager) {
def commonSolrDir = project(':solr').layout.projectDirectory
def javaSecurityPolicy = layout.projectDirectory.file("${resources}/policies/solr-tests.policy")
jvmArgumentProviders.add(
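The override added above lets an individual subproject force the security manager on or off for its own tests, independent of the global tests.useSecurityManager option. A minimal sketch of how a module would opt out (hypothetical module path, illustrative only):

    // solr/modules/some-module/build.gradle
    ext {
        useSecurityManager = false
    }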
diff --git a/gradle/testing/randomization/policies/solr-tests.policy b/gradle/testing/randomization/policies/solr-tests.policy
index dae3f218ec3..4d61f7985c0 100644
--- a/gradle/testing/randomization/policies/solr-tests.policy
+++ b/gradle/testing/randomization/policies/solr-tests.policy
@@ -50,12 +50,17 @@ grant {
permission java.net.SocketPermission "127.0.0.1:4", "connect,resolve";
permission java.net.SocketPermission "127.0.0.1:6", "connect,resolve";
permission java.net.SocketPermission "127.0.0.1:8", "connect,resolve";
+ // Used as an invalid ZK host
+ permission java.net.SocketPermission "----------:33332", "connect,resolve";
// Basic permissions needed for Lucene to work:
permission java.util.PropertyPermission "*", "read,write";
// needed by randomizedtesting runner to identify test methods.
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+ permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.langchain4j.model.cohere";
+ permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.ai4j.openai4j";
+ permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.langchain4j.model.huggingface";
permission java.lang.RuntimePermission "accessDeclaredMembers";
// needed by certain tests to redirect sysout/syserr:
permission java.lang.RuntimePermission "setIO";
@@ -85,10 +90,12 @@ grant {
// needed by bytebuddy
permission java.lang.RuntimePermission "defineClass";
permission java.lang.RuntimePermission "net.bytebuddy.createJavaDispatcher";
+ permission java.lang.RuntimePermission "net.bytebuddy.agent.getInstrumentation";
permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.method";
permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.type";
permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.dynamic.loading";
permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.utility";
+
// needed by mockito
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
permission java.lang.RuntimePermission "reflectionFactoryAccess";
@@ -96,7 +103,7 @@ grant {
permission java.lang.RuntimePermission "closeClassLoader";
// needed by HttpSolrClient
permission java.lang.RuntimePermission "getFileSystemAttributes";
- // needed by hadoop auth (TODO: there is a cleaner way to handle this)
+ // needed by hadoop hdfs (TODO: there is a cleaner way to handle this)
permission java.lang.RuntimePermission "loadLibrary.jaas";
permission java.lang.RuntimePermission "loadLibrary.jaas_unix";
permission java.lang.RuntimePermission "loadLibrary.jaas_nt";
@@ -107,6 +114,8 @@ grant {
permission java.lang.RuntimePermission "writeFileDescriptor";
// needed by hadoop http
permission java.lang.RuntimePermission "getProtectionDomain";
+ // SolrProcessMgr to list processes
+ permission java.lang.RuntimePermission "manageProcess";
// These two *have* to be spelled out separately
permission java.lang.management.ManagementPermission "control";
@@ -129,17 +138,19 @@ grant {
permission javax.management.MBeanServerPermission "findMBeanServer";
permission javax.management.MBeanServerPermission "releaseMBeanServer";
permission javax.management.MBeanTrustPermission "register";
-
- // needed by hadoop auth
+
+ // needed by hadoop hdfs
permission javax.security.auth.AuthPermission "getSubject";
permission javax.security.auth.AuthPermission "modifyPrincipals";
permission javax.security.auth.AuthPermission "doAs";
- permission javax.security.auth.AuthPermission "getLoginConfiguration";
- permission javax.security.auth.AuthPermission "setLoginConfiguration";
permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
permission javax.security.auth.AuthPermission "modifyPublicCredentials";
permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials * \"*\"", "read";
+ // needed by crossdc
+ permission javax.security.auth.AuthPermission "getLoginConfiguration";
+ permission javax.security.auth.AuthPermission "setLoginConfiguration";
+
// needed by hadoop security
permission java.security.SecurityPermission "putProviderProperty.SaslPlainServer";
permission java.security.SecurityPermission "insertProvider";
@@ -153,6 +164,12 @@ grant {
// needed by s3mock
permission java.lang.RuntimePermission "accessClassInPackage.sun.nio.fs";
+ // needed for kafka mockito
+ permission java.lang.RuntimePermission "manageProcess";
+ permission java.io.FilePermission "${/}proc${/}self${/}io", "read";
+ permission java.io.FilePermission "${java.home}${/}bin${/}java", "execute";
+ permission java.io.FilePermission "${java.home}${/}bin${/}java.exe", "execute";
+
// SSL related properties for Solr tests
permission javax.net.ssl.SSLPermission "setDefaultSSLContext";
@@ -242,6 +259,11 @@ grant {
// expanded to a wildcard if set, allows all networking everywhere
permission java.net.SocketPermission "${solr.internal.network.permission}", "accept,listen,connect,resolve";
+
+ // Run java
+ permission java.io.FilePermission "${java.home}${/}-", "execute";
+ // Required by SolrProcessManager on Windows to find Solr processes, used by StatusTool (CLI)
+ permission java.io.FilePermission "<<ALL FILES>>", "execute";
};
// Grant all permissions to Gradle test runner classes.
diff --git a/gradle/testing/slowest-tests-at-end.gradle b/gradle/testing/slowest-tests-at-end.gradle
index eaf9cd1a2f1..d24e523394d 100644
--- a/gradle/testing/slowest-tests-at-end.gradle
+++ b/gradle/testing/slowest-tests-at-end.gradle
@@ -22,7 +22,7 @@ def allSuites = []
allprojects {
plugins.withType(JavaPlugin) {
- ext {
+ project.ext {
testOptions += [
[propName: 'tests.slowestTests', value: true, description: "Print the summary of the slowest tests."],
[propName: 'tests.slowestSuites', value: true, description: "Print the summary of the slowest suites."]
diff --git a/gradle/validation/check-environment.gradle b/gradle/validation/check-environment.gradle
index d9ea66b694e..f56e9fa4e78 100644
--- a/gradle/validation/check-environment.gradle
+++ b/gradle/validation/check-environment.gradle
@@ -22,7 +22,7 @@ import org.gradle.util.GradleVersion
configure(rootProject) {
ext {
- expectedGradleVersion = '8.4'
+ expectedGradleVersion = libs.versions.gradle.get()
}
wrapper {
@@ -31,6 +31,7 @@ configure(rootProject) {
}
def currentJavaVersion = JavaVersion.current()
+ def minJavaVersion = JavaVersion.toVersion(libs.versions.java.min.get())
if (currentJavaVersion < minJavaVersion) {
throw new GradleException("At least Java ${minJavaVersion} is required, you are running Java ${currentJavaVersion} "
+ "[${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]")
diff --git a/gradle/validation/dependencies.gradle b/gradle/validation/dependencies.gradle
new file mode 100644
index 00000000000..cfb78ee15a2
--- /dev/null
+++ b/gradle/validation/dependencies.gradle
@@ -0,0 +1,346 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Helper function for specifying stable versions for dependency updates
+// https://github.com/ben-manes/gradle-versions-plugin
+def isNonStable = { String version ->
+ def stableKeyword = ['RELEASE', 'FINAL', 'GA'].any { it -> version.toUpperCase().contains(it) }
+ def regex = /^[0-9,.v-]+(-r)?$/
+ return !stableKeyword && !(version ==~ regex)
+}
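+// For reference: a predicate like this is typically handed to the versions plugin's
+// dependencyUpdates task along these lines (illustrative sketch only; the actual
+// wiring used by this build may differ or live elsewhere):
+//
+//   tasks.named("dependencyUpdates").configure {
+//     rejectVersionIf { isNonStable(it.candidate.version) && !isNonStable(it.currentVersion) }
+//   }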
+
+// Configure sanity check for conflicting dependencies across certain configurations
+allprojects {
+ apply plugin: libs.plugins.carrotsearch.dependencychecks.get().pluginId
+
+ def consolidatedConfigurations = project.configurations.matching {
+ it.name in [
+ "annotationProcessor",
+ "compileClasspath",
+ "libExt",
+ "packaging",
+ "runtimeClasspath",
+ "runtimeLibs",
+ "server",
+ "serverLib",
+ "solrCore",
+ "solrFullTgz",
+ "solrPlatformLibs",
+ "solrSlimTgz",
+ "testCompileClasspath",
+ "testRuntimeClasspath",
+ ]
+ }
+
+ dependencyVersionChecks {
+ lockFileComment = "An inventory of resolved dependency versions. Do not edit this file directly."
+
+ configurationGroups {
+ // consolidated_dependencies is a configuration group that is used
+ // to check for conflicting versions of the included configurations
+ consolidated_dependencies {
+ include consolidatedConfigurations
+ }
+ }
+ }
+
+ dependencies {
+ modules {
+ module("org.hamcrest:hamcrest-core") {
+ replacedBy("org.hamcrest:hamcrest", "hamcrest-core was renamed to hamcrest")
+ }
+ }
+
+ constraints { handler ->
+ consolidatedConfigurations.configureEach { Configuration conf ->
+ // Add BOMs as they resolve many dependency conflicts
+ handler.add(conf.name, libs.amazon.awssdk.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+ handler.add(conf.name, libs.google.cloud.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+ handler.add(conf.name, libs.fasterxml.jackson.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+ handler.add(conf.name, libs.opentelemetry.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+ handler.add(conf.name, libs.netty.bom, {
+ because 'version alignment with known BOM for consistency across project'
+ })
+
+ // Add known dependencies that have multiple versions as constraints
+ // to align versions
+ handler.add(conf.name, libs.google.guava, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.google.errorprone.annotations, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.commons.exec, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.xerial.snappy.java, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.context, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.commonscli.commonscli, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.commonscodec.commonscodec, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.commonsio.commonsio, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.junit.junit, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.core, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.protobuf, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.jakarta.annotation.api, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.commons.lang3, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.stub, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.eclipse.jetty.server, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.fasterxml.woodstox.core, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.slf4j.api, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.slf4j.jultoslf4j, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.commons.compress, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.benmanes.caffeine, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.carrotsearch.hppc, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.log4j.api, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.grpc.api, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.slf4j.jcloverslf4j, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.netty.codechttp, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.bc.jose4j, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.lmax.disruptor, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.httpcomponents.httpclient, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.httpcomponents.httpcore, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.httpcomponents.httpmime, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.zookeeper.zookeeper, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.apache.zookeeper.jute, {
+ because 'version alignment for consistency across project'
+ })
+ handler.add(conf.name, libs.hamcrest.hamcrest, {
+ because 'version alignment for consistency across project'
+ })
+
+ // Add transitive dependencies as constraints to align versions
+ handler.add(conf.name, libs.checkerframework.qual, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.guava)} uses 3.42.0" +
+ "\n- ${getFullName(libs.benmanes.caffeine)} uses 3.37.0" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 3.44.0"
+ })
+ handler.add(conf.name, libs.ow2.asm, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.apache.lucene.expressions)} uses 7.2" +
+ "\n- ${getFullName(libs.apache.tika.parsers)} uses 9.3"
+ })
+ handler.add(conf.name, libs.google.protobuf.java, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.errorprone.core)} uses 3.19.6" +
+ "\n- ${getFullName(libs.apache.tika.parsers)} uses 3.21.5" +
+ "\n- ${getFullName(libs.apache.calcite.avatica.core)} uses 3.21.9" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 3.25.3" +
+ "\n- ${getFullName(libs.google.cloud.core)} uses 3.25.3"
+ })
+ handler.add(conf.name, libs.google.gson, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.grpc.core)} uses 2.10.1" +
+ "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.9.1" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 2.11.0 and 2.10.1" +
+ "\n- ${getFullName(libs.google.protobuf.java)} uses 2.8.9" +
+ "\n- ${getFullName(libs.google.cloud.core)} uses 2.8.9 and 2.10.1" +
+ "\n- ${getFullName(libs.google.auth.oauth2http)} uses 2.10.1"
+ })
+ handler.add(conf.name, libs.google.autovalue.annotations, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.cloud.corehttp)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.cloud.core)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.api.gax)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.auth.oauth2http)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.cloud.bom)} uses 1.10.4" +
+ "\n- ${getFullName(libs.google.errorprone.core)} uses 1.9"
+ })
+ handler.add(conf.name, libs.apache.commons.text, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.apache.calcite.core)} uses 1.11.0" +
+ "\n- ${getFullName(libs.apache.commons.configuration2)} uses 1.12.0"
+ })
+ handler.add(conf.name, libs.grpc.util, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 1.62.2" +
+ "\n- ${getFullName(libs.grpc.netty)} uses 1.65.1"
+ })
+ handler.add(conf.name, libs.jodatime.jodatime, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.amazon.awssdk.sdkcore)} uses 2.8.1" +
+ "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.2"
+ })
+ handler.add(conf.name, libs.google.api.grpc.proto, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.grpc.protobuf.asProvider())} uses 2.29.0" +
+ "\n- ${getFullName(libs.google.api.gax)} uses 2.41.0" +
+ "\n- ${getFullName(libs.google.api.grpc.proto)} uses 2.41.0" +
+ "\n- ${getFullName(libs.google.cloud.core)} uses 2.41.0" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 2.41.0"
+ })
+ handler.add(conf.name, libs.netty.handler, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final" +
+ "\n- ${getFullName(libs.netty.codechttp)} uses 4.1.112.Final"
+ })
+ handler.add(conf.name, libs.grpc.protobuf.lite, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 1.62.2" +
+ "\n- ${getFullName(libs.grpc.protobuf.asProvider())} uses 1.65.1"
+ })
+ handler.add(conf.name, libs.jaxb.runtime, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.3.5" +
+ "\n- ${getFullName(libs.adobe.testing.s3mock.testsupportcommon)} uses 2.3.8"
+ })
+ handler.add(conf.name, libs.perfmark.api, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.grpc.core)} uses 0.26.0" +
+ "\n- ${getFullName(libs.grpc.netty)} uses 0.26.0" +
+ "\n- ${getFullName(libs.google.cloud.storage)} uses 0.27.0"
+ })
+ handler.add(conf.name, libs.netty.tcnative.boringssl, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.netty.bom)} uses 2.0.66.Final" +
+ "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 2.0.61.Final"
+ })
+ handler.add(conf.name, libs.netty.transport.classes.epoll, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.netty.bom)} uses 4.1.114.Final" +
+ "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final"
+ })
+ handler.add(conf.name, libs.netty.transport.native.epoll, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.netty.bom)} uses 4.1.114.Final" +
+ "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final"
+ })
+ handler.add(conf.name, libs.google.j2objc.annotations, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.google.guava)} uses 3.0.0" +
+ "\n- ${getFullName(libs.google.protobuf.javautils)} uses 2.8"
+ })
+ handler.add(conf.name, libs.apiguardian.api, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.apache.calcite.core)} uses 1.1.2" +
+ "\n- ${getFullName(libs.junit.junit)} (api) uses 1.1.0"
+ })
+ handler.add(conf.name, libs.hk2.locator, {
+ because "transitive version alignment for consistency across project" +
+ "\n- ${getFullName(libs.jersey.inject.hk2)} uses 3.0.6" +
+ "\n- ${getFullName(libs.hk2.api)} uses 3.1.1"
+ })
+ }
+ }
+ }
+}
+
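+// Renders a resolved version-catalog entry as "group:name" for the constraint messages above,
+// e.g. getFullName(libs.google.guava) would yield "com.google.guava:guava" (illustrative).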
+static def getFullName(Provider dependency) {
+ def resolvedDep = dependency.get()
+ return "${resolvedDep.module.group}:${resolvedDep.module.name}"
+}
+
+// Configure the version catalog update (cleanup) plugin.
+configure(rootProject) {
+ apply plugin: libs.plugins.littlerobots.versioncatalogupdate.get().pluginId
+
+ versionCatalogUpdate {
+ sortByKey = true
+ }
+
+ tasks.matching { it.name == "tidy" }.configureEach {
+ it.dependsOn(":versionCatalogFormat")
+ }
+
+ tasks.matching {
+ it.path in [":versionCatalogUpdate"]
+ }.configureEach {
+ it.interactive = true
+ }
+
+ tasks.register("updateLibs", {
+ dependsOn ":versionCatalogUpdate"
+ })
+
+ // For dependencyUpdates, only recommend stable version candidates when the current version is stable
+ // https://github.com/ben-manes/gradle-versions-plugin
+ tasks.named("dependencyUpdates").configure {
+ checkConstraints = true
+ checkBuildEnvironmentConstraints = true
+ rejectVersionIf {
+ isNonStable(it.candidate.version) && !isNonStable(it.currentVersion)
+ }
+ }
+}
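+
+// Typical usage (assumed invocations): './gradlew updateLibs' updates gradle/libs.versions.toml
+// via the version catalog plugin, while './gradlew dependencyUpdates' reports newer dependency
+// versions, restricted to stable candidates by the filter above when the current version is stable.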
diff --git a/gradle/validation/dependency-analyze.gradle b/gradle/validation/dependency-analyze.gradle
index 1f35012ecf2..92125aba11c 100644
--- a/gradle/validation/dependency-analyze.gradle
+++ b/gradle/validation/dependency-analyze.gradle
@@ -20,7 +20,7 @@
allprojects { prj ->
plugins.withId("java", {
- prj.apply plugin: 'ca.cutterslade.analyze'
+ prj.apply plugin: libs.plugins.cutterslade.analyze.get().pluginId
analyzeClassesDependencies {
warnUsedUndeclared = false // means fail build if UsedUndeclared found
diff --git a/gradle/validation/ecj-lint.gradle b/gradle/validation/ecj-lint.gradle
index f47f70587a1..86f30cd5f1f 100644
--- a/gradle/validation/ecj-lint.gradle
+++ b/gradle/validation/ecj-lint.gradle
@@ -23,7 +23,7 @@ configure(rootProject) {
}
dependencies {
- ecjDeps "org.eclipse.jdt:ecj:${scriptDepVersions['ecj']}"
+ ecjDeps libs.eclipse.jdt.ecj
}
}
diff --git a/gradle/validation/ecj-lint/ecj.javadocs.prefs b/gradle/validation/ecj-lint/ecj.javadocs.prefs
index 975707055ff..74278547699 100644
--- a/gradle/validation/ecj-lint/ecj.javadocs.prefs
+++ b/gradle/validation/ecj-lint/ecj.javadocs.prefs
@@ -5,8 +5,8 @@ org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annota
org.eclipse.jdt.core.compiler.annotation.nonnullisdefault=disabled
org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=11
-org.eclipse.jdt.core.compiler.compliance=11
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=17
+org.eclipse.jdt.core.compiler.compliance=17
org.eclipse.jdt.core.compiler.doc.comment.support=enabled
org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
@@ -93,4 +93,4 @@ org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disa
org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore
org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore
org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error
-org.eclipse.jdt.core.compiler.source=11
+org.eclipse.jdt.core.compiler.source=17
diff --git a/gradle/validation/error-prone.gradle b/gradle/validation/error-prone.gradle
index 00e14ed0eab..b7242b566c0 100644
--- a/gradle/validation/error-prone.gradle
+++ b/gradle/validation/error-prone.gradle
@@ -37,24 +37,25 @@ if (skipReason) {
allprojects { prj ->
plugins.withType(JavaPlugin) {
- // LUCENE-9650: Errorprone on master/gradle does not work when running as plugin
- // inside a forked Javac process. Javac running inside Gradle works, because we have
- // additional module system opens in place.
- // This is a hack to keep the dependency (so that palantir's version check doesn't complain)
- // but don't include the plugin (which fails on JDK16+).
+ // LUCENE-9650: Errorprone does not work when running as a plugin inside a forked Javac process.
+ // Javac running inside Gradle works, because we have additional module system opens in place.
if (skipReason) {
tasks.withType(JavaCompile) { task -> task.dependsOn ":errorProneSkipped" }
+
+ // The Error Prone plugin adds error-prone to the test classpath. We need to add it here too
+ // (manually) so that versions.lock stays consistent with or without Error Prone.
configurations {
errorprone
}
dependencies {
- errorprone("com.google.errorprone:error_prone_core")
+ errorprone libs.google.errorprone.core
}
+ configurations.annotationProcessor.extendsFrom(configurations.errorprone)
} else {
- prj.apply plugin: 'net.ltgt.errorprone'
+ prj.apply plugin: libs.plugins.ltgt.errorprone.get().pluginId
dependencies {
- errorprone("com.google.errorprone:error_prone_core")
+ errorprone libs.google.errorprone.core
}
tasks.withType(JavaCompile) { task ->
@@ -179,6 +180,7 @@ allprojects { prj ->
'-Xep:MathRoundIntLong:ERROR',
// '-Xep:MislabeledAndroidString:OFF', // we don't use android
'-Xep:MisplacedScopeAnnotations:ERROR',
+ // '-Xep:MissingRuntimeRetention:ERROR', // todo check if useful or comment why not
// '-Xep:MissingSuperCall:OFF', // we don't use this annotation
// '-Xep:MissingTestCall:OFF', // we don't use this annotation
'-Xep:MisusedDayOfYear:ERROR',
@@ -218,12 +220,15 @@ allprojects { prj ->
'-Xep:RandomCast:ERROR',
'-Xep:RandomModInteger:ERROR',
// '-Xep:RectIntersectReturnValueIgnored:OFF', // we don't use android
+ // '-Xep:RedundantSetterCall:ERROR', // todo check if useful or comment why not
// '-Xep:RequiredModifiers:OFF', // we don't use this annotation
// '-Xep:RestrictedApiChecker:OFF', // we don't use this annotation
// '-Xep:ReturnValueIgnored:OFF', // todo there are problems that should be fixed
+ // '-Xep:SelfAssertion:ERROR', // todo check if useful or comment why not
'-Xep:SelfAssignment:ERROR',
'-Xep:SelfComparison:ERROR',
'-Xep:SelfEquals:ERROR',
+ // '-Xep:SetUnrecognized:ERROR', // todo check if useful or comment why not
// '-Xep:ShouldHaveEvenArgs:OFF', // we don't use truth
'-Xep:SizeGreaterThanOrEqualsZero:ERROR',
'-Xep:StreamToString:ERROR',
@@ -236,7 +241,6 @@ allprojects { prj ->
// '-Xep:ThrowIfUncheckedKnownChecked:OFF', // we don't use this annotation
'-Xep:ThrowNull:ERROR',
'-Xep:TreeToString:ERROR',
- // '-Xep:TruthSelfEquals:OFF', // we don't use truth
'-Xep:TryFailThrowable:ERROR',
'-Xep:TypeParameterQualifier:ERROR',
'-Xep:UnicodeDirectionalityCharacters:ERROR',
@@ -265,6 +269,7 @@ allprojects { prj ->
'-Xep:AssertionFailureIgnored:WARN',
'-Xep:AssistedInjectAndInjectOnSameConstructor:WARN',
'-Xep:AttemptedNegativeZero:WARN',
+ // '-Xep:AutoValueBoxedValues:WARN', // todo check if useful or comment why not
// '-Xep:AutoValueFinalMethods:OFF', // we don't use autovalue
// '-Xep:AutoValueImmutableFields:OFF', // we don't use autovalue
// '-Xep:AutoValueSubclassLeaked:OFF', // we don't use autovalue
@@ -285,6 +290,7 @@ allprojects { prj ->
'-Xep:ChainedAssertionLosesContext:WARN',
'-Xep:CharacterGetNumericValue:WARN',
'-Xep:ClassCanBeStatic:WARN',
+ // '-Xep:ClassInitializationDeadlock:WARN', // todo check if useful or comment why not
'-Xep:ClassNewInstance:WARN',
// '-Xep:CloseableProvides:OFF', // we don't use this annotation
'-Xep:ClosingStandardOutputStreams:WARN',
@@ -296,6 +302,8 @@ allprojects { prj ->
'-Xep:DateChecker:WARN',
'-Xep:DateFormatConstant:WARN',
// '-Xep:DefaultCharset:OFF', // we have forbiddenapis for that
+ //'-Xep:DeeplyNested:WARN', // todo check if useful or comment why not
+ //'-Xep:DefaultLocale:WARN', // todo check if useful or comment why not
'-Xep:DefaultPackage:WARN',
'-Xep:DeprecatedVariable:WARN',
'-Xep:DirectInvocationOnMock:WARN',
@@ -309,6 +317,7 @@ allprojects { prj ->
'-Xep:EmptyBlockTag:WARN',
// '-Xep:EmptyCatch:OFF', // todo check if useful or comment why not - might be handled by ECJ?
// '-Xep:EmptySetMultibindingContributions:OFF', // we don't use this annotation
+ // '-Xep:EnumOrdinal:WARN', // todo check if useful or comment why not
'-Xep:EqualsGetClass:WARN',
'-Xep:EqualsIncompatibleType:WARN',
'-Xep:EqualsUnsafeCast:WARN',
@@ -330,6 +339,7 @@ allprojects { prj ->
// '-Xep:FragmentNotInstantiable:OFF', // we don't use android
// '-Xep:FutureReturnValueIgnored:OFF', // todo there are problems that should be fixed
'-Xep:GetClassOnEnum:WARN',
+ // '-Xep:GuiceNestedCombine:WARN', // todo check if useful or comment why not
'-Xep:HidingField:WARN',
'-Xep:ICCProfileGetInstance:WARN',
'-Xep:IdentityHashMapUsage:WARN',
@@ -383,6 +393,7 @@ allprojects { prj ->
'-Xep:JodaPlusMinusLong:WARN',
'-Xep:JodaTimeConverterManager:WARN',
'-Xep:JodaWithDurationAddedLong:WARN',
+ // '-Xep:JUnitIncompatibleType:WARN', // todo check if useful or comment why not
// '-Xep:LabelledBreakTarget:OFF', // stylistic
'-Xep:LiteEnumValueOf:WARN',
'-Xep:LiteProtoToString:WARN',
@@ -403,10 +414,12 @@ allprojects { prj ->
// '-Xep:MissingSummary:OFF', // style preference that we don't want to enforce
// '-Xep:MixedMutabilityReturnType:OFF', // todo check if useful or comment why not
'-Xep:MockNotUsedInProduction:WARN',
+ // '-Xep:MockitoDoSetup:WARN', // todo check if useful or comment why not
'-Xep:ModifiedButNotUsed:WARN',
'-Xep:ModifyCollectionInEnhancedForLoop:WARN',
'-Xep:ModifySourceCollectionInStream:WARN',
'-Xep:MultimapKeys:WARN',
+ // '-Xep:MultipleNullnessAnnotations:WARN', // todo check if useful or comment why not
'-Xep:MultipleParallelOrSequentialCalls:WARN',
'-Xep:MultipleUnaryOperatorsInMethodCall:WARN',
// '-Xep:MutableGuiceModule:OFF', // we don't use guice
@@ -428,7 +441,9 @@ allprojects { prj ->
'-Xep:NullableOptional:WARN',
// '-Xep:NullablePrimitive:OFF', // we don't use this annotation
// '-Xep:NullablePrimitiveArray:OFF', // we don't use this annotation
+ // '-Xep:NullableTypeParameter:WARN', // todo check if useful or comment why not
// '-Xep:NullableVoid:OFF', // we don't use this annotation
+ // '-Xep:NullableWildcard:WARN', // todo check if useful or comment why not
'-Xep:ObjectEqualsForPrimitives:WARN',
// '-Xep:ObjectToString:OFF', // todo check if useful or comment why not
'-Xep:ObjectsHashCodePrimitive:WARN',
@@ -442,6 +457,7 @@ allprojects { prj ->
'-Xep:Overrides:WARN',
// '-Xep:OverridesGuiceInjectableMethod:OFF', // we don't use guice
'-Xep:ParameterName:WARN',
+ '-Xep:PatternMatchingInstanceof:WARN',
'-Xep:PreconditionsCheckNotNullRepeated:WARN',
'-Xep:PrimitiveAtomicReference:WARN',
'-Xep:ProtectedMembersInFinalClass:WARN',
@@ -459,6 +475,7 @@ allprojects { prj ->
// '-Xep:SameNameButDifferent:OFF', // todo check if useful or comment why not
'-Xep:SelfAlwaysReturnsThis:WARN',
// '-Xep:ShortCircuitBoolean:OFF', // todo check if useful or comment why not
+ // '-Xep:StatementSwitchToExpressionSwitch:WARN', // todo check if useful or comment why not
// '-Xep:StaticAssignmentInConstructor:OFF', // we assign SolrTestCaseJ4.configString in many tests, difficult to untangle
'-Xep:StaticAssignmentOfThrowable:WARN',
// '-Xep:StaticGuardedByInstance:OFF', // todo check if useful or comment why not
@@ -469,9 +486,12 @@ allprojects { prj ->
'-Xep:StringCharset:WARN',
'-Xep:StringFormatWithLiteral:WARN',
// '-Xep:StringSplitter:OFF', // todo check if useful or comment why not - might be able to use forbidden-apis for this?
+ // '-Xep:SunApi:WARN', // todo check if useful or comment why not
+ // '-Xep:SuperCallToObjectMethod:WARN', // todo check if useful or comment why not
'-Xep:SuperEqualsIsObjectEquals:WARN',
// '-Xep:SwigMemoryLeak:OFF', // we don't use swig
// '-Xep:SynchronizeOnNonFinalField:OFF', // todo check if useful or comment why not
+ // '-Xep:SystemConsoleNull:WARN', // todo check if useful or comment why not
// '-Xep:ThreadJoinLoop:OFF', // todo check if useful or comment why not
// '-Xep:ThreadLocalUsage:OFF', // todo check if useful or comment why not
// '-Xep:ThreadPriorityCheck:OFF', // todo check if useful or comment why not
@@ -493,6 +513,7 @@ allprojects { prj ->
// '-Xep:UnicodeEscape:OFF', // can't enable since Lucene/Solr tests use unicode a bunch
// '-Xep:UnnecessaryAssignment:OFF', // we don't use these annotations
'-Xep:UnnecessaryAsync:WARN',
+ // '-Xep:UnnecessaryBreakInSwitch:WARN', // todo check if useful or comment why not
'-Xep:UnnecessaryLambda:WARN',
'-Xep:UnnecessaryLongToIntConversion:WARN',
'-Xep:UnnecessaryMethodInvocationMatcher:WARN',
@@ -513,6 +534,7 @@ allprojects { prj ->
// '-Xep:UseBinds:OFF', // we don't use this annotation
// '-Xep:UseCorrectAssertInTests:OFF', // we inherit from LuceneTestCase which extends Assert
'-Xep:VariableNameSameAsType:WARN',
+ // '-Xep:VoidUsed:WARN', // todo check if useful or comment why not
// '-Xep:WaitNotInLoop:OFF', // todo check if useful or comment why not
// '-Xep:WakelockReleasedDangerously:OFF', // we don't use android
// '-Xep:WithSignatureDiscouraged:OFF', // we aren't using this error-prone internal api
diff --git a/gradle/validation/forbidden-apis.gradle b/gradle/validation/forbidden-apis.gradle
index 0ffb3a3a272..2a09ff49dda 100644
--- a/gradle/validation/forbidden-apis.gradle
+++ b/gradle/validation/forbidden-apis.gradle
@@ -23,7 +23,7 @@ def resources = scriptResources(buildscript)
// Only apply forbidden-apis to java projects.
allprojects { prj ->
plugins.withId("java", {
- prj.apply plugin: 'de.thetaphi.forbiddenapis'
+ prj.apply plugin: libs.plugins.thetaphi.forbiddenapis.get().pluginId
// This helper method appends signature files based on a set of true
// dependencies from a given configuration.
diff --git a/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt
new file mode 100644
index 00000000000..469fef8238f
--- /dev/null
+++ b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt
@@ -0,0 +1,16 @@
+@defaultMessage Use an org.apache.commons.cli.Option instead of a String value
+org.apache.commons.cli.CommandLine#hasOption(java.lang.String)
+org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String)
+org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String, java.lang.String)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String, java.lang.Object)
+org.apache.commons.cli.CommandLine#hasOption(char)
+org.apache.commons.cli.CommandLine#getOptionValue(char)
+org.apache.commons.cli.CommandLine#getOptionValue(char, java.lang.String)
+#org.apache.commons.cli.CommandLine#getOptionValue(char, Supplier)
+org.apache.commons.cli.CommandLine#getOptionValues(char)
+org.apache.commons.cli.CommandLine#getOptionValues(java.lang.String)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(char)
+# org.apache.commons.cli.CommandLine#getParsedOptionValue(char, Supplier)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(char, java.lang.Object)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String)
+# org.apache.commons.cli.CommandLine#getParsedOptionValue(String, Supplier)
diff --git a/gradle/validation/git-status.gradle b/gradle/validation/git-status.gradle
index b34cf831ef7..8a43e7c7b3d 100644
--- a/gradle/validation/git-status.gradle
+++ b/gradle/validation/git-status.gradle
@@ -33,7 +33,7 @@ buildscript {
}
dependencies {
- classpath "org.eclipse.jgit:org.eclipse.jgit:${scriptDepVersions['jgit']}"
+ classpath libs.eclipse.jgit.jgit
}
}
diff --git a/gradle/validation/jar-checks.gradle b/gradle/validation/jar-checks.gradle
index d416a9561fd..650a3b3337b 100644
--- a/gradle/validation/jar-checks.gradle
+++ b/gradle/validation/jar-checks.gradle
@@ -1,3 +1,5 @@
+import java.util.stream.Collectors
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -24,8 +26,6 @@
// Because of this all tasks here must always execute together, so they cannot define task outputs.
// TODO: Rewrite the internal state to use state files containing the ext.jarInfos and its referencedFiles
-import org.apache.commons.codec.digest.DigestUtils
-
// This should be false only for debugging.
def failOnError = true
@@ -36,7 +36,7 @@ buildscript {
}
dependencies {
- classpath "commons-codec:commons-codec:${scriptDepVersions['commons-codec']}"
+ classpath libs.commonscodec.commonscodec
}
}
@@ -76,14 +76,14 @@ subprojects {
// Configure jarValidation configuration for all projects. Any dependency
// declared on this configuration (or any configuration it extends from) will
// be verified.
- configurations {
+ project.configurations {
jarValidation
}
// For Java projects, add all dependencies from the following configurations
// to jar validation
plugins.withType(JavaPlugin) {
- configurations {
+ project.configurations {
jarValidation {
extendsFrom runtimeClasspath
extendsFrom compileClasspath
@@ -109,16 +109,24 @@ subprojects {
}
def excludeRules = configurations.jarValidation.excludeRules
+ List