diff --git a/.asf.yaml b/.asf.yaml index 57999445552..9aa6153333c 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -29,6 +29,7 @@ github: branch_9_5: {} branch_9_6: {} branch_9_7: {} + branch_9_8: {} branch_9x: {} protected_tags: diff --git a/.github/labeler.yml b/.github/labeler.yml index 793957db16d..a3b7d5fe64c 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -2,7 +2,8 @@ dependencies: - changed-files: - any-glob-to-any-file: - - versions.props + - gradle/libs.versions.toml # Solr 10+ + - versions.props # Solr < v10 - versions.lock - solr/licenses/** @@ -122,11 +123,6 @@ module:gcs-repository: - any-glob-to-any-file: - solr/modules/gcs-repository/** -module:hadoop-auth: - - changed-files: - - any-glob-to-any-file: - - solr/modules/hadoop-auth/** - module:hdfs: - changed-files: - any-glob-to-any-file: diff --git a/.github/renovate.json b/.github/renovate.json index 5cf53a4c5fd..94220882331 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -4,9 +4,9 @@ "enabled": true, "dependencyDashboard": false, "enabledManagers": ["gradle", "github-actions"], - "includePaths": ["versions.*", "build.gradle", ".github/workflows/*"], + "includePaths": ["gradle/libs.versions.toml", "versions.*", "build.gradle", ".github/workflows/*"], "postUpgradeTasks": { - "commands": ["./gradlew updateLicenses"], + "commands": ["./gradlew writeLocks", "./gradlew updateLicenses"], "fileFilters": ["solr/licenses/*.sha1"], "executionMode": "branch" }, diff --git a/.github/workflows/bin-solr-test.yml b/.github/workflows/bin-solr-test.yml index a0a33ccc51a..126e8d62293 100644 --- a/.github/workflows/bin-solr-test.yml +++ b/.github/workflows/bin-solr-test.yml @@ -24,11 +24,11 @@ jobs: steps: # Setup - uses: actions/checkout@v4 - - name: Set up JDK 11 + - name: Set up JDK uses: actions/setup-java@v4 with: distribution: 'temurin' - java-version: 11 + java-version: 21 java-package: jdk - name: Setup Gradle uses: gradle/actions/setup-gradle@v4 diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml index 0b00a6ab308..d8593613ca6 100644 --- a/.github/workflows/docker-test.yml +++ b/.github/workflows/docker-test.yml @@ -26,11 +26,11 @@ jobs: steps: # Setup - uses: actions/checkout@v4 - - name: Set up JDK 11 + - name: Set up JDK 21 uses: actions/setup-java@v4 with: distribution: 'temurin' - java-version: 11 + java-version: 21 java-package: jdk - name: Setup Gradle uses: gradle/actions/setup-gradle@v4 diff --git a/.github/workflows/gradle-precommit.yml b/.github/workflows/gradle-precommit.yml index dcc55ead323..67c1506ad0a 100644 --- a/.github/workflows/gradle-precommit.yml +++ b/.github/workflows/gradle-precommit.yml @@ -8,7 +8,7 @@ on: jobs: test: - name: gradle check w/ Java 11 + name: gradle check runs-on: ubuntu-latest @@ -19,11 +19,11 @@ jobs: # Setup - uses: actions/checkout@v4 - - name: Set up JDK 11 + - name: Set up JDK uses: actions/setup-java@v4 with: distribution: 'temurin' - java-version: 11 + java-version: 21 java-package: jdk - name: Setup Gradle diff --git a/.github/workflows/solrj-test.yml b/.github/workflows/solrj-test.yml index 1a0f6bfebde..3eb9bb4f5f6 100644 --- a/.github/workflows/solrj-test.yml +++ b/.github/workflows/solrj-test.yml @@ -21,11 +21,11 @@ jobs: steps: # Setup - uses: actions/checkout@v4 - - name: Set up JDK 11 + - name: Set up JDK 21 uses: actions/setup-java@v4 with: distribution: 'temurin' - java-version: 11 + java-version: 21 java-package: jdk - name: Setup Gradle uses: gradle/actions/setup-gradle@v4 diff --git a/.github/workflows/stale.yml 
b/.github/workflows/stale.yml index f8e30632059..8580134b58b 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -24,14 +24,22 @@ jobs: days-before-pr-stale: 60 days-before-issue-stale: -1 # we don't use issues - days-before-close: -1 # don't close stale PRs/issues + days-before-close: 60 # Close PRs marked as stale after 60 days exempt-draft-pr: true # don't mark draft PRs as stale + exempt-pr-labels: "exempt-stale" # don't mark PRs with these labels as stale stale-pr-label: "stale" # label to use when marking as stale + close-pr-label: "closed-stale" # label to use when closing a stale PR stale-pr-message: > This PR has had no activity for 60 days and is now labeled as stale. - Any new activity or converting it to draft will remove the stale label. - To attract more reviewers, please tag people who might be familiar with the code area and/or notify the dev@solr.apache.org mailing list. + Any new activity will remove the stale label. + To attract more reviewers, please tag people who might be familiar with the code area and/or notify the dev@solr.apache.org mailing list. + To exempt this PR from being marked as stale, make it a draft PR or add the label "exempt-stale". + If left unattended, this PR will be closed after another 60 days of inactivity. Thank you for your contribution! + close-pr-message: > + This PR is now closed due to 60 days of inactivity after being marked as stale. + Re-opening this PR is still possible, in which case it will be marked as active again. + operations-per-run: 100 # operations budget diff --git a/NOTICE.txt b/NOTICE.txt index 49724175b40..ce7c667fc29 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,6 +1,6 @@ ============================================================== Apache Solr - Copyright 2006-2024 The Apache Software Foundation + Copyright 2006-2025 The Apache Software Foundation ============================================================== This product includes software developed at diff --git a/README.md b/README.md index 920a4fa3005..24d419bd472 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Solr is the blazing-fast, open source, multi-modal search platform built on [Apa It powers full-text, vector, and geospatial search at many of the world's largest organizations. [![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/badge/icon?subject=Solr%20Artifacts)](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/) -[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/badge/icon?subject=Solr%20Check)](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/) +[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/badge/icon?subject=Solr%20Lint)](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/) For a complete description of the Solr project, team composition, source code repositories, and other details, please see the Solr web site at @@ -94,7 +94,7 @@ Solr uses [Gradle](https://gradle.org/) for its build system. Here are some usef ``` cd ./solr/packaging/build/dev -bin/solr start -c +bin/solr start ``` - Open a web browser and go to http://localhost:8983/solr/ to access the Solr Admin interface. You can also use the `bin/solr` script to create and manage Solr collections. For example use the `bin/solr post` tool to index some sample data. @@ -108,4 +108,3 @@ To get involved in the developer community: - Slack: `#solr-dev` in the `the-asf` organization. 
Sign up at https://the-asf.slack.com/messages/CE70MDPMF - [Issue Tracker (JIRA)](https://issues.apache.org/jira/browse/SOLR) - IRC: `#solr-dev` on [libera.chat](https://web.libera.chat/?channels=#solr-dev) - diff --git a/build-tools/build-infra/build.gradle b/build-tools/build-infra/build.gradle new file mode 100644 index 00000000000..9b5ff387488 --- /dev/null +++ b/build-tools/build-infra/build.gradle @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id "java-gradle-plugin" + alias(libs.plugins.diffplug.spotless) apply false +} + +repositories { + mavenCentral() +} + +group = "org.apache" + +// Make sure the build environment is consistent. +apply from: file('../../gradle/conventions.gradle') +apply from: file('../../gradle/validation/check-environment.gradle') + +// Add spotless/ tidy. +tasks.register("checkJdkInternalsExportedToGradle") {} +apply from: file('../../gradle/validation/spotless.gradle') + +java { + sourceCompatibility = JavaVersion.toVersion(libs.versions.java.min.get()) + targetCompatibility = JavaVersion.toVersion(libs.versions.java.min.get()) +} + +gradlePlugin { + automatedPublishing = false + + plugins { + buildInfra { + id = 'solr.build-infra' + implementationClass = 'org.apache.lucene.gradle.buildinfra.BuildInfraPlugin' + } + } +} + +dependencies { + implementation gradleApi() + implementation localGroovy() + + implementation libs.commonscodec.commonscodec +} diff --git a/build-tools/build-infra/settings.gradle b/build-tools/build-infra/settings.gradle new file mode 100644 index 00000000000..7a55021b366 --- /dev/null +++ b/build-tools/build-infra/settings.gradle @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +rootProject.name = 'build-infra' + +// Use project's version catalog for centralized dependency management +dependencyResolutionManagement { + versionCatalogs { + libs { + from(files("../../gradle/libs.versions.toml")) + } + } +} diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java similarity index 99% rename from buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java index 0dab9dc7f05..a1d5c09586f 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/Checksum.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/Checksum.java @@ -27,6 +27,11 @@ package org.apache.lucene.gradle; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.Locale; import org.apache.commons.codec.digest.DigestUtils; import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; @@ -39,16 +44,10 @@ import org.gradle.work.Incremental; import org.gradle.work.InputChanges; -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.Locale; - public class Checksum extends DefaultTask { private FileCollection files; private File outputDir; - private Algorithm algorithm; + private Algorithm algorithm = Checksum.Algorithm.SHA512; public enum Algorithm { MD5(new DigestUtils(DigestUtils.getMd5Digest())), diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java new file mode 100644 index 00000000000..c1fb7b83983 --- /dev/null +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.lucene.gradle; + +import java.io.BufferedReader; +import java.io.Closeable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.io.Writer; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Pattern; +import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter; +import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.tasks.testing.TestDescriptor; +import org.gradle.api.tasks.testing.TestListener; +import org.gradle.api.tasks.testing.TestOutputEvent; +import org.gradle.api.tasks.testing.TestOutputListener; +import org.gradle.api.tasks.testing.TestResult; +import org.gradle.api.tasks.testing.logging.TestLogging; + +/** + * An error reporting listener that queues test output streams and displays them on failure. + * + *

Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed). + */ +public class ErrorReportingTestListener implements TestOutputListener, TestListener { + private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class); + + private final TestExceptionFormatter formatter; + private final Map outputHandlers = new ConcurrentHashMap<>(); + private final Path spillDir; + private final Path outputsDir; + private final boolean verboseMode; + + public ErrorReportingTestListener( + TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) { + this.formatter = new FullExceptionFormatter(testLogging); + this.spillDir = spillDir; + this.outputsDir = outputsDir; + this.verboseMode = verboseMode; + } + + @Override + public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) { + handlerFor(testDescriptor).write(outputEvent); + } + + @Override + public void beforeSuite(TestDescriptor suite) { + // noop. + } + + @Override + public void beforeTest(TestDescriptor testDescriptor) { + // Noop. + } + + @Override + public void afterSuite(final TestDescriptor suite, TestResult result) { + if (suite.getParent() == null || suite.getName().startsWith("Gradle")) { + return; + } + + TestKey key = TestKey.of(suite); + try { + OutputHandler outputHandler = outputHandlers.get(key); + if (outputHandler != null) { + long length = outputHandler.length(); + if (length > 1024 * 1024 * 10) { + LOGGER.warn( + String.format( + Locale.ROOT, + "WARNING: Test %s wrote %,d bytes of output.", + suite.getName(), + length)); + } + } + + boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE); + boolean dumpOutput = echoOutput; + + // If the test suite failed, report output. + if (dumpOutput || echoOutput) { + Files.createDirectories(outputsDir); + Path outputLog = outputsDir.resolve(getOutputLogName(suite)); + + // Save the output of a failing test to disk. + try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) { + if (outputHandler != null) { + outputHandler.copyTo(w); + } + } + + if (echoOutput && !verboseMode) { + synchronized (this) { + System.out.println(""); + System.out.println( + suite.getClassName() + + " > test suite's output saved to " + + outputLog + + ", copied below:"); + try (BufferedReader reader = + Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) { + char[] buf = new char[1024]; + int len; + while ((len = reader.read(buf)) >= 0) { + System.out.print(new String(buf, 0, len)); + } + System.out.println(); + } + } + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } finally { + OutputHandler handler = outputHandlers.remove(key); + if (handler != null) { + try { + handler.close(); + } catch (IOException e) { + LOGGER.error("Failed to close output handler for: " + key, e); + } + } + } + } + + private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+"); + + public static String getOutputLogName(TestDescriptor suite) { + return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_"); + } + + @Override + public void afterTest(TestDescriptor testDescriptor, TestResult result) { + // Include test failure exception stacktrace(s) in test output log. 
+ if (result.getResultType() == TestResult.ResultType.FAILURE) { + if (result.getExceptions().size() > 0) { + String message = formatter.format(testDescriptor, result.getExceptions()); + handlerFor(testDescriptor).write(message); + } + } + } + + private OutputHandler handlerFor(TestDescriptor descriptor) { + // Attach output of leaves (individual tests) to their parent. + if (!descriptor.isComposite()) { + descriptor = descriptor.getParent(); + } + return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler()); + } + + public static class TestKey { + private final String key; + + private TestKey(String key) { + this.key = key; + } + + public static TestKey of(TestDescriptor d) { + StringBuilder key = new StringBuilder(); + key.append(d.getClassName()); + key.append("::"); + key.append(d.getName()); + key.append("::"); + key.append(d.getParent() == null ? "-" : d.getParent().toString()); + return new TestKey(key.toString()); + } + + @Override + public boolean equals(Object o) { + return o != null && o.getClass() == this.getClass() && Objects.equals(((TestKey) o).key, key); + } + + @Override + public int hashCode() { + return key.hashCode(); + } + + @Override + public String toString() { + return key; + } + } + + private class OutputHandler implements Closeable { + // Max single-line buffer before automatic wrap occurs. + private static final int MAX_LINE_WIDTH = 1024 * 4; + + private final SpillWriter buffer; + + // internal stream. + private final PrefixedWriter sint; + // stdout + private final PrefixedWriter sout; + // stderr + private final PrefixedWriter serr; + + // last used stream (so that we can flush it properly and prefixes are not screwed up). + private PrefixedWriter last; + + public OutputHandler() { + buffer = + new SpillWriter( + () -> { + try { + return Files.createTempFile(spillDir, "spill-", ".tmp"); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + + Writer sink = buffer; + if (verboseMode) { + sink = new StdOutTeeWriter(buffer); + } + + sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH); + sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH); + serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH); + last = sint; + } + + public void write(TestOutputEvent event) { + write( + (event.getDestination() == TestOutputEvent.Destination.StdOut ? 
sout : serr), + event.getMessage()); + } + + public void write(String message) { + write(sint, message); + } + + public long length() throws IOException { + return buffer.length(); + } + + private void write(PrefixedWriter out, String message) { + try { + if (out != last) { + last.completeLine(); + last = out; + } + out.write(message); + } catch (IOException e) { + throw new UncheckedIOException("Unable to write to test output.", e); + } + } + + public void copyTo(Writer out) throws IOException { + flush(); + buffer.copyTo(out); + } + + public void flush() throws IOException { + sout.completeLine(); + serr.completeLine(); + buffer.flush(); + } + + @Override + public void close() throws IOException { + buffer.close(); + } + } +} diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java similarity index 96% rename from buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java index db4f804f12e..5436afe70f8 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java @@ -67,6 +67,6 @@ public void run(Path source, Path destination) throws IOException { fileContent = fileContent.replace(entry.getKey(), String.valueOf(entry.getValue())); } Files.writeString( - destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW); + destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW); } } diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java similarity index 91% rename from buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java index 7281d496001..3dc663e8332 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/PrefixedWriter.java @@ -20,12 +20,13 @@ import java.io.Writer; /** - * Prefixes every new line with a given string, synchronizing multiple streams to emit consistent lines. + * Prefixes every new line with a given string, synchronizing multiple streams to emit consistent + * lines. */ public class PrefixedWriter extends Writer { Writer sink; - private final static char LF = '\n'; + private static final char LF = '\n'; private final String prefix; private final StringBuilder lineBuffer = new StringBuilder(); private final int maxLineLength; @@ -45,7 +46,7 @@ public void write(int c) throws IOException { sink.write(LF); lineBuffer.setLength(0); - if (c != LF) { + if (c != LF) { lineBuffer.append((char) c); } } else { @@ -70,9 +71,7 @@ public void close() throws IOException { throw new UnsupportedOperationException(); } - /** - * Complete the current line (emit LF if not at the start of the line already). - */ + /** Complete the current line (emit LF if not at the start of the line already). 
*/ public void completeLine() throws IOException { if (lineBuffer.length() > 0) { write(LF); diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java similarity index 79% rename from buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java index 60def1a89d1..15e0f11c56e 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/ProfileResults.java @@ -20,13 +20,12 @@ import java.io.IOException; import java.nio.file.Paths; import java.util.AbstractMap.SimpleEntry; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; - import jdk.jfr.consumer.RecordedClass; import jdk.jfr.consumer.RecordedEvent; import jdk.jfr.consumer.RecordedFrame; @@ -36,15 +35,12 @@ import jdk.jfr.consumer.RecordingFile; /** - * Processes an array of recording files (from tests), and prints a simple histogram. - * Inspired by the JFR example code. - * Whole stacks are deduplicated (with the default stacksize being 1): you can drill deeper - * by adjusting the parameters. + * Processes an array of recording files (from tests), and prints a simple histogram. Inspired by + * the JFR example code. Whole stacks are deduplicated (with the default stacksize being 1): you can + * drill deeper by adjusting the parameters. */ public class ProfileResults { - /** - * Formats a frame to a formatted line. This is deduplicated on! - */ + /** Formats a frame to a formatted line. This is deduplicated on! */ static String frameToString(RecordedFrame frame, boolean lineNumbers) { StringBuilder builder = new StringBuilder(); RecordedMethod method = frame.getMethod(); @@ -84,29 +80,32 @@ static String frameToString(RecordedFrame frame, boolean lineNumbers) { /** * Driver method, for testing standalone. + * *

     * <pre>
     * java -Dtests.profile.count=5 buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java \
     *   ./lucene/core/build/tmp/tests-cwd/somefile.jfr ...
     * </pre>
*/ public static void main(String[] args) throws IOException { - printReport(Arrays.asList(args), - System.getProperty(MODE_KEY, MODE_DEFAULT), - Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)), - Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)), - Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT))); + printReport( + Arrays.asList(args), + System.getProperty(MODE_KEY, MODE_DEFAULT), + Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)), + Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)), + Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT))); } /** true if we care about this event */ static boolean isInteresting(String mode, RecordedEvent event) { String name = event.getEventType().getName(); - switch(mode) { + switch (mode) { case "cpu": - return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample")) && - !isGradlePollThread(event.getThread("sampledThread")); + return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample")) + && !isGradlePollThread(event.getThread("sampledThread")); case "heap": - return (name.equals("jdk.ObjectAllocationInNewTLAB") || name.equals("jdk.ObjectAllocationOutsideTLAB")) && - !isGradlePollThread(event.getThread("eventThread")); + return (name.equals("jdk.ObjectAllocationInNewTLAB") + || name.equals("jdk.ObjectAllocationOutsideTLAB")) + && !isGradlePollThread(event.getThread("eventThread")); default: throw new UnsupportedOperationException(event.toString()); } @@ -119,7 +118,7 @@ static boolean isGradlePollThread(RecordedThread thread) { /** value we accumulate for this event */ static long getValue(RecordedEvent event) { - switch(event.getEventType().getName()) { + switch (event.getEventType().getName()) { case "jdk.ObjectAllocationInNewTLAB": return event.getLong("tlabSize"); case "jdk.ObjectAllocationOutsideTLAB": @@ -144,15 +143,17 @@ static String formatValue(long value) { /** fixed width used for printing the different columns */ private static final int COLUMN_SIZE = 14; + private static final String COLUMN_PAD = "%-" + COLUMN_SIZE + "s"; + private static String pad(String input) { return String.format(Locale.ROOT, COLUMN_PAD, input); } - /** - * Process all the JFR files passed in args and print a merged summary. - */ - public static void printReport(List files, String mode, int stacksize, int count, boolean lineNumbers) throws IOException { + /** Process all the JFR files passed in args and print a merged summary. 
*/ + public static void printReport( + List files, String mode, int stacksize, int count, boolean lineNumbers) + throws IOException { if (!"cpu".equals(mode) && !"heap".equals(mode)) { throw new IllegalArgumentException("tests.profile.mode must be one of (cpu,heap)"); } @@ -178,14 +179,13 @@ public static void printReport(List files, String mode, int stacksize, i StringBuilder stack = new StringBuilder(); for (int i = 0; i < Math.min(stacksize, trace.getFrames().size()); i++) { if (stack.length() > 0) { - stack.append("\n") - .append(framePadding) - .append(" at "); + stack.append("\n").append(framePadding).append(" at "); } stack.append(frameToString(trace.getFrames().get(i), lineNumbers)); } String line = stack.toString(); - SimpleEntry entry = histogram.computeIfAbsent(line, u -> new SimpleEntry<>(line, 0L)); + SimpleEntry entry = + histogram.computeIfAbsent(line, u -> new SimpleEntry<>(line, 0L)); long value = getValue(event); entry.setValue(entry.getValue() + value); totalEvents++; @@ -195,12 +195,20 @@ public static void printReport(List files, String mode, int stacksize, i } } // print summary from histogram - System.out.printf(Locale.ROOT, "PROFILE SUMMARY from %d events (total: %s)\n", totalEvents, formatValue(sumValues)); + System.out.printf( + Locale.ROOT, + "PROFILE SUMMARY from %d events (total: %s)\n", + totalEvents, + formatValue(sumValues)); System.out.printf(Locale.ROOT, " tests.profile.mode=%s\n", mode); System.out.printf(Locale.ROOT, " tests.profile.count=%d\n", count); System.out.printf(Locale.ROOT, " tests.profile.stacksize=%d\n", stacksize); System.out.printf(Locale.ROOT, " tests.profile.linenumbers=%b\n", lineNumbers); - System.out.printf(Locale.ROOT, "%s%sSTACK\n", pad("PERCENT"), pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES")); + System.out.printf( + Locale.ROOT, + "%s%sSTACK\n", + pad("PERCENT"), + pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES")); List> entries = new ArrayList<>(histogram.values()); entries.sort((u, v) -> v.getValue().compareTo(u.getValue())); int seen = 0; @@ -209,7 +217,8 @@ public static void printReport(List files, String mode, int stacksize, i break; } String percent = String.format("%2.2f%%", 100 * (c.getValue() / (float) sumValues)); - System.out.printf(Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey()); + System.out.printf( + Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey()); } } } diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java similarity index 92% rename from buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java index f89977c2503..e9783d070b5 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/SpillWriter.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/SpillWriter.java @@ -24,9 +24,10 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.function.Supplier; +import org.jetbrains.annotations.NotNull; public class SpillWriter extends Writer { - private final static int MAX_BUFFERED = 2 * 1024; + private static final int MAX_BUFFERED = 2 * 1024; private final StringWriter buffer = new StringWriter(MAX_BUFFERED); private final Supplier spillPathSupplier; @@ -38,7 +39,7 @@ public SpillWriter(Supplier spillPathSupplier) { } @Override - public void write(char[] cbuf, int off, int len) throws IOException { + public 
void write(char @NotNull [] cbuf, int off, int len) throws IOException { getSink(len).write(cbuf, off, len); } @@ -58,7 +59,7 @@ public void write(String str) throws IOException { } @Override - public void write(String str, int off, int len) throws IOException { + public void write(@NotNull String str, int off, int len) throws IOException { getSink(len).write(str, off, len); } diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java new file mode 100644 index 00000000000..71901259ea2 --- /dev/null +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.gradle; + +import java.io.IOException; +import java.io.PrintStream; +import java.io.Writer; +import org.jetbrains.annotations.NotNull; + +class StdOutTeeWriter extends Writer { + private final Writer delegate; + private final PrintStream out = System.out; + + public StdOutTeeWriter(Writer delegate) { + this.delegate = delegate; + } + + @Override + public void write(int c) throws IOException { + delegate.write(c); + out.write(c); + } + + @Override + public void write(char @NotNull [] cbuf) throws IOException { + delegate.write(cbuf); + out.print(cbuf); + } + + @Override + public void write(@NotNull String str) throws IOException { + delegate.write(str); + out.print(str); + } + + @Override + public void write(@NotNull String str, int off, int len) throws IOException { + delegate.write(str, off, len); + out.append(str, off, len); + } + + @Override + public Writer append(CharSequence csq) throws IOException { + delegate.append(csq); + out.append(csq); + return this; + } + + @Override + public Writer append(CharSequence csq, int start, int end) throws IOException { + delegate.append(csq, start, end); + out.append(csq, start, end); + return this; + } + + @Override + public Writer append(char c) throws IOException { + delegate.append(c); + out.append(c); + return this; + } + + @Override + public void write(char @NotNull [] cbuf, int off, int len) throws IOException { + delegate.write(cbuf, off, len); + out.print(new String(cbuf, off, len)); + } + + @Override + public void flush() throws IOException { + delegate.flush(); + out.flush(); + } + + @Override + public void close() throws IOException { + delegate.close(); + // Don't close the actual output. 
+ } +} diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java similarity index 75% rename from buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java rename to build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java index e6930af7c74..adb8f3eaf07 100644 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java @@ -16,32 +16,26 @@ */ package org.apache.lucene.gradle; +import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; + import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLConnection; -import java.nio.channels.Channels; -import java.nio.channels.FileChannel; -import java.nio.channels.ReadableByteChannel; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; -import java.util.EnumSet; import java.util.Locale; import java.util.concurrent.TimeUnit; -import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; -import static java.nio.file.StandardOpenOption.APPEND; - /** * Standalone class that can be used to download a gradle-wrapper.jar - *

- * Has no dependencies outside of standard java libraries
+ *
+ * <p>

Has no dependencies outside of standard java libraries */ public class WrapperDownloader { public static void main(String[] args) { @@ -60,20 +54,23 @@ public static void main(String[] args) { } public static void checkVersion() { - int major = Runtime.getRuntime().version().feature(); - if (major < 11 || major > 21) { - throw new IllegalStateException("java version must be between 11 and 21, your version: " + major); + int major = Runtime.version().feature(); + if (major < 21 || major > 23) { + throw new IllegalStateException( + "java version must be between 21 and 23, your version: " + major); } } public void run(Path destination) throws IOException, NoSuchAlgorithmException { - Path checksumPath = destination.resolveSibling(destination.getFileName().toString() + ".sha256"); + Path checksumPath = + destination.resolveSibling(destination.getFileName().toString() + ".sha256"); if (!Files.exists(checksumPath)) { throw new IOException("Checksum file not found: " + checksumPath); } String expectedChecksum = Files.readString(checksumPath, StandardCharsets.UTF_8).trim(); - Path versionPath = destination.resolveSibling(destination.getFileName().toString() + ".version"); + Path versionPath = + destination.resolveSibling(destination.getFileName().toString() + ".version"); if (!Files.exists(versionPath)) { throw new IOException("Wrapper version file not found: " + versionPath); } @@ -92,8 +89,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException { } } - URL url = new URL("https://raw.githubusercontent.com/gradle/gradle/v" + wrapperVersion + "/gradle/wrapper/gradle-wrapper.jar"); - System.err.println("Downloading gradle-wrapper.jar from " + url); + URI uri = + URI.create( + "https://raw.githubusercontent.com/gradle/gradle/v" + + wrapperVersion + + "/gradle/wrapper/gradle-wrapper.jar"); + System.err.println("Downloading gradle-wrapper.jar from " + uri); // Zero-copy save the jar to a temp file Path temp = Files.createTempFile(destination.getParent(), ".gradle-wrapper", ".tmp"); @@ -102,13 +103,14 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException { int retryDelay = 30; HttpURLConnection connection; while (true) { - connection = (HttpURLConnection) url.openConnection(); + connection = (HttpURLConnection) uri.toURL().openConnection(); try { connection.connect(); } catch (IOException e) { if (retries-- > 0) { // Retry after a short delay - System.err.println("Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds."); + System.err.println( + "Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds."); Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay)); continue; } @@ -120,7 +122,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException { case HttpURLConnection.HTTP_BAD_GATEWAY: if (retries-- > 0) { // Retry after a short delay. 
- System.err.println("Server returned HTTP " + connection.getResponseCode() + ", will retry in " + retryDelay + " seconds."); + System.err.println( + "Server returned HTTP " + + connection.getResponseCode() + + ", will retry in " + + retryDelay + + " seconds."); Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay)); continue; } @@ -131,13 +138,15 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException { } try (InputStream is = connection.getInputStream(); - OutputStream out = Files.newOutputStream(temp)){ + OutputStream out = Files.newOutputStream(temp)) { is.transferTo(out); } String checksum = checksum(digest, temp); if (!checksum.equalsIgnoreCase(expectedChecksum)) { - throw new IOException(String.format(Locale.ROOT, + throw new IOException( + String.format( + Locale.ROOT, "Checksum mismatch on downloaded gradle-wrapper.jar (was: %s, expected: %s).", checksum, expectedChecksum)); @@ -146,8 +155,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException { Files.move(temp, destination, REPLACE_EXISTING); temp = null; } catch (IOException | InterruptedException e) { - throw new IOException("Could not download gradle-wrapper.jar (" + - e.getClass().getSimpleName() + ": " + e.getMessage() + ")."); + throw new IOException( + "Could not download gradle-wrapper.jar (" + + e.getClass().getSimpleName() + + ": " + + e.getMessage() + + ")."); } finally { if (temp != null) { Files.deleteIfExists(temp); @@ -165,7 +178,8 @@ private String checksum(MessageDigest messageDigest, Path path) throws IOExcepti } return sb.toString(); } catch (IOException e) { - throw new IOException("Could not compute digest of file: " + path + " (" + e.getMessage() + ")"); + throw new IOException( + "Could not compute digest of file: " + path + " (" + e.getMessage() + ")"); } } } diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java new file mode 100644 index 00000000000..415922a1916 --- /dev/null +++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/buildinfra/BuildInfraPlugin.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.gradle.buildinfra; + +import java.nio.file.Path; +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.lucene.gradle.Checksum; +import org.apache.lucene.gradle.ErrorReportingTestListener; +import org.apache.lucene.gradle.ProfileResults; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.tasks.testing.TestDescriptor; +import org.gradle.api.tasks.testing.logging.TestLogging; + +public class BuildInfraPlugin implements Plugin { + @Override + public void apply(Project project) { + project.getExtensions().create(BuildInfraExtension.NAME, BuildInfraExtension.class); + } + + public static class BuildInfraExtension { + public static final String NAME = "buildinfra"; + + public ErrorReportingTestListener newErrorReportingTestListener( + TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) { + return new ErrorReportingTestListener(testLogging, spillDir, outputsDir, verboseMode); + } + + public DigestUtils sha1Digest() { + return new DigestUtils(DigestUtils.getSha1Digest()); + } + + public String getOutputLogName(TestDescriptor suite) { + return ErrorReportingTestListener.getOutputLogName(suite); + } + + public Class checksumClass() { + return Checksum.class; + } + + public Class profileResultsClass() { + return ProfileResults.class; + } + } +} diff --git a/build-tools/missing-doclet/build.gradle b/build-tools/missing-doclet/build.gradle new file mode 100644 index 00000000000..11a7fc6e2a8 --- /dev/null +++ b/build-tools/missing-doclet/build.gradle @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'java-library' + alias(libs.plugins.diffplug.spotless) apply false +} + +repositories { + mavenCentral() +} + +group = "org.apache.solr.tools" +description = 'Doclet-based javadoc validation' + +// Make sure the build environment is consistent. +apply from: file('../../gradle/conventions.gradle') +apply from: file('../../gradle/validation/check-environment.gradle') + +// Add spotless/ tidy. 
+tasks.register("checkJdkInternalsExportedToGradle") {} +apply from: file('../../gradle/validation/spotless.gradle') + +java { + sourceCompatibility = JavaVersion.toVersion(libs.versions.java.min.get()) + targetCompatibility = JavaVersion.toVersion(libs.versions.java.min.get()) +} + +tasks.withType(JavaCompile).configureEach { + options.compilerArgs += ["--release", targetCompatibility.toString()] + options.encoding = "UTF-8" +} diff --git a/build-tools/missing-doclet/settings.gradle b/build-tools/missing-doclet/settings.gradle new file mode 100644 index 00000000000..c39219472be --- /dev/null +++ b/build-tools/missing-doclet/settings.gradle @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +rootProject.name = "missing-doclet" + +// Use project's version catalog for centralized dependency management +dependencyResolutionManagement { + versionCatalogs { + libs { + from(files("../../gradle/libs.versions.toml")) + } + } +} diff --git a/dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java similarity index 60% rename from dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java rename to build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java index 36c0e4fe4c8..bb0b0fb3ea0 100644 --- a/dev-tools/solr-missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java +++ b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java @@ -16,6 +16,9 @@ */ package org.apache.lucene.missingdoclet; +import com.sun.source.doctree.DocCommentTree; +import com.sun.source.doctree.ParamTree; +import com.sun.source.util.DocTrees; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -24,7 +27,6 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; @@ -35,24 +37,19 @@ import javax.lang.model.util.ElementFilter; import javax.lang.model.util.Elements; import javax.tools.Diagnostic; - -import com.sun.source.doctree.DocCommentTree; -import com.sun.source.doctree.ParamTree; -import com.sun.source.util.DocTrees; - import jdk.javadoc.doclet.Doclet; import jdk.javadoc.doclet.DocletEnvironment; import jdk.javadoc.doclet.Reporter; import jdk.javadoc.doclet.StandardDoclet; /** - * Checks for missing javadocs, where missing also means "only whitespace" or "license header". - * Has option --missing-level (package, class, method, parameter) so that we can improve over time. 
- * Has option --missing-ignore to ignore individual elements (such as split packages). - * It isn't recursive, just ignores exactly the elements you tell it. - * This should be removed when packaging is fixed to no longer be split across JARs. - * Has option --missing-method to apply "method" level to selected packages (fix one at a time). - * Matches package names exactly: so you'll need to list subpackages separately. + * Checks for missing javadocs, where missing also means "only whitespace" or "license header". Has + * option --missing-level (package, class, method, parameter) so that we can improve over time. Has + * option --missing-ignore to ignore individual elements (such as split packages). It isn't + * recursive, just ignores exactly the elements you tell it. This should be removed when packaging + * is fixed to no longer be split across JARs. Has option --missing-method to apply "method" level + * to selected packages (fix one at a time). Matches package names exactly: so you'll need to list + * subpackages separately. */ public class MissingDoclet extends StandardDoclet { // checks that modules and packages have documentation @@ -70,121 +67,124 @@ public class MissingDoclet extends StandardDoclet { Elements elementUtils; Set ignored = Collections.emptySet(); Set methodPackages = Collections.emptySet(); - + @Override public Set getSupportedOptions() { Set options = new HashSet<>(); options.addAll(super.getSupportedOptions()); - options.add(new Doclet.Option() { - @Override - public int getArgumentCount() { - return 1; - } + options.add( + new Doclet.Option() { + @Override + public int getArgumentCount() { + return 1; + } - @Override - public String getDescription() { - return "level to enforce for missing javadocs: [package, class, method, parameter]"; - } + @Override + public String getDescription() { + return "level to enforce for missing javadocs: [package, class, method, parameter]"; + } - @Override - public Kind getKind() { - return Option.Kind.STANDARD; - } + @Override + public Kind getKind() { + return Option.Kind.STANDARD; + } - @Override - public List getNames() { - return Collections.singletonList("--missing-level"); - } + @Override + public List getNames() { + return Collections.singletonList("--missing-level"); + } - @Override - public String getParameters() { - return "level"; - } + @Override + public String getParameters() { + return "level"; + } - @Override - public boolean process(String option, List arguments) { - switch (arguments.get(0)) { - case "package": - level = PACKAGE; - return true; - case "class": - level = CLASS; - return true; - case "method": - level = METHOD; - return true; - case "parameter": - level = PARAMETER; - return true; - default: - return false; - } - } - }); - options.add(new Doclet.Option() { - @Override - public int getArgumentCount() { - return 1; - } + @Override + public boolean process(String option, List arguments) { + switch (arguments.get(0)) { + case "package": + level = PACKAGE; + return true; + case "class": + level = CLASS; + return true; + case "method": + level = METHOD; + return true; + case "parameter": + level = PARAMETER; + return true; + default: + return false; + } + } + }); + options.add( + new Doclet.Option() { + @Override + public int getArgumentCount() { + return 1; + } - @Override - public String getDescription() { - return "comma separated list of element names to ignore (e.g. 
as a workaround for split packages)"; - } + @Override + public String getDescription() { + return "comma separated list of element names to ignore (e.g. as a workaround for split packages)"; + } - @Override - public Kind getKind() { - return Option.Kind.STANDARD; - } + @Override + public Kind getKind() { + return Option.Kind.STANDARD; + } - @Override - public List getNames() { - return Collections.singletonList("--missing-ignore"); - } + @Override + public List getNames() { + return Collections.singletonList("--missing-ignore"); + } - @Override - public String getParameters() { - return "ignoredNames"; - } + @Override + public String getParameters() { + return "ignoredNames"; + } - @Override - public boolean process(String option, List arguments) { - ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(","))); - return true; - } - }); - options.add(new Doclet.Option() { - @Override - public int getArgumentCount() { - return 1; - } + @Override + public boolean process(String option, List arguments) { + ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(","))); + return true; + } + }); + options.add( + new Doclet.Option() { + @Override + public int getArgumentCount() { + return 1; + } - @Override - public String getDescription() { - return "comma separated list of packages to check at 'method' level"; - } + @Override + public String getDescription() { + return "comma separated list of packages to check at 'method' level"; + } - @Override - public Kind getKind() { - return Option.Kind.STANDARD; - } + @Override + public Kind getKind() { + return Option.Kind.STANDARD; + } - @Override - public List getNames() { - return Collections.singletonList("--missing-method"); - } + @Override + public List getNames() { + return Collections.singletonList("--missing-method"); + } - @Override - public String getParameters() { - return "packages"; - } + @Override + public String getParameters() { + return "packages"; + } - @Override - public boolean process(String option, List arguments) { - methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(","))); - return true; - } - }); + @Override + public boolean process(String option, List arguments) { + methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(","))); + return true; + } + }); return options; } @@ -205,10 +205,8 @@ public boolean run(DocletEnvironment docEnv) { return super.run(docEnv); } - - /** - * Returns effective check level for this element - */ + + /** Returns effective check level for this element */ private int level(Element element) { String pkg = elementUtils.getPackageOf(element).getQualifiedName().toString(); if (methodPackages.contains(pkg)) { @@ -217,24 +215,24 @@ private int level(Element element) { return level; } } - - /** - * Check an individual element. - * This checks packages and types from the doctrees. - * It will recursively check methods/fields from encountered types when the level is "method" + + /** + * Check an individual element. This checks packages and types from the doctrees. 
It will + * recursively check methods/fields from encountered types when the level is "method" */ private void check(Element element) { - switch(element.getKind()) { + switch (element.getKind()) { case MODULE: // don't check the unnamed module, it won't have javadocs - if (!((ModuleElement)element).isUnnamed()) { + if (!((ModuleElement) element).isUnnamed()) { checkComment(element); } break; case PACKAGE: checkComment(element); break; - // class-like elements, check them, then recursively check their children (fields and methods) + // class-like elements, check them, then recursively check their children (fields and + // methods) case CLASS: case INTERFACE: case ENUM: @@ -242,17 +240,18 @@ private void check(Element element) { if (level(element) >= CLASS) { checkComment(element); for (var subElement : element.getEnclosedElements()) { - // don't recurse into enclosed types, otherwise we'll double-check since they are already in the included docTree - if (subElement.getKind() == ElementKind.METHOD || - subElement.getKind() == ElementKind.CONSTRUCTOR || - subElement.getKind() == ElementKind.FIELD || - subElement.getKind() == ElementKind.ENUM_CONSTANT) { + // don't recurse into enclosed types, otherwise we'll double-check since they are + // already in the included docTree + if (subElement.getKind() == ElementKind.METHOD + || subElement.getKind() == ElementKind.CONSTRUCTOR + || subElement.getKind() == ElementKind.FIELD + || subElement.getKind() == ElementKind.ENUM_CONSTANT) { check(subElement); } } } break; - // method-like elements, check them if we are configured to do so + // method-like elements, check them if we are configured to do so case METHOD: case CONSTRUCTOR: case FIELD: @@ -267,9 +266,9 @@ private void check(Element element) { } /** - * Return true if the method is synthetic enum method (values/valueOf). - * According to the doctree documentation, the "included" set never includes synthetic elements. - * UweSays: It should not happen but it happens! + * Return true if the method is synthetic enum method (values/valueOf). According to the doctree + * documentation, the "included" set never includes synthetic elements. UweSays: It should not + * happen but it happens! */ private boolean isSyntheticEnumMethod(Element element) { String simpleName = element.getSimpleName().toString(); @@ -280,20 +279,23 @@ private boolean isSyntheticEnumMethod(Element element) { } return false; } - + /** - * Checks that an element doesn't have missing javadocs. - * In addition to truly "missing", check that comments aren't solely whitespace (generated by some IDEs), - * that they aren't a license header masquerading as a javadoc comment. + * Checks that an element doesn't have missing javadocs. In addition to truly "missing", check + * that comments aren't solely whitespace (generated by some IDEs), that they aren't a license + * header masquerading as a javadoc comment. */ private void checkComment(Element element) { // sanity check that the element is really "included", because we do some recursion into types if (!docEnv.isIncluded(element)) { return; } - // check that this element isn't on our ignore list. This is only used as a workaround for "split packages". - // ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside it. - // we just need to cope with the fact package-info.java isn't there because it is split across multiple jars. + // check that this element isn't on our ignore list. This is only used as a workaround for + // "split packages". 
+ // ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside + // it. + // we just need to cope with the fact package-info.java isn't there because it is split across + // multiple jars. if (ignored.contains(element.toString())) { return; } @@ -306,14 +308,17 @@ private void checkComment(Element element) { error(element, "javadocs are missing"); } } else { - var normalized = tree.getFirstSentence().get(0).toString() - .replace('\u00A0', ' ') - .trim() - .toLowerCase(Locale.ROOT); + var normalized = + tree.getFirstSentence() + .get(0) + .toString() + .replace('\u00A0', ' ') + .trim() + .toLowerCase(Locale.ROOT); if (normalized.isEmpty()) { error(element, "blank javadoc comment"); - } else if (normalized.startsWith("licensed to the apache software foundation") || - normalized.startsWith("copyright 2004 the apache software foundation")) { + } else if (normalized.startsWith("licensed to the apache software foundation") + || normalized.startsWith("copyright 2004 the apache software foundation")) { error(element, "comment is really a license"); } } @@ -323,19 +328,20 @@ private void checkComment(Element element) { } private boolean hasInheritedJavadocs(Element element) { - boolean hasOverrides = element.getAnnotationMirrors().stream() - .anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName())); + boolean hasOverrides = + element.getAnnotationMirrors().stream() + .anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName())); if (hasOverrides) { // If an element has explicit @Overrides annotation, assume it does // have inherited javadocs somewhere. - reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared, skipping."); + reporter.print( + Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared, skipping."); return true; } // Check for methods up the types tree. - if (element instanceof ExecutableElement) { - ExecutableElement thisMethod = (ExecutableElement) element; + if (element instanceof ExecutableElement thisMethod) { Iterable superTypes = () -> superTypeForInheritDoc(thisMethod.getEnclosingElement()).iterator(); @@ -346,7 +352,10 @@ private boolean hasInheritedJavadocs(Element element) { // We could check supMethod for non-empty javadoc here. Don't know if this makes // sense though as all methods will be verified in the end so it'd fail on the // top of the hierarchy (if empty) anyway. 
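For reference, the `--missing-ignore` and `--missing-method` options defined by this doclet are handed to javadoc through a Gradle `Javadoc` task. A minimal Groovy sketch of that wiring follows; the doclet's fully-qualified class name and the package names are assumptions for illustration only, and the project's actual configuration lives in `gradle/documentation/render-javadoc.gradle`.

```groovy
// Sketch only: assumes a 'missingdoclet' configuration that resolves
// org.apache.solr.tools:missing-doclet (declared elsewhere in render-javadoc.gradle)
// and an assumed doclet class name.
tasks.withType(Javadoc).configureEach {
  options.doclet = "org.apache.solr.tools.MissingDoclet"          // class name assumed for illustration
  options.docletpath = configurations.missingdoclet.files as List
  // Gradle prefixes a single '-', so passing '-missing-ignore' yields '--missing-ignore' on the CLI.
  options.addStringOption('-missing-ignore', 'org.apache.hadoop.fs')    // hypothetical split-package workaround
  options.addStringOption('-missing-method', 'org.apache.solr.common')  // hypothetical method-level package
}
```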
- reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but method overrides another, skipping."); + reporter.print( + Diagnostic.Kind.NOTE, + element, + "javadoc empty but method overrides another, skipping."); return true; } } @@ -356,15 +365,14 @@ private boolean hasInheritedJavadocs(Element element) { return false; } - /* Find types from which methods in type may inherit javadoc, in the proper order.*/ private Stream superTypeForInheritDoc(Element type) { TypeElement clazz = (TypeElement) type; - List interfaces = clazz.getInterfaces() - .stream() - .filter(tm -> tm.getKind() == TypeKind.DECLARED) - .map(tm -> ((DeclaredType) tm).asElement()) - .collect(Collectors.toList()); + List interfaces = + clazz.getInterfaces().stream() + .filter(tm -> tm.getKind() == TypeKind.DECLARED) + .map(tm -> ((DeclaredType) tm).asElement()) + .collect(Collectors.toList()); Stream result = interfaces.stream(); result = Stream.concat(result, interfaces.stream().flatMap(this::superTypeForInheritDoc)); @@ -386,13 +394,13 @@ private void checkParameters(Element element, DocCommentTree tree) { if (tree != null) { for (var tag : tree.getBlockTags()) { if (tag instanceof ParamTree) { - var name = ((ParamTree)tag).getName().getName().toString(); + var name = ((ParamTree) tag).getName().getName().toString(); seenParameters.add(name); } } } // now compare the method's formal parameter list against it - for (var param : ((ExecutableElement)element).getParameters()) { + for (var param : ((ExecutableElement) element).getParameters()) { var name = param.getSimpleName().toString(); if (!seenParameters.contains(name)) { error(element, "missing javadoc @param for parameter '" + name + "'"); @@ -400,7 +408,7 @@ private void checkParameters(Element element, DocCommentTree tree) { } } } - + /** logs a new error for the particular element */ private void error(Element element, String message) { var fullMessage = new StringBuilder(); @@ -430,13 +438,6 @@ private void error(Element element, String message) { fullMessage.append("): "); fullMessage.append(message); - if (Runtime.version().feature() == 11 && element.getKind() == ElementKind.PACKAGE) { - // Avoid JDK 11 bug: - // https://issues.apache.org/jira/browse/LUCENE-9747 - // https://bugs.openjdk.java.net/browse/JDK-8224082 - reporter.print(Diagnostic.Kind.ERROR, fullMessage.toString()); - } else { - reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString()); - } + reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString()); } } diff --git a/build.gradle b/build.gradle index 3b3c0e0999e..9439438f9a8 100644 --- a/build.gradle +++ b/build.gradle @@ -20,19 +20,24 @@ import java.time.format.DateTimeFormatter plugins { id 'base' - id 'com.palantir.consistent-versions' version '2.16.0' - id 'org.owasp.dependencycheck' version '9.0.8' - id 'ca.cutterslade.analyze' version '1.10.0' - id 'de.thetaphi.forbiddenapis' version '3.7' apply false - id 'de.undercouch.download' version '5.5.0' apply false - id 'net.ltgt.errorprone' version '3.1.0' apply false - id 'com.diffplug.spotless' version '6.5.2' apply false - id 'com.github.node-gradle.node' version '7.0.1' apply false + id 'solr.build-infra' + + alias(libs.plugins.carrotsearch.dependencychecks) + alias(libs.plugins.owasp.dependencycheck) + alias(libs.plugins.cutterslade.analyze) + alias(libs.plugins.benmanes.versions) + alias(libs.plugins.littlerobots.versioncatalogupdate) apply false + alias(libs.plugins.thetaphi.forbiddenapis) apply false + alias(libs.plugins.undercouch.download) apply false + 
alias(libs.plugins.ltgt.errorprone) apply false + alias(libs.plugins.diffplug.spotless) apply false + alias(libs.plugins.nodegradle.node) apply false + alias(libs.plugins.openapi.generator) apply false } // Declare default Java versions for the entire project and for SolrJ separately -rootProject.ext.minJavaVersionDefault = JavaVersion.VERSION_11 -rootProject.ext.minJavaVersionSolrJ = JavaVersion.VERSION_11 +rootProject.ext.minJavaVersionDefault = JavaVersion.toVersion(libs.versions.java.min.get()) +rootProject.ext.minJavaVersionSolrJ = JavaVersion.toVersion(libs.versions.java.solrj.get()) apply from: file('gradle/globals.gradle') @@ -97,7 +102,7 @@ ext { } luceneBaseVersionProvider = project.provider { - def luceneVersion = getVersion('org.apache.lucene:lucene-core') + def luceneVersion = libs.versions.apache.lucene.get() def m = (luceneVersion =~ /^\d+\.\d+\.\d+\b/) if (!m) { throw GradleException("Can't strip base version from " + luceneVersion) @@ -108,14 +113,13 @@ ext { } } -apply from: file('buildSrc/scriptDepVersions.gradle') - // Include smaller chunks configuring dedicated build areas. // Some of these intersect or add additional functionality. // The order of inclusion of these files shouldn't matter (but may // if the build file is incorrectly written and evaluates something // eagerly). +apply from: file('gradle/conventions.gradle') apply from: file('gradle/generation/local-settings.gradle') // Ant-compatibility layer: apply folder layout early so that @@ -129,7 +133,6 @@ apply from: file('gradle/java/javac.gradle') apply from: file('gradle/testing/defaults-tests.gradle') apply from: file('gradle/testing/randomization.gradle') apply from: file('gradle/testing/fail-on-no-tests.gradle') -apply from: file('gradle/testing/fail-on-unsupported-jdk.gradle') apply from: file('gradle/testing/alternative-jdk-support.gradle') apply from: file('gradle/java/jar-manifest.gradle') apply from: file('gradle/testing/retry-test.gradle') @@ -148,7 +151,6 @@ apply from: file('gradle/validation/precommit.gradle') apply from: file('gradle/validation/forbidden-apis.gradle') apply from: file('gradle/validation/jar-checks.gradle') apply from: file('gradle/validation/git-status.gradle') -apply from: file('gradle/validation/versions-props-sorted.gradle') apply from: file('gradle/validation/validate-source-patterns.gradle') apply from: file('gradle/validation/rat-sources.gradle') apply from: file('gradle/validation/owasp-dependency-check.gradle') @@ -159,9 +161,17 @@ apply from: file('gradle/validation/validate-log-calls.gradle') apply from: file('gradle/validation/check-broken-links.gradle') apply from: file('gradle/validation/solr.config-file-sanity.gradle') - +apply from: file('gradle/validation/dependencies.gradle') apply from: file('gradle/validation/spotless.gradle') +// Wire up included builds to some validation tasks. 
+rootProject.tasks.named("tidy").configure { + dependsOn gradle.includedBuilds*.task(":tidy") +} +rootProject.tasks.named("clean").configure { + dependsOn gradle.includedBuilds*.task(":clean") +} + // Source or data regeneration tasks apply from: file('gradle/generation/regenerate.gradle') apply from: file('gradle/generation/javacc.gradle') @@ -195,7 +205,6 @@ apply from: file('gradle/hacks/global-exclude-dependencies.gradle') apply from: file('gradle/hacks/gradle-archives.gradle') apply from: file('gradle/hacks/wipe-temp.gradle') -apply from: file('gradle/hacks/hashmapAssertions.gradle') apply from: file('gradle/hacks/turbocharge-jvm-opts.gradle') apply from: file('gradle/hacks/dummy-outputs.gradle') diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java b/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java deleted file mode 100644 index 44cd09b33f4..00000000000 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/ErrorReportingTestListener.java +++ /dev/null @@ -1,275 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.gradle; - -import java.io.*; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; -import java.util.regex.Pattern; - -import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter; -import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; -import org.gradle.api.tasks.testing.TestDescriptor; -import org.gradle.api.tasks.testing.TestListener; -import org.gradle.api.tasks.testing.TestOutputEvent; -import org.gradle.api.tasks.testing.TestOutputListener; -import org.gradle.api.tasks.testing.TestResult; -import org.gradle.api.tasks.testing.logging.TestLogging; - -/** - * An error reporting listener that queues test output streams and displays them - * on failure. - *

- * Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed). - */ -public class ErrorReportingTestListener implements TestOutputListener, TestListener { - private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class); - - private final TestExceptionFormatter formatter; - private final Map outputHandlers = new ConcurrentHashMap<>(); - private final Path spillDir; - private final Path outputsDir; - private final boolean verboseMode; - - public ErrorReportingTestListener(TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) { - this.formatter = new FullExceptionFormatter(testLogging); - this.spillDir = spillDir; - this.outputsDir = outputsDir; - this.verboseMode = verboseMode; - } - - @Override - public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) { - handlerFor(testDescriptor).write(outputEvent); - } - - @Override - public void beforeSuite(TestDescriptor suite) { - // noop. - } - - @Override - public void beforeTest(TestDescriptor testDescriptor) { - // Noop. - } - - @Override - public void afterSuite(final TestDescriptor suite, TestResult result) { - if (suite.getParent() == null || suite.getName().startsWith("Gradle")) { - return; - } - - TestKey key = TestKey.of(suite); - try { - OutputHandler outputHandler = outputHandlers.get(key); - if (outputHandler != null) { - long length = outputHandler.length(); - if (length > 1024 * 1024 * 10) { - LOGGER.warn(String.format(Locale.ROOT, "WARNING: Test %s wrote %,d bytes of output.", - suite.getName(), - length)); - } - } - - boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE); - boolean dumpOutput = echoOutput; - - // If the test suite failed, report output. - if (dumpOutput || echoOutput) { - Files.createDirectories(outputsDir); - Path outputLog = outputsDir.resolve(getOutputLogName(suite)); - - // Save the output of a failing test to disk. - try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) { - if (outputHandler != null) { - outputHandler.copyTo(w); - } - } - - if (echoOutput && !verboseMode) { - synchronized (this) { - System.out.println(""); - System.out.println(suite.getClassName() + " > test suite's output saved to " + outputLog + ", copied below:"); - try (BufferedReader reader = Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) { - char[] buf = new char[1024]; - int len; - while ((len = reader.read(buf)) >= 0) { - System.out.print(new String(buf, 0, len)); - } - System.out.println(); - } - } - } - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } finally { - OutputHandler handler = outputHandlers.remove(key); - if (handler != null) { - try { - handler.close(); - } catch (IOException e) { - LOGGER.error("Failed to close output handler for: " + key, e); - } - } - } - } - - private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+"); - - public static String getOutputLogName(TestDescriptor suite) { - return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_"); - } - - @Override - public void afterTest(TestDescriptor testDescriptor, TestResult result) { - // Include test failure exception stacktrace(s) in test output log. 
- if (result.getResultType() == TestResult.ResultType.FAILURE) { - if (result.getExceptions().size() > 0) { - String message = formatter.format(testDescriptor, result.getExceptions()); - handlerFor(testDescriptor).write(message); - } - } - } - - private OutputHandler handlerFor(TestDescriptor descriptor) { - // Attach output of leaves (individual tests) to their parent. - if (!descriptor.isComposite()) { - descriptor = descriptor.getParent(); - } - return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler()); - } - - public static class TestKey { - private final String key; - - private TestKey(String key) { - this.key = key; - } - - public static TestKey of(TestDescriptor d) { - StringBuilder key = new StringBuilder(); - key.append(d.getClassName()); - key.append("::"); - key.append(d.getName()); - key.append("::"); - key.append(d.getParent() == null ? "-" : d.getParent().toString()); - return new TestKey(key.toString()); - } - - @Override - public boolean equals(Object o) { - return o != null && - o.getClass() == this.getClass() && - Objects.equals(((TestKey) o).key, key); - } - - @Override - public int hashCode() { - return key.hashCode(); - } - - @Override - public String toString() { - return key; - } - } - - private class OutputHandler implements Closeable { - // Max single-line buffer before automatic wrap occurs. - private static final int MAX_LINE_WIDTH = 1024 * 4; - - private final SpillWriter buffer; - - // internal stream. - private final PrefixedWriter sint; - // stdout - private final PrefixedWriter sout; - // stderr - private final PrefixedWriter serr; - - // last used stream (so that we can flush it properly and prefixes are not screwed up). - private PrefixedWriter last; - - public OutputHandler() { - buffer = new SpillWriter(() -> { - try { - return Files.createTempFile(spillDir, "spill-", ".tmp"); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); - - Writer sink = buffer; - if (verboseMode) { - sink = new StdOutTeeWriter(buffer); - } - - sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH); - sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH); - serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH); - last = sint; - } - - public void write(TestOutputEvent event) { - write((event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr), event.getMessage()); - } - - public void write(String message) { - write(sint, message); - } - - public long length() throws IOException { - return buffer.length(); - } - - private void write(PrefixedWriter out, String message) { - try { - if (out != last) { - last.completeLine(); - last = out; - } - out.write(message); - } catch (IOException e) { - throw new UncheckedIOException("Unable to write to test output.", e); - } - } - - public void copyTo(Writer out) throws IOException { - flush(); - buffer.copyTo(out); - } - - public void flush() throws IOException { - sout.completeLine(); - serr.completeLine(); - buffer.flush(); - } - - @Override - public void close() throws IOException { - buffer.close(); - } - } -} diff --git a/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java b/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java deleted file mode 100644 index 20a4c8524f6..00000000000 --- a/buildSrc/src/main/java/org/apache/lucene/gradle/StdOutTeeWriter.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.gradle; - -import java.io.IOException; -import java.io.PrintStream; -import java.io.Writer; - -class StdOutTeeWriter extends Writer { - private final Writer delegate; - private final PrintStream out = System.out; - - public StdOutTeeWriter(Writer delegate) { - this.delegate = delegate; - } - - @Override - public void write(int c) throws IOException { - delegate.write(c); - out.write(c); - } - - @Override - public void write(char[] cbuf) throws IOException { - delegate.write(cbuf); - out.print(cbuf); - } - - @Override - public void write(String str) throws IOException { - delegate.write(str); - out.print(str); - } - - @Override - public void write(String str, int off, int len) throws IOException { - delegate.write(str, off, len); - out.append(str, off, len); - } - - @Override - public Writer append(CharSequence csq) throws IOException { - delegate.append(csq); - out.append(csq); - return this; - } - - @Override - public Writer append(CharSequence csq, int start, int end) throws IOException { - delegate.append(csq, start, end); - out.append(csq, start, end); - return this; - } - - @Override - public Writer append(char c) throws IOException { - delegate.append(c); - out.append(c); - return this; - } - - @Override - public void write(char[] cbuf, int off, int len) throws IOException { - delegate.write(cbuf, off, len); - out.print(new String(cbuf, off, len)); - } - - @Override - public void flush() throws IOException { - delegate.flush(); - out.flush(); - } - - @Override - public void close() throws IOException { - delegate.close(); - // Don't close the actual output. - } -} diff --git a/dev-docs/FAQ.adoc b/dev-docs/FAQ.adoc index b25d81bd6a9..820e952b5f3 100644 --- a/dev-docs/FAQ.adoc +++ b/dev-docs/FAQ.adoc @@ -97,3 +97,7 @@ If you don't yet have an account, you have to ask for one in the 'users' or 'dev * http://fucit.org/solr-jenkins-reports/failure-report.html * https://ge.apache.org/scans/tests?search.relativeStartTime=P90D&search.rootProjectNames=solr* * https://lists.apache.org[Solr mailing list archives especially builds] + +=== How can I build the JavaDoc's and the Reference Guide? + +Run `./gradlew buildLocalSite` to build the Javadoc's and the Reference Guide site. diff --git a/dev-docs/apis.adoc b/dev-docs/apis.adoc index 9e710f7e0a4..fa0565a14d9 100644 --- a/dev-docs/apis.adoc +++ b/dev-docs/apis.adoc @@ -81,4 +81,3 @@ A good example for each of these steps can be seen in Solr's v2 "add-replica-pro While we've settled on JAX-RS as our framework for defining v2 APIs going forward, Solr still retains many v2 APIs that were written using an older homegrown framework. This framework defines APIs using annotations (e.g. `@EndPoint`) similar to those used by JAX-RS, but lacks the full range of features and 3rd-party tooling. 
We're in the process of migrating these API definitions to JAX-RS and hope to remove all support for this legacy framework in a future release. - diff --git a/dev-docs/asf-jenkins.adoc b/dev-docs/asf-jenkins.adoc new file mode 100644 index 00000000000..b3c59b6fd9e --- /dev/null +++ b/dev-docs/asf-jenkins.adoc @@ -0,0 +1,73 @@ += ASF Jenkins Setup +:toc: left + +The Solr project uses a Jenkins instance provided by the Apache Software Foundation ("ASF") for running tests, validation, etc. + +This file aims to document our [ASF Jenkins](https://ci-builds.apache.org/job/Solr/) usage and administration, to prevent it from becoming "tribal knowledge" understood by just a few. + +== Jobs + +We run a number of jobs on Jenkins, each validating an overlapping set of concerns: + +* `Solr-Artifacts-*` - daily jobs that run `./gradlew assemble` to ensure that build artifacts (except docker images) can be created successfully +* `Solr-Lint-*` - daily jobs that run static analysis (i.e. `precommit` and `check -x test`) on a branch +* `Solr-Test-*` - "hourly" jobs that run all (non-integration) tests (i.e. `./gradlew test`) +* `Solr-TestIntegration-*` - daily jobs that run project integration tests (i.e. `./gradlew integrationTests`) +* `Solr-Docker-Nightly-*` - daily jobs that `./gradlew testDocker dockerPush` to validate docker image packaging. Snapshot images are pushed to hub.docker.com +* `Solr-reference-guide-*` - daily jobs that build the Solr reference guide via `./gradlew checkSite` and push the resulting artifact to the staging/preview site `nightlies.apache.org` +* `Solr-Smoketest-*` - daily jobs that produce a snapshot release (via the `assembleRelease` task) and run the release smoketester + +Most jobs that validate particular build artifacts are run "daily", which is sufficient to prevent any large breaks from creeping into the build. +On the other hand, jobs that run tests are triggered "hourly" in order to squeeze as many test runs as possible out of our Jenkins hardware. +This is a necessary consequence of Solr's heavy use of randomization in its test-suite. +"Hourly" scheduling ensures that a test run is either currently running or in the build queue at all times, and enables us to get the maximum data points from our hardware. + +== Jenkins Agents + +All Solr jobs run on Jenkins agents marked with the 'solr' label. +Currently, this maps to two Jenkins agents: + +* `lucene-solr-1` - available at lucene1-us-west.apache.org +* `lucene-solr-2` - available (confusingly) at lucene-us-west.apache.org + +These agents are "project-specific" VMs shared by the Lucene and Solr projects. +That is: they are VMs requested by a project for their exclusive use. +(INFRA policy appears to be that each Apache project may request 1 dedicated VM; it's unclear how Solr ended up with 2.) + +Maintenance of these agent VMs falls into a bit of a gray area. +INFRA will still intervene when asked: to reboot nodes, to deploy OS upgrades, etc. +But some burden also falls on Lucene and Solr as project teams to monitor the the VMs and keep them healthy. + +=== Accessing Jenkins Agents + +With a few steps, Solr committers can access our project's Jenkins agent VMs via SSH to troubleshoot and resolve issues. + +1. Ensure your account on id.apache.org has an SSH key associated with it. +2. Ask INFRA to give your Apache ID SSH access to these boxes. (See [this JIRA ticket](https://issues.apache.org/jira/browse/INFRA-3682) for an example.) +3. 
SSH into the desired box with: `ssh @$HOSTNAME` (where `$HOSTNAME` is either `lucene1-us-west.apache.org` or `lucene-us-west.apache.org`) + +Often, SSH access on the boxes is not sufficient, and administrators require "root" access to diagnose and solve problems. +Sudo/su priveleges can be accessed via a one-time pad ("OTP") challenge, managed by the "Orthrus PAM" module. +Users in need of root access can perform the following steps: + +1. Open the ASF's [OTP Generator Tool](https://selfserve.apache.org/otp-calculator.html) in your browser of choice +2. Run `ortpasswd` on the machine. This will print out a OTP "challenge" (e.g. `otp-md5 497 lu6126`) and provide a password prompt. This password prompt should be given a OTP password, generated in steps 3-5 below. +3. Copy the "challenge" from the previous step into the relevant field on the "OTP Generator Tool" form. +4. Choose a password to use for OTP Challenges (or recall one you've used in the past), and type this into the relevant field on the "OTP Generator Tool" form. +5. Click "Compute", and copy the first line from the "Response" box into your SSH session's password prompt. You're now established in the "Orthrus PAM" system. +6. Run a command requesting `su` escalation (e.g. `sudo su -`). This should print another "challenge" and password prompt. Repeat steps 3-5. + +If this fails at any point, open a ticket with INFRA. +You may need to be added to the 'sudoers' file for the VM(s) in question. + +=== Known Jenkins Issues + +One recurring problem with the Jenkins agents is that they periodically run out of disk-space. +Usually this happens when enough "workspaces" are orphaned or left behind, consuming all of the agent's disk space. + +Solr Jenkins jobs are currently configured to clean up the previous workspace at the *start* of the subsequent run. +This avoids orphans in the common case but leaves workspaces behind any time a job is renamed or deleted (as happens during the Solr release process). + +Luckily, this has an easy fix: SSH into the agent VM and delete any workspaces no longer needed in `/home/jenkins/jenkins-slave/workspace/Solr`. +Any workspace that doesn't correspond to a [currently existing job](https://ci-builds.apache.org/job/Solr/) can be safely deleted. +(It may also be worth comparing the Lucene workspaces in `/home/jenkins/jenkins-slave/workspace/Lucene` to [that project's list of jobs](https://ci-builds.apache.org/job/Lucene/).) diff --git a/dev-docs/dependency-upgrades.adoc b/dev-docs/dependency-upgrades.adoc index 9f7372cc1bd..aa5cd93a2de 100644 --- a/dev-docs/dependency-upgrades.adoc +++ b/dev-docs/dependency-upgrades.adoc @@ -16,30 +16,57 @@ // specific language governing permissions and limitations // under the License. -Solr has lots of 3rd party dependencies, defined mainly in `versions.props`. +Solr has lots of 3rd party dependencies, defined in `gradle/libs.versions.toml`. Keeping them up-to-date is crucial for a number of reasons: * minimizing the risk of critical CVE vulnerabilities by staying on a recent and supported version * avoiding "dependency hell", that can arise from falling too far behind -Read the https://github.com/apache/solr/blob/main/help/dependencies.txt[help/dependencies.txt] file for an in-depth explanation of how gradle is deployed in Solr, using -https://github.com/palantir/gradle-consistent-versions[Gradle consistent-versions] plugin. 
+Read the https://github.com/apache/solr/blob/main/help/dependencies.txt[help/dependencies.txt] file for an in-depth +explanation of how dependencies are managed. == Manual dependency upgrades In order to upgrade a dependency, you need to run through a number of steps: 1. Identify the available versions from e.g. https://search.maven.org[Maven Central] -2. Update the version in `versions.props` file -3. Run `./gradlew --write-locks` to re-generate `versions.lock`. Note that this may cause a cascading effect where +2. Update the version in `gradle/libs.versions.toml` file +3. Run `./gradlew writeLocks` to re-generate `versions.lock`. Note that this may cause a cascading effect where the locked version of other dependencies also change. -4. Run `./gradlew updateLicenses` to re-generate SHA1 checksums of the new jar files. -5. Once in a while, a new version of a dependency will transitively bring in brand-new dependencies. +4. In case of a conflict, resolve the conflict according to `help/dependencies.txt` +5. Check if there are any constraints that are obsolete after the dependency update +6. Update the license and notice files of the changed dependencies. See `help/dependencies.txt` for + details. +7. Run `./gradlew updateLicenses` to re-generate SHA1 checksums of the new jar files. +8. Once in a while, a new version of a dependency will transitively bring in brand-new dependencies. You'll need to decide whether to keep or exclude them. See `help/dependencies.txt` for details. +=== Reviewing Constraints + +The constraints are defined in gradle/validation/dependencies.gradle. There, if the updated dependency is listed, +the constraint can be reviewed, updated or removed. + +The constraints fall into two "groups". In the first group there are dependency constraints from dependencies +that our project directly includes and require version alignment to sync the versions across all transitive +dependencies. In the second group are dependencies that are only present as transitive dependencies. +There, we try to follow the convention to provide additional information with "which dependencies use what version", +so that the next person reviewing the constraint does not have to look it up. However, this is quite time-consuming +to analyze the dependencies and therefore subject to change. + +In order to review a constraint, you have to check if the updated dependency is mentioned in any of the constraints, +either as a reason for another dependency constraint or as the constraint's dependency. Removing temporarily +a constraint, the task writeLocks will fail if the constraint is still required. + +This process and the constraints of dependencies.gradle are not optimal, as it is quite time-consuming and not obvious +by just looking at it. We just haven't found yet a more efficient way to maintain these constraints. + == Renovate bot Pull Requests + +The renovate bot may be replaced in the future with dependabot and this section may only be relevant for older +versions (<10.0). See https://lists.apache.org/thread/1sb9ttv3lp57z2yod1htx1fykp5sj73z for updates. + A member of the Solr community operates a Github bot running https://github.com/renovatebot/renovate[Renovate], which files Pull Requests to Solr with dependency upgrade proposals. The PRs are labeled `dependencies` and do include -changes resulting from `gradle --write-locks` and `updateLicenses`. +changes resulting from `./gradlew writeLocks` and `updateLicenses`. 
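As a concrete illustration of the manual upgrade steps above, a typical version bump looks like the following. `commons-io` is used only as an example catalog entry, and the target version is hypothetical:

```
# Step 2: bump the entry in gradle/libs.versions.toml, for example
#   commons-io = "2.15.1"  ->  commons-io = "2.16.0"     (target version hypothetical)
# Step 3: re-generate versions.lock (the locked versions of other dependencies may cascade)
./gradlew writeLocks
# Step 7: regenerate SHA1 checksums of the new jars
#         (after updating the license/notice files per step 6)
./gradlew updateLicenses
# Quick sanity check without running the full test suite
./gradlew check -x test
```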
Community members and committers can then review, and if manual changes are needed, help bring the PR to completion. For many dependencies, a changelog is included in the PR text, which may help guide the upgrade decision. diff --git a/dev-docs/lucene-upgrade.md b/dev-docs/lucene-upgrade.md index c9ee1a27802..e5ab91e7c9f 100644 --- a/dev-docs/lucene-upgrade.md +++ b/dev-docs/lucene-upgrade.md @@ -27,7 +27,7 @@ Create a new branch locally e.g. `git checkout -b lucene940 -t origin/main` for ## Build -### `versions.props` update +### `gradle/libs.versions.toml` update ``` - org.apache.lucene:*=9.3.0 @@ -37,7 +37,7 @@ Create a new branch locally e.g. `git checkout -b lucene940 -t origin/main` for ### `versions.lock` update ``` -gradlew --write-locks +gradlew :writeLocks ``` ### `solr/licenses` update diff --git a/dev-docs/running-in-docker.adoc b/dev-docs/running-in-docker.adoc index 202d0379b1e..e1e96dac244 100644 --- a/dev-docs/running-in-docker.adoc +++ b/dev-docs/running-in-docker.adoc @@ -8,7 +8,7 @@ To run Solr in a container and expose the Solr port, run: In order to start Solr in cloud mode, run the following. -`docker run -p 8983:8983 solr solr-fg -c` +`docker run -p 8983:8983 solr solr-fg` For documentation on using the official docker builds, please refer to the https://hub.docker.com/_/solr[DockerHub page]. Up-to-date documentation for running locally built images of this branch can be found in the xref:_running_solr_in_docker[local reference guide]. @@ -30,4 +30,4 @@ For more info on building an image, run: `./gradlew helpDocker` == Additional Information -You can find additional information in the https://solr.apache.org/guide/solr/latest/deployment-guide/solr-in-docker.html[Solr Ref Guide Docker Page] \ No newline at end of file +You can find additional information in the https://solr.apache.org/guide/solr/latest/deployment-guide/solr-in-docker.html[Solr Ref Guide Docker Page] diff --git a/dev-docs/solr-source-code.adoc b/dev-docs/solr-source-code.adoc index ad73858a478..5874a2e06ec 100644 --- a/dev-docs/solr-source-code.adoc +++ b/dev-docs/solr-source-code.adoc @@ -2,10 +2,10 @@ ## Building Solr from Source -Download the Java 11 JDK (Java Development Kit) or later. +Download the Java 21 JDK (Java Development Kit) or later. We recommend the OpenJDK distribution Eclipse Temurin available from https://adoptium.net/. You will need the JDK installed, and the $JAVA_HOME/bin (Windows: %JAVA_HOME%\bin) folder included on your command path. -To test this, issue a "java -version" command from your shell (command prompt) and verify that the Java version is 11 or later. +To test this, issue a "java -version" command from your shell (command prompt) and verify that the Java version is 21 or later. See the xref:jvms.adoc[JVM developer doc] for more information on Gradle and JVMs. Clone the latest Apache Solr source code directly from the Git repository: . diff --git a/dev-docs/v2-api-conventions.adoc b/dev-docs/v2-api-conventions.adoc index d2159239694..74a760c2bfd 100644 --- a/dev-docs/v2-api-conventions.adoc +++ b/dev-docs/v2-api-conventions.adoc @@ -66,6 +66,14 @@ For use within the v2 API, the four "popular" HTTP methods have the following se * `PUT` - used for idempotent resource modifications. * `DELETE` - Used to delete or cleanup resource +== Errors + +v2 APIs should be consistent in how they report errors. Throwing a `SolrException` will convey +1.the error code as the HTTP response status code, as `responseHeader.status` and as `error.code`, and +1.the error message as `error.msg`. 
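To make the error contract above concrete, a `SolrException` surfacing as an HTTP 400 would produce a response body shaped roughly as follows. The values are illustrative, and real responses may carry additional fields (such as error metadata) beyond the ones named above:

```json
{
  "responseHeader": { "status": 400 },
  "error": {
    "msg": "Collection name is required",
    "code": 400
  }
}
```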
+ +API calls that reference a specific resource (e.g. `specificCollName`, `specificAliasName`, `specificPropertyName` and others per the above list) that do not exist should return `SolrException.ErrorCode.NOT_FOUND` (HTTP 404). + == Exceptional Cases - "Command" APIs The pairing of semantic HTTP verbs and "resource"-based paths gives Solr an intuitive pattern for representing many operations, but not all. diff --git a/dev-tools/README.txt b/dev-tools/README.txt index db5a8e179b2..76ae0c52f3a 100644 --- a/dev-tools/README.txt +++ b/dev-tools/README.txt @@ -5,7 +5,6 @@ as to the usefulness of the tools. Description of dev-tools/ contents: -./missing-doclet -- JavaDoc validation doclet subproject ./doap/ -- Lucene and Solr project descriptors in DOAP RDF format. ./scripts/ -- Odds and ends for building releases, etc. ./test-patch/ -- Scripts for automatically validating patches diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf index acfbd01dbf6..c3494226dde 100644 --- a/dev-tools/doap/solr.rdf +++ b/dev-tools/doap/solr.rdf @@ -152,6 +152,13 @@ 9.0.0 + + + solr-8.11.4 + 2024-09-24 + 8.11.4 + + solr-8.11.3 diff --git a/dev-tools/scripts/cloud.sh b/dev-tools/scripts/cloud.sh index 3ab979d5ac3..9824fb3eea7 100755 --- a/dev-tools/scripts/cloud.sh +++ b/dev-tools/scripts/cloud.sh @@ -334,7 +334,7 @@ start(){ echo "Final NUM_NODES is $NUM_NODES" for i in `seq 1 $NUM_NODES`; do mkdir -p "${CLUSTER_WD}/n${i}" - argsArray=(-c -s $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \ + argsArray=(-c --solr-home $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \ -a "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=500${i} \ -Dsolr.log.dir=$CLUSTER_WD_FULL/n${i} $JVM_ARGS") FINAL_COMMAND="${SOLR}/bin/solr ${argsArray[@]}" diff --git a/dev-tools/scripts/releaseWizard.yaml b/dev-tools/scripts/releaseWizard.yaml index 5de7067d331..a1ccdc5cf44 100644 --- a/dev-tools/scripts/releaseWizard.yaml +++ b/dev-tools/scripts/releaseWizard.yaml @@ -391,7 +391,6 @@ groups: title: Create a new minor branch off the stable branch description: | In our case we'll create {{ release_branch }}. - Also edit `.asf.yaml` to add the new branch under `protected_branches`. types: - major - minor @@ -417,15 +416,37 @@ groups: - !Command cmd: git push --set-upstream origin {{ release_branch }} tee: true - - !Command - cmd: "{{ editor }} .asf.yaml" - comment: | - Add the new branch {{ release_branch }} under `protected_branches` in `.asf.yaml`. An editor will open. - stdout: true - - !Command - cmd: git add .asf.yaml && git commit -m "Add branch protection for {{ release_branch }}" && git push - logfile: commit-branch-protection.log - tee: true + - !Todo + id: protect_branches + title: Protect the new git branch(es) + vars: + branch_names: "{{ release_branch }}{% if release_type == 'major' %} and {{ stable_branch }}{% endif %}" + description: > + Edit `.asf.yaml` to protect new git branch(es) {{ branch_names }}. + types: + - major + - minor + depends: create_minor_branch + commands: !Commands + root_folder: '{{ git_checkout_folder }}' + commands_text: Run these commands to edit `.asf.yaml` and protect the new branch(es) + commands: + - !Command + cmd: git checkout main + tee: true + - !Command + cmd: git pull --ff-only + tee: true + - !Command + cmd: "{{ editor }} .asf.yaml" + comment: > + Add the newly created branch(es) {{ branch_names }} + under `protected_branches` in `.asf.yaml`. An editor will open. 
+ stdout: true + - !Command + cmd: git add .asf.yaml && git commit -m "Add branch protection for {{ branch_names }}" && git push + logfile: commit-branch-protection.log + tee: true - !Todo id: update_minor_branch_prerelease_antora title: Update Ref Guide Metadata for new Minor Branch @@ -1843,16 +1864,9 @@ groups: root_folder: '{{ git_checkout_folder }}' commands_text: | Run these commands to delete proposed versions from distribution directory. - Note, as long as we have some releases (7.x, 8.x) in Lucene dist repo and other - releases (9.0 ->) in the Solr dist repo, we may need to delete two places. - + WARNING: Validate that the proposal is correct! commands: - - !Command - cmd: | - svn rm -m "Stop publishing old Solr releases"{% for ver in mirrored_versions_to_delete %} https://dist.apache.org/repos/dist/release/lucene/solr/{{ ver }}{% endfor %} - logfile: svn-rm-solr.log - comment: Delete from Lucene dist area - !Command cmd: | svn rm -m "Stop publishing old Solr releases"{% for ver in mirrored_versions_to_delete %} https://dist.apache.org/repos/dist/release/solr/solr/{{ ver }}{% endfor %} diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py index 4ab97eb1f09..acfa109f2fc 100755 --- a/dev-tools/scripts/smokeTestRelease.py +++ b/dev-tools/scripts/smokeTestRelease.py @@ -628,7 +628,7 @@ def verifyUnpacked(java, artifact, unpackPath, gitRevision, version, testArgs): expected_src_root_folders = ['buildSrc', 'dev-docs', 'dev-tools', 'gradle', 'help', 'solr'] expected_src_root_files = ['build.gradle', 'gradlew', 'gradlew.bat', 'settings.gradle', 'versions.lock', 'versions.props'] expected_src_solr_files = ['build.gradle'] - expected_src_solr_folders = ['benchmark', 'bin', 'modules', 'api', 'core', 'docker', 'documentation', 'example', 'licenses', 'packaging', 'distribution', 'prometheus-exporter', 'server', 'solr-ref-guide', 'solrj', 'solrj-streaming', 'solrj-zookeeper', 'test-framework', 'webapp', '.gitignore', '.gitattributes'] + expected_src_solr_folders = ['benchmark', 'bin', 'modules', 'api', 'core', 'cross-dc-manager', 'docker', 'documentation', 'example', 'licenses', 'packaging', 'distribution', 'prometheus-exporter', 'server', 'solr-ref-guide', 'solrj', 'solrj-streaming', 'solrj-zookeeper', 'test-framework', 'webapp', '.gitignore', '.gitattributes'] is_in_list(in_root_folder, expected_src_root_folders) is_in_list(in_root_folder, expected_src_root_files) is_in_list(in_solr_folder, expected_src_solr_folders) @@ -638,7 +638,7 @@ def verifyUnpacked(java, artifact, unpackPath, gitRevision, version, testArgs): elif isSlim: is_in_list(in_root_folder, ['bin', 'docker', 'docs', 'example', 'licenses', 'server', 'lib']) else: - is_in_list(in_root_folder, ['bin', 'modules', 'docker', 'prometheus-exporter', 'docs', 'example', 'licenses', 'server', 'lib']) + is_in_list(in_root_folder, ['bin', 'modules', 'cross-dc-manager', 'docker', 'prometheus-exporter', 'docs', 'example', 'licenses', 'server', 'lib']) if len(in_root_folder) > 0: raise RuntimeError('solr: unexpected files/dirs in artifact %s: %s' % (artifact, in_root_folder)) @@ -774,8 +774,6 @@ def testSolrExample(binaryDistPath, javaPath): raise RuntimeError('Failed to run the techproducts example, check log for previous errors.') os.chdir('example') - print(' test utf8...') - run('sh ./exampledocs/test_utf8.sh http://localhost:8983/solr/techproducts', 'utf8.log') print(' run query...') s = load('http://localhost:8983/solr/techproducts/select/?q=video') if s.find('"numFound":3,') == -1: diff --git 
a/gradle/testing/fail-on-unsupported-jdk.gradle b/gradle/conventions.gradle similarity index 66% rename from gradle/testing/fail-on-unsupported-jdk.gradle rename to gradle/conventions.gradle index 7d94b709764..fabc9b4cc58 100644 --- a/gradle/testing/fail-on-unsupported-jdk.gradle +++ b/gradle/conventions.gradle @@ -15,18 +15,19 @@ * limitations under the License. */ -configure(rootProject) { - task ensureJdkSupported() { - doFirst { - if (System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("mac") && rootProject.runtimeJavaVersion == JavaVersion.VERSION_20) { - throw new GradleException("Tests cannot be run with JDK20 on Mac; see SOLR-16733 for more details.") - } +configure(allprojects) { + tasks.register("tidy").configure { + description "Applies formatters and cleanups to sources." + group "verification" } - } +} - allprojects { - tasks.withType(Test) { - dependsOn ":ensureJdkSupported" +// Locate script-relative resource folder. This is context-sensitive so pass +// the right buildscript (top-level). +configure(rootProject) { + ext { + scriptResources = { buildscript -> + return file(buildscript.sourceFile.absolutePath.replaceAll('.gradle$', "")) + } } - } } diff --git a/gradle/documentation/changes-to-html.gradle b/gradle/documentation/changes-to-html.gradle index 3b4ca69bf9d..af9d1b5fa9a 100644 --- a/gradle/documentation/changes-to-html.gradle +++ b/gradle/documentation/changes-to-html.gradle @@ -76,6 +76,13 @@ class ChangesToHtmlTask extends DefaultTask { def toHtml(File versionsFile) { def output = new ByteArrayOutputStream() + + // Check if the perl executable exists + if (!perlExists()) { + logger.warn("WARNING: Perl is not installed, skipping creating Changes.html") + return + } + def result = project.exec { executable project.externalTool("perl") standardInput changesFile.newInputStream() @@ -114,4 +121,14 @@ class ChangesToHtmlTask extends DefaultTask { throw new GradleException("Changes file ${changesFile} or Doap file ${changesDoapFile} not found.") } } + + def perlExists() { + try { + def process = "perl -v".execute() + process.waitFor() + return process.exitValue() == 0 + } catch (Exception e) { + return false + } + } } diff --git a/gradle/documentation/markdown.gradle b/gradle/documentation/markdown.gradle index 29d23d87c75..d9a890d72eb 100644 --- a/gradle/documentation/markdown.gradle +++ b/gradle/documentation/markdown.gradle @@ -33,10 +33,10 @@ buildscript { } dependencies { - classpath "com.vladsch.flexmark:flexmark:${scriptDepVersions['flexmark']}" - classpath "com.vladsch.flexmark:flexmark-ext-abbreviation:${scriptDepVersions['flexmark']}" - classpath "com.vladsch.flexmark:flexmark-ext-attributes:${scriptDepVersions['flexmark']}" - classpath "com.vladsch.flexmark:flexmark-ext-autolink:${scriptDepVersions['flexmark']}" + classpath libs.flexmark.flexmark + classpath libs.flexmark.extensions.abbreviation + classpath libs.flexmark.extensions.attributes + classpath libs.flexmark.extensions.autolink } } diff --git a/gradle/documentation/pull-lucene-javadocs.gradle b/gradle/documentation/pull-lucene-javadocs.gradle index 5fdc4a70040..17985d88f1b 100644 --- a/gradle/documentation/pull-lucene-javadocs.gradle +++ b/gradle/documentation/pull-lucene-javadocs.gradle @@ -45,11 +45,11 @@ configure(project(":solr:documentation")) { // from all Solr javadocs?) then perhaps we can find a way to build this list programatically? 
// - If these javadocs are (only every) consumed by the ref guide only, then these deps & associated tasks // should just be moved to the ref-guide build.gradle - javadocs group: 'org.apache.lucene', name: 'lucene-core', classifier: 'javadoc' - javadocs group: 'org.apache.lucene', name: 'lucene-analysis-common', classifier: 'javadoc' - javadocs group: 'org.apache.lucene', name: 'lucene-analysis-stempel', classifier: 'javadoc' - javadocs group: 'org.apache.lucene', name: 'lucene-queryparser', classifier: 'javadoc' - javadocs group: 'org.apache.lucene', name: 'lucene-spatial-extras', classifier: 'javadoc' + javadocs variantOf(libs.apache.lucene.core) { classifier 'javadoc' } + javadocs variantOf(libs.apache.lucene.analysis.common) { classifier 'javadoc' } + javadocs variantOf(libs.apache.lucene.analysis.stempel) { classifier 'javadoc' } + javadocs variantOf(libs.apache.lucene.queryparser) { classifier 'javadoc' } + javadocs variantOf(libs.apache.lucene.spatialextras) { classifier 'javadoc' } } diff --git a/gradle/documentation/render-javadoc.gradle b/gradle/documentation/render-javadoc.gradle index bd90ad35426..6c637e540df 100644 --- a/gradle/documentation/render-javadoc.gradle +++ b/gradle/documentation/render-javadoc.gradle @@ -32,7 +32,7 @@ allprojects { missingdoclet "org.apache.solr.tools:missing-doclet" } - ext { + project.ext { relativeDocPath = project.path.replaceFirst(/:\w+:/, "").replace(':', '/') } diff --git a/gradle/generation/javacc.gradle b/gradle/generation/javacc.gradle index 54fc7e91359..0b70ba656ee 100644 --- a/gradle/generation/javacc.gradle +++ b/gradle/generation/javacc.gradle @@ -26,7 +26,7 @@ configure(rootProject) { } dependencies { - javacc "net.java.dev.javacc:javacc:${scriptDepVersions['javacc']}" + javacc libs.javacc.javacc } task javacc() { diff --git a/gradle/globals.gradle b/gradle/globals.gradle index 30eaa0857ab..d8a99de69c2 100644 --- a/gradle/globals.gradle +++ b/gradle/globals.gradle @@ -37,7 +37,7 @@ allprojects { // so :solr:core will have solr-core.jar, etc. project.archivesBaseName = project.path.replaceAll("^:", "").replace(':', '-') - ext { + project.ext { // Utility method to support passing overrides via -P or -D. propertyOrDefault = { propName, defValue -> def result @@ -173,5 +173,6 @@ allprojects { // Assign different java version for client-side modules 'api' and 'solrj*' var isSolrJ = project.name.matches("^(solrj.*|api)\$") minJavaVersion = isSolrJ ? 
rootProject.minJavaVersionSolrJ : rootProject.minJavaVersionDefault + minJavaTestVersion = rootProject.minJavaVersionDefault } } diff --git a/gradle/ide/eclipse.gradle b/gradle/ide/eclipse.gradle index a088c8b87a5..d7d453c39d9 100644 --- a/gradle/ide/eclipse.gradle +++ b/gradle/ide/eclipse.gradle @@ -21,65 +21,68 @@ import org.gradle.plugins.ide.eclipse.model.ClasspathEntry def resources = scriptResources(buildscript) configure(rootProject) { - apply plugin: "eclipse" + plugins.withType(JavaPlugin) { + apply plugin: "eclipse" - def relativize = { other -> rootProject.rootDir.relativePath(other).toString() } + def eclipseJavaVersion = propertyOrDefault("eclipse.javaVersion", libs.versions.java.min.get()) + def relativize = { other -> rootProject.rootDir.relativePath(other).toString() } - eclipse { - project { - name = "Apache Solr ${version}" - } + eclipse { + project { + name = "Apache Solr ${version}" + } - classpath { - downloadSources = true - downloadJavadoc = true - defaultOutputDir = file('build/eclipse') + classpath { + downloadSources = true + downloadJavadoc = true + defaultOutputDir = file('build/eclipse') - file { - beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } } + file { + beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } } - whenMerged { classpath -> - def projects = allprojects.findAll { prj -> - return prj.plugins.hasPlugin(JavaPlugin) && - prj.path != ":solr:solr-ref-guide" - } - - Set sources = [] - Set jars = [] - projects.each { prj -> - prj.sourceSets.each { sourceSet -> - sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) } - sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) } + whenMerged { classpath -> + def projects = allprojects.findAll { prj -> + return prj.plugins.hasPlugin(JavaPlugin) && + prj.path != ":solr:solr-ref-guide" } - // This is hacky - we take the resolved compile classpath and just - // include JAR files from there. We should probably make it smarter - // by looking at real dependencies. But then: this Eclipse configuration - // doesn't really separate sources anyway so why bother. - jars += prj.configurations.compileClasspath.resolve() - jars += prj.configurations.testCompileClasspath.resolve() - } + Set sources = [] + Set jars = [] + projects.each { prj -> + prj.sourceSets.each { sourceSet -> + sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) } + sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) } + } + + // This is hacky - we take the resolved compile classpath and just + // include JAR files from there. We should probably make it smarter + // by looking at real dependencies. But then: this Eclipse configuration + // doesn't really separate sources anyway so why bother. 
+ jars += prj.configurations.compileClasspath.resolve() + jars += prj.configurations.testCompileClasspath.resolve() + } - classpath.entries += sources.sort().collect {name -> new SourceFolder(name, "build/eclipse/" + name) } - classpath.entries += jars.unique().findAll { location -> location.isFile() }.collect { location -> - new LibEntry(location.toString()) + classpath.entries += sources.sort().collect {name -> new SourceFolder(name, "build/eclipse/" + name) } + classpath.entries += jars.unique().findAll { location -> location.isFile() }.collect { location -> + new LibEntry(location.toString()) + } } } } } jdt { - sourceCompatibility = rootProject.minJavaVersionDefault - targetCompatibility = rootProject.minJavaVersionDefault - javaRuntimeName = "JavaSE-${rootProject.minJavaVersionDefault}" + sourceCompatibility = eclipseJavaVersion + targetCompatibility = eclipseJavaVersion + javaRuntimeName = "JavaSE-${eclipseJavaVersion}" } - } - eclipseJdt { - doLast { - project.sync { - from rootProject.file("${resources}/dot.settings") - into rootProject.file(".settings") + eclipseJdt { + doLast { + project.sync { + from rootProject.file("${resources}/dot.settings") + into rootProject.file(".settings") + } } } } diff --git a/gradle/java/javac.gradle b/gradle/java/javac.gradle index 53320cc01c0..ec33f977c64 100644 --- a/gradle/java/javac.gradle +++ b/gradle/java/javac.gradle @@ -19,14 +19,19 @@ allprojects { plugins.withType(JavaPlugin) { - sourceCompatibility = project.minJavaVersion - targetCompatibility = project.minJavaVersion - - // Use 'release' flag instead of 'source' and 'target' - tasks.withType(JavaCompile) { - options.compilerArgs += ["--release", project.minJavaVersion.toString()] - } - + // Use 'release' flag instead of 'source' and 'target' + tasks.withType(JavaCompile) { + compileTestJava { + sourceCompatibility = project.minJavaTestVersion + targetCompatibility = project.minJavaTestVersion + options.compilerArgs += ["--release", project.minJavaTestVersion.toString()] + } + compileJava { + sourceCompatibility = project.minJavaVersion + targetCompatibility = project.minJavaVersion + options.compilerArgs += ["--release", project.minJavaVersion.toString()] + } + } // Configure warnings. tasks.withType(JavaCompile) { options.encoding = "UTF-8" @@ -51,22 +56,12 @@ allprojects { "-Xdoclint:all/protected", "-Xdoclint:-missing", "-Xdoclint:-accessibility", + "-Xlint:synchronization", + "-Xlint:text-blocks", "-proc:none", // proc:none was added because of LOG4J2-1925 / JDK-8186647 + "-Xlint:removal" ] - // enable some warnings only relevant to newer language features - if (rootProject.runtimeJavaVersion >= JavaVersion.VERSION_15) { - options.compilerArgs += [ - "-Xlint:text-blocks", - ] - } - - if (rootProject.runtimeJavaVersion >= JavaVersion.VERSION_16) { - options.compilerArgs += [ - "-Xlint:synchronization", - ] - } - if (propertyOrDefault("javac.failOnWarnings", true).toBoolean()) { options.compilerArgs += "-Werror" } diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml new file mode 100644 index 00000000000..685fe012216 --- /dev/null +++ b/gradle/libs.versions.toml @@ -0,0 +1,459 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +[versions] +adobe-testing-s3mock = "2.17.0" +amazon-awssdk = "2.26.19" +# @keep Antora version used in ref-guide +antora = "3.1.4" +# @keep Most recent commit as of 2022-06-24, this repo does not have tags +antora-default-ui = "51ad811622394027afb4e182c2fdabc235ae04dd" +# @keep Antora Lunr extensions version used in ref-guide +antora-lunr-extension = "1.0.0-alpha.8" +apache-calcite = "1.37.0" +apache-calcite-avatica = "1.25.0" +apache-commons-collections4 = "4.4" +apache-commons-compress = "1.26.1" +apache-commons-configuration2 = "2.11.0" +apache-commons-exec = "1.4.0" +apache-commons-lang3 = "3.15.0" +apache-commons-math3 = "3.6.1" +# @keep for version alignment +apache-commons-text = "1.12.0" +apache-curator = "5.7.1" +apache-hadoop = "3.4.0" +apache-hadoop-thirdparty = "1.2.0" +apache-httpcomponents-httpclient = "4.5.14" +apache-httpcomponents-httpcore = "4.4.16" +apache-httpcomponents-httpmime = "4.5.14" +apache-kafka = "3.7.1" +apache-log4j = "2.21.0" +apache-lucene = "9.12.1" +apache-opennlp = "1.9.4" +apache-poi = "5.2.2" +apache-rat = "0.15" +apache-tika = "1.28.5" +apache-tomcat = "6.0.53" +apache-zookeeper = "3.9.2" +# @keep for version alignment +apiguardian = "1.1.2" +aqute-bnd = "6.4.1" +# @keep Asciidoctor mathjax version used in ref-guide +asciidoctor-mathjax = "0.0.9" +# @keep Asciidoctor tabs version used in ref-guide +asciidoctor-tabs = "1.0.0-beta.6" +# @keep bats-assert (node) version used in packaging +bats-assert = "2.0.0" +# @keep bats-core (node) version used in packaging +bats-core = "1.8.2" +# @keep bats-file (node) version used in packaging +bats-file = "0.3.0" +bc-jose4j = "0.9.6" +benmanes-caffeine = "3.1.8" +benmanes-versions = "0.51.0" +bouncycastle = "1.78.1" +# @keep Browserify version used in ref-guide +browserify = "17.0.0" +carrot2-core = "4.5.1" +carrotsearch-dependencychecks = "0.0.9" +carrotsearch-hppc = "0.10.0" +carrotsearch-randomizedtesting = "2.8.1" +# @keep for version alignment +checkerframework = "3.44.0" +codehaus-woodstox = "4.2.2" +commons-cli = "1.9.0" +commons-codec = "1.17.1" +commons-collections = "3.2.2" +commons-io = "2.15.1" +cutterslade-analyze = "1.10.0" +cybozulabs-langdetect = "1.1-20120112" +diffplug-spotless = "6.5.2" +dropwizard-metrics = "4.2.26" +eclipse-ecj = "3.39.0" +eclipse-jetty = "10.0.22" +eclipse-jettytoolchain = "4.0.6" +# @keep jgit version used by git-status.gradle +eclipse-jgit = "6.7.0.202309050840-r" +fasterxml = "2.18.0" +fasterxml-woodstox = "7.0.0" +# @keep Flexmark used in classpath +flexmark = "0.64.8" +google-api-gax = "2.33.0" +# @keep for version alignment +google-api-grpc-proto = "2.41.0" +google-auth = "1.19.0" +# @keep for version alignment +google-autovalue = "1.10.4" +google-cloud-bom = "0.224.0" +google-cloud-core = "2.23.0" +google-cloud-nio = "0.127.3" +google-cloud-storage = "2.27.0" +google-errorprone = "2.31.0" +# @keep for version alignment +google-failureaccess = "1.0.2" +# @keep for version alignment +google-gson = "2.11.0" +google-guava = "33.1.0-jre" +# @keep for version alignment +google-j2objc = "3.0.0" +# @keep This is GJF version for spotless/ tidy. 
+google-javaformat = "1.18.1" +# @keep for version alignment +google-protobuf = "3.25.3" +google-re2j = "1.7" +# @keep Gradle version to run the build +gradle = "8.10" +grpc = "1.65.1" +# @keep Gulp version used in ref-guide +gulp-cli = "2.3.0" +hamcrest = "3.0" +hk2 = "3.1.1" +hsqldb = "2.7.2" +ibm-icu = "74.2" +immutables-valueannotations = "2.10.1" +j256-simplemagic = "1.17" +jakarta-annotation = "2.1.1" +jakarta-inject = "2.0.1" +jakarta-ws = "3.1.0" +# @keep This is the minimum required Java version for the project. +java-min = "21" +# @keep This is the minimum required Java version for SolrJ. +java-solrj = "17" +javacc = "7.0.12" +# @keep for version alignment +jaxb = "2.3.8" +jayway-jsonpath = "2.9.0" +jctools = "4.0.5" +jersey = "3.1.9" +# TODO Sync with jersey versions +jersey-containers = "2.39.1" +# @keep for version alignment +joda-time = "2.8.1" +junit = "4.13.2" +langchain4j = "0.35.0" +# @keep Link checker version used in ref-guide +link-checker = "1.4.2" +littlerobots-versioncatalogupdate = "0.8.4" +lmax-disruptor = "3.4.4" +ltgt-errorprone = "3.1.0" +mockito = "5.12.0" +morethan-jmhreport = "0.9.0" +navsecurity = "0.5.10" +netty = "4.1.114.Final" +# @keep for version alignment +netty-tcnative = "2.0.66.Final" +nimbusds-josejwt = "9.30.2" +nodegradle-node = "7.0.1" +# @keep Node JS version used in node.gradle (LTS) +nodejs = "16.20.2" +openapi = "7.6.0" +openjdk-jmh = "1.37" +opentelemetry = "1.40.0" +osgi-annotation = "8.1.0" +# @keep for version alignment +ow2-asm = "9.3" +owasp-dependencycheck = "9.0.8" +# @keep for version alignment +perfmark = "0.27.0" +prometheus-metrics = "1.1.0" +prometheus-simpleclient = "0.16.0" +quicktheories = "0.26" +semver4j = "5.3.0" +slf4j = "2.0.13" +spatial4j = "0.8" +spotbugs = "4.8.6" +squareup-okhttp3-mockwebserver = "4.11.0" +squareup-okhttp3-okhttp = "4.12.0" +stephenc-jcip = "1.0-1" +swagger3 = "2.2.22" +tdunning-tdigest = "3.3" +thetaphi-forbiddenapis = "3.7" +thisptr-jacksonjq = "0.0.13" +threeten-bp = "1.6.8" +undercouch-download = "5.5.0" +xerces = "2.12.2" +xerial-snappy = "1.1.10.5" + +[plugins] +benmanes-versions = { id = "com.github.ben-manes.versions", version.ref = "benmanes-versions" } +carrotsearch-dependencychecks = { id = "com.carrotsearch.gradle.dependencychecks", version.ref = "carrotsearch-dependencychecks" } +cutterslade-analyze = { id = "ca.cutterslade.analyze", version.ref = "cutterslade-analyze" } +diffplug-spotless = { id = "com.diffplug.spotless", version.ref = "diffplug-spotless" } +littlerobots-versioncatalogupdate = { id = "nl.littlerobots.version-catalog-update", version.ref = "littlerobots-versioncatalogupdate" } +ltgt-errorprone = { id = "net.ltgt.errorprone", version.ref = "ltgt-errorprone" } +morethan-jmhreport = { id = "io.morethan.jmhreport", version.ref = "morethan-jmhreport" } +nodegradle-node = { id = "com.github.node-gradle.node", version.ref = "nodegradle-node" } +openapi-generator = { id = "org.openapi.generator", version.ref = "openapi" } +owasp-dependencycheck = { id = "org.owasp.dependencycheck", version.ref = "owasp-dependencycheck" } +swagger3-core = { id = "io.swagger.core.v3.swagger-gradle-plugin", version.ref = "swagger3" } +thetaphi-forbiddenapis = { id = "de.thetaphi.forbiddenapis", version.ref = "thetaphi-forbiddenapis" } +undercouch-download = { id = "de.undercouch.download", version.ref = "undercouch-download" } + +[libraries] +adobe-testing-s3mock-junit4 = { module = "com.adobe.testing:s3mock-junit4", version.ref = "adobe-testing-s3mock" } +adobe-testing-s3mock-testsupportcommon = 
{ module = "com.adobe.testing:s3mock-testsupport-common", version.ref = "adobe-testing-s3mock" } +amazon-awssdk-apacheclient = { module = "software.amazon.awssdk:apache-client", version.ref = "amazon-awssdk" } +amazon-awssdk-auth = { module = "software.amazon.awssdk:auth", version.ref = "amazon-awssdk" } +amazon-awssdk-awscore = { module = "software.amazon.awssdk:aws-core", version.ref = "amazon-awssdk" } +amazon-awssdk-bom = { module = "software.amazon.awssdk:bom", version.ref = "amazon-awssdk" } +amazon-awssdk-httpclient-spi = { module = "software.amazon.awssdk:http-client-spi", version.ref = "amazon-awssdk" } +amazon-awssdk-profiles = { module = "software.amazon.awssdk:profiles", version.ref = "amazon-awssdk" } +amazon-awssdk-regions = { module = "software.amazon.awssdk:regions", version.ref = "amazon-awssdk" } +amazon-awssdk-s3 = { module = "software.amazon.awssdk:s3", version.ref = "amazon-awssdk" } +amazon-awssdk-sdkcore = { module = "software.amazon.awssdk:sdk-core", version.ref = "amazon-awssdk" } +amazon-awssdk-sts = { module = "software.amazon.awssdk:sts", version.ref = "amazon-awssdk" } +apache-calcite-avatica-core = { module = "org.apache.calcite.avatica:avatica-core", version.ref = "apache-calcite-avatica" } +apache-calcite-core = { module = "org.apache.calcite:calcite-core", version.ref = "apache-calcite" } +apache-calcite-linq4j = { module = "org.apache.calcite:calcite-linq4j", version.ref = "apache-calcite" } +apache-commons-collections4 = { module = "org.apache.commons:commons-collections4", version.ref = "apache-commons-collections4" } +apache-commons-compress = { module = "org.apache.commons:commons-compress", version.ref = "apache-commons-compress" } +apache-commons-configuration2 = { module = "org.apache.commons:commons-configuration2", version.ref = "apache-commons-configuration2" } +apache-commons-exec = { module = "org.apache.commons:commons-exec", version.ref = "apache-commons-exec" } +apache-commons-lang3 = { module = "org.apache.commons:commons-lang3", version.ref = "apache-commons-lang3" } +apache-commons-math3 = { module = "org.apache.commons:commons-math3", version.ref = "apache-commons-math3" } +# @keep transitive dependency for version alignment +apache-commons-text = { module = "org.apache.commons:commons-text", version.ref = "apache-commons-text" } +apache-curator-client = { module = "org.apache.curator:curator-client", version.ref = "apache-curator" } +apache-curator-framework = { module = "org.apache.curator:curator-framework", version.ref = "apache-curator" } +apache-curator-recipes = { module = "org.apache.curator:curator-recipes", version.ref = "apache-curator" } +apache-curator-test = { module = "org.apache.curator:curator-test", version.ref = "apache-curator" } +apache-hadoop-client-api = { module = "org.apache.hadoop:hadoop-client-api", version.ref = "apache-hadoop" } +apache-hadoop-client-minicluster = { module = "org.apache.hadoop:hadoop-client-minicluster", version.ref = "apache-hadoop" } +apache-hadoop-client-runtime = { module = "org.apache.hadoop:hadoop-client-runtime", version.ref = "apache-hadoop" } +apache-hadoop-hdfs = { module = "org.apache.hadoop:hadoop-hdfs", version.ref = "apache-hadoop" } +apache-hadoop-thirdparty-shadedguava = { module = "org.apache.hadoop.thirdparty:hadoop-shaded-guava", version.ref = "apache-hadoop-thirdparty" } +apache-httpcomponents-httpclient = { module = "org.apache.httpcomponents:httpclient", version.ref = "apache-httpcomponents-httpclient" } +apache-httpcomponents-httpcore = { module = 
"org.apache.httpcomponents:httpcore", version.ref = "apache-httpcomponents-httpcore" } +apache-httpcomponents-httpmime = { module = "org.apache.httpcomponents:httpmime", version.ref = "apache-httpcomponents-httpmime" } +apache-kafka-clients = { module = "org.apache.kafka:kafka-clients", version.ref = "apache-kafka" } +apache-kafka-kafka213 = { module = "org.apache.kafka:kafka_2.13", version.ref = "apache-kafka" } +apache-kafka-server-common = { module = "org.apache.kafka:kafka-server-common", version.ref = "apache-kafka" } +apache-kafka-streams = { module = "org.apache.kafka:kafka-streams", version.ref = "apache-kafka" } +apache-log4j-api = { module = "org.apache.logging.log4j:log4j-api", version.ref = "apache-log4j" } +apache-log4j-core = { module = "org.apache.logging.log4j:log4j-core", version.ref = "apache-log4j" } +apache-log4j-jul = { module = "org.apache.logging.log4j:log4j-jul", version.ref = "apache-log4j" } +apache-log4j-layout-templatejson = { module = "org.apache.logging.log4j:log4j-layout-template-json", version.ref = "apache-log4j" } +apache-log4j-slf4j2impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "apache-log4j" } +apache-log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "apache-log4j" } +apache-log4j1-api = { module = "org.apache.logging.log4j:log4j-1.2-api", version.ref = "apache-log4j" } +apache-lucene-analysis-common = { module = "org.apache.lucene:lucene-analysis-common", version.ref = "apache-lucene" } +apache-lucene-analysis-icu = { module = "org.apache.lucene:lucene-analysis-icu", version.ref = "apache-lucene" } +apache-lucene-analysis-kuromoji = { module = "org.apache.lucene:lucene-analysis-kuromoji", version.ref = "apache-lucene" } +apache-lucene-analysis-morfologik = { module = "org.apache.lucene:lucene-analysis-morfologik", version.ref = "apache-lucene" } +apache-lucene-analysis-nori = { module = "org.apache.lucene:lucene-analysis-nori", version.ref = "apache-lucene" } +apache-lucene-analysis-opennlp = { module = "org.apache.lucene:lucene-analysis-opennlp", version.ref = "apache-lucene" } +apache-lucene-analysis-phonetic = { module = "org.apache.lucene:lucene-analysis-phonetic", version.ref = "apache-lucene" } +apache-lucene-analysis-smartcn = { module = "org.apache.lucene:lucene-analysis-smartcn", version.ref = "apache-lucene" } +apache-lucene-analysis-stempel = { module = "org.apache.lucene:lucene-analysis-stempel", version.ref = "apache-lucene" } +apache-lucene-backward-codecs = { module = "org.apache.lucene:lucene-backward-codecs", version.ref = "apache-lucene" } +apache-lucene-classification = { module = "org.apache.lucene:lucene-classification", version.ref = "apache-lucene" } +apache-lucene-codecs = { module = "org.apache.lucene:lucene-codecs", version.ref = "apache-lucene" } +apache-lucene-core = { module = "org.apache.lucene:lucene-core", version.ref = "apache-lucene" } +apache-lucene-expressions = { module = "org.apache.lucene:lucene-expressions", version.ref = "apache-lucene" } +apache-lucene-grouping = { module = "org.apache.lucene:lucene-grouping", version.ref = "apache-lucene" } +apache-lucene-highlighter = { module = "org.apache.lucene:lucene-highlighter", version.ref = "apache-lucene" } +apache-lucene-join = { module = "org.apache.lucene:lucene-join", version.ref = "apache-lucene" } +apache-lucene-misc = { module = "org.apache.lucene:lucene-misc", version.ref = "apache-lucene" } +apache-lucene-queries = { module = "org.apache.lucene:lucene-queries", version.ref = "apache-lucene" } 
+apache-lucene-queryparser = { module = "org.apache.lucene:lucene-queryparser", version.ref = "apache-lucene" } +apache-lucene-spatialextras = { module = "org.apache.lucene:lucene-spatial-extras", version.ref = "apache-lucene" } +apache-lucene-suggest = { module = "org.apache.lucene:lucene-suggest", version.ref = "apache-lucene" } +apache-lucene-testframework = { module = "org.apache.lucene:lucene-test-framework", version.ref = "apache-lucene" } +apache-opennlp-tools = { module = "org.apache.opennlp:opennlp-tools", version.ref = "apache-opennlp" } +apache-poi-ooxml = { module = "org.apache.poi:poi-ooxml", version.ref = "apache-poi" } +apache-poi-poi = { module = "org.apache.poi:poi", version.ref = "apache-poi" } +apache-rat-rat = { module = "org.apache.rat:apache-rat", version.ref = "apache-rat" } +apache-tika-core = { module = "org.apache.tika:tika-core", version.ref = "apache-tika" } +apache-tika-parsers = { module = "org.apache.tika:tika-parsers", version.ref = "apache-tika" } +apache-tomcat-annotationsapi = { module = "org.apache.tomcat:annotations-api", version.ref = "apache-tomcat" } +apache-zookeeper-jute = { module = "org.apache.zookeeper:zookeeper-jute", version.ref = "apache-zookeeper" } +apache-zookeeper-zookeeper = { module = "org.apache.zookeeper:zookeeper", version.ref = "apache-zookeeper" } +# @keep transitive dependency for version alignment +apiguardian-api = { module = "org.apiguardian:apiguardian-api", version.ref = "apiguardian" } +aqute-bnd-annotation = { module = "biz.aQute.bnd:biz.aQute.bnd.annotation", version.ref = "aqute-bnd" } +bc-jose4j = { module = "org.bitbucket.b_c:jose4j", version.ref = "bc-jose4j" } +benmanes-caffeine = { module = "com.github.ben-manes.caffeine:caffeine", version.ref = "benmanes-caffeine" } +bouncycastle-bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version.ref = "bouncycastle" } +bouncycastle-bcprov = { module = "org.bouncycastle:bcprov-jdk18on", version.ref = "bouncycastle" } +carrot2-core = { module = "org.carrot2:carrot2-core", version.ref = "carrot2-core" } +carrotsearch-hppc = { module = "com.carrotsearch:hppc", version.ref = "carrotsearch-hppc" } +carrotsearch-randomizedtesting-runner = { module = "com.carrotsearch.randomizedtesting:randomizedtesting-runner", version.ref = "carrotsearch-randomizedtesting" } +# @keep transitive dependency for version alignment +checkerframework-qual = { module = "org.checkerframework:checker-qual", version.ref = "checkerframework" } +codehaus-woodstox-stax2api = { module = "org.codehaus.woodstox:stax2-api", version.ref = "codehaus-woodstox" } +commonscli-commonscli = { module = "commons-cli:commons-cli", version.ref = "commons-cli" } +commonscodec-commonscodec = { module = "commons-codec:commons-codec", version.ref = "commons-codec" } +commonscollections-commonscollections = { module = "commons-collections:commons-collections", version.ref = "commons-collections" } +commonsio-commonsio = { module = "commons-io:commons-io", version.ref = "commons-io" } +cybozulabs-langdetect = { module = "com.cybozu.labs:langdetect", version.ref = "cybozulabs-langdetect" } +dropwizard-metrics-core = { module = "io.dropwizard.metrics:metrics-core", version.ref = "dropwizard-metrics" } +dropwizard-metrics-graphite = { module = "io.dropwizard.metrics:metrics-graphite", version.ref = "dropwizard-metrics" } +dropwizard-metrics-jetty10 = { module = "io.dropwizard.metrics:metrics-jetty10", version.ref = "dropwizard-metrics" } +dropwizard-metrics-jmx = { module = "io.dropwizard.metrics:metrics-jmx", version.ref = 
"dropwizard-metrics" } +dropwizard-metrics-jvm = { module = "io.dropwizard.metrics:metrics-jvm", version.ref = "dropwizard-metrics" } +dropwizard-metrics-servlets = { module = "io.dropwizard.metrics:metrics-servlets", version.ref = "dropwizard-metrics" } +eclipse-jdt-ecj = { module = "org.eclipse.jdt:ecj", version.ref = "eclipse-ecj" } +eclipse-jetty-alpnjavaclient = { module = "org.eclipse.jetty:jetty-alpn-java-client", version.ref = "eclipse-jetty" } +eclipse-jetty-alpnjavaserver = { module = "org.eclipse.jetty:jetty-alpn-java-server", version.ref = "eclipse-jetty" } +eclipse-jetty-alpnserver = { module = "org.eclipse.jetty:jetty-alpn-server", version.ref = "eclipse-jetty" } +eclipse-jetty-client = { module = "org.eclipse.jetty:jetty-client", version.ref = "eclipse-jetty" } +eclipse-jetty-deploy = { module = "org.eclipse.jetty:jetty-deploy", version.ref = "eclipse-jetty" } +eclipse-jetty-http = { module = "org.eclipse.jetty:jetty-http", version.ref = "eclipse-jetty" } +eclipse-jetty-http2-client = { module = "org.eclipse.jetty.http2:http2-client", version.ref = "eclipse-jetty" } +eclipse-jetty-http2-common = { module = "org.eclipse.jetty.http2:http2-common", version.ref = "eclipse-jetty" } +eclipse-jetty-http2-hpack = { module = "org.eclipse.jetty.http2:http2-hpack", version.ref = "eclipse-jetty" } +eclipse-jetty-http2-httpclienttransport = { module = "org.eclipse.jetty.http2:http2-http-client-transport", version.ref = "eclipse-jetty" } +eclipse-jetty-http2-server = { module = "org.eclipse.jetty.http2:http2-server", version.ref = "eclipse-jetty" } +eclipse-jetty-io = { module = "org.eclipse.jetty:jetty-io", version.ref = "eclipse-jetty" } +eclipse-jetty-jmx = { module = "org.eclipse.jetty:jetty-jmx", version.ref = "eclipse-jetty" } +eclipse-jetty-rewrite = { module = "org.eclipse.jetty:jetty-rewrite", version.ref = "eclipse-jetty" } +eclipse-jetty-security = { module = "org.eclipse.jetty:jetty-security", version.ref = "eclipse-jetty" } +eclipse-jetty-server = { module = "org.eclipse.jetty:jetty-server", version.ref = "eclipse-jetty" } +eclipse-jetty-servlet = { module = "org.eclipse.jetty:jetty-servlet", version.ref = "eclipse-jetty" } +eclipse-jetty-servlets = { module = "org.eclipse.jetty:jetty-servlets", version.ref = "eclipse-jetty" } +eclipse-jetty-start = { module = "org.eclipse.jetty:jetty-start", version.ref = "eclipse-jetty" } +eclipse-jetty-toolchain-servletapi = { module = "org.eclipse.jetty.toolchain:jetty-servlet-api", version.ref = "eclipse-jettytoolchain" } +eclipse-jetty-util = { module = "org.eclipse.jetty:jetty-util", version.ref = "eclipse-jetty" } +eclipse-jetty-webapp = { module = "org.eclipse.jetty:jetty-webapp", version.ref = "eclipse-jetty" } +eclipse-jetty-xml = { module = "org.eclipse.jetty:jetty-xml", version.ref = "eclipse-jetty" } +eclipse-jgit-jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version.ref = "eclipse-jgit" } +fasterxml-jackson-bom = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "fasterxml" } +fasterxml-jackson-core-annotations = { module = "com.fasterxml.jackson.core:jackson-annotations", version.ref = "fasterxml" } +fasterxml-jackson-core-core = { module = "com.fasterxml.jackson.core:jackson-core", version.ref = "fasterxml" } +fasterxml-jackson-core-databind = { module = "com.fasterxml.jackson.core:jackson-databind", version.ref = "fasterxml" } +fasterxml-jackson-dataformat-cbor = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor", version.ref = "fasterxml" } +fasterxml-jackson-dataformat-smile = { 
module = "com.fasterxml.jackson.dataformat:jackson-dataformat-smile", version.ref = "fasterxml" } +fasterxml-woodstox-core = { module = "com.fasterxml.woodstox:woodstox-core", version.ref = "fasterxml-woodstox" } +flexmark-extensions-abbreviation = { module = "com.vladsch.flexmark:flexmark-ext-abbreviation", version.ref = "flexmark" } +flexmark-extensions-attributes = { module = "com.vladsch.flexmark:flexmark-ext-attributes", version.ref = "flexmark" } +flexmark-extensions-autolink = { module = "com.vladsch.flexmark:flexmark-ext-autolink", version.ref = "flexmark" } +flexmark-flexmark = { module = "com.vladsch.flexmark:flexmark", version.ref = "flexmark" } +google-api-gax = { module = "com.google.api:gax", version.ref = "google-api-gax" } +# @keep transitive dependency for version alignment +google-api-grpc-proto = { module = "com.google.api.grpc:proto-google-common-protos", version.ref = "google-api-grpc-proto" } +google-auth-credentials = { module = "com.google.auth:google-auth-library-credentials", version.ref = "google-auth" } +google-auth-oauth2http = { module = "com.google.auth:google-auth-library-oauth2-http", version.ref = "google-auth" } +# @keep transitive dependency for version alignment +google-autovalue-annotations = { module = "com.google.auto.value:auto-value-annotations", version.ref = "google-autovalue" } +google-cloud-bom = { module = "com.google.cloud:google-cloud-bom", version.ref = "google-cloud-bom" } +google-cloud-core = { module = "com.google.cloud:google-cloud-core", version.ref = "google-cloud-core" } +google-cloud-corehttp = { module = "com.google.cloud:google-cloud-core-http", version.ref = "google-cloud-core" } +google-cloud-nio = { module = "com.google.cloud:google-cloud-nio", version.ref = "google-cloud-nio" } +google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version.ref = "google-cloud-storage" } +# @keep transitive dependency for version alignment +google-errorprone-annotations = { module = "com.google.errorprone:error_prone_annotations", version.ref = "google-errorprone" } +google-errorprone-core = { module = "com.google.errorprone:error_prone_core", version.ref = "google-errorprone" } +# @keep transitive dependency for version alignment +google-gson = { module = "com.google.code.gson:gson", version.ref = "google-gson" } +google-guava = { module = "com.google.guava:guava", version.ref = "google-guava" } +# @keep transitive dependency for version alignment +google-j2objc-annotations = { module = "com.google.j2objc:j2objc-annotations", version.ref = "google-j2objc" } +# @keep transitive dependency for version alignment +google-protobuf-java = { module = "com.google.protobuf:protobuf-java", version.ref = "google-protobuf" } +google-protobuf-javautils = { module = "com.google.protobuf:protobuf-java-util", version.ref = "google-protobuf" } +google-re2j = { module = "com.google.re2j:re2j", version.ref = "google-re2j" } +# @keep transitive dependency for version alignment +grpc-api = { module = "io.grpc:grpc-api", version.ref = "grpc" } +# @keep transitive dependency for version alignment +grpc-bom = { module = "io.grpc:grpc-bom", version.ref = "grpc" } +grpc-context = { module = "io.grpc:grpc-context", version.ref = "grpc" } +# @keep transitive dependency for version alignment +grpc-core = { module = "io.grpc:grpc-core", version.ref = "grpc" } +grpc-netty = { module = "io.grpc:grpc-netty", version.ref = "grpc" } +grpc-protobuf = { module = "io.grpc:grpc-protobuf", version.ref = "grpc" } +# @keep transitive dependency for version 
alignment +grpc-protobuf-lite = { module = "io.grpc:grpc-protobuf-lite", version.ref = "grpc" } +grpc-stub = { module = "io.grpc:grpc-stub", version.ref = "grpc" } +# @keep transitive dependency for version alignment +grpc-util = { module = "io.grpc:grpc-util", version.ref = "grpc" } +hamcrest-hamcrest = { module = "org.hamcrest:hamcrest", version.ref = "hamcrest" } +hk2-api = { module = "org.glassfish.hk2:hk2-api", version.ref = "hk2" } +# @keep transitive dependency for version alignment +hk2-locator = { module = "org.glassfish.hk2:hk2-locator", version.ref = "hk2" } +hsqldb-hsqldb = { module = "org.hsqldb:hsqldb", version.ref = "hsqldb" } +ibm-icu-icu4j = { module = "com.ibm.icu:icu4j", version.ref = "ibm-icu" } +immutables-valueannotations = { module = "org.immutables:value-annotations", version.ref = "immutables-valueannotations" } +j256-simplemagic = { module = "com.j256.simplemagic:simplemagic", version.ref = "j256-simplemagic" } +jakarta-annotation-api = { module = "jakarta.annotation:jakarta.annotation-api", version.ref = "jakarta-annotation" } +jakarta-inject-api = { module = "jakarta.inject:jakarta.inject-api", version.ref = "jakarta-inject" } +jakarta-ws-rsapi = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "jakarta-ws" } +javacc-javacc = { module = "net.java.dev.javacc:javacc", version.ref = "javacc" } +# @keep transitive dependency for version alignment +jaxb-runtime = { module = "org.glassfish.jaxb:jaxb-runtime", version.ref = "jaxb" } +jayway-jsonpath = { module = "com.jayway.jsonpath:json-path", version.ref = "jayway-jsonpath" } +jctools-core = { module = "org.jctools:jctools-core", version.ref = "jctools" } +jersey-containers-jettyhttp = { module = "org.glassfish.jersey.containers:jersey-container-jetty-http", version.ref = "jersey-containers" } +jersey-core-common = { module = "org.glassfish.jersey.core:jersey-common", version.ref = "jersey" } +jersey-core-server = { module = "org.glassfish.jersey.core:jersey-server", version.ref = "jersey" } +jersey-inject-hk2 = { module = "org.glassfish.jersey.inject:jersey-hk2", version.ref = "jersey" } +jersey-media-jsonjackson = { module = "org.glassfish.jersey.media:jersey-media-json-jackson", version.ref = "jersey" } +# @keep transitive dependency for version alignment +jodatime-jodatime = { module = "joda-time:joda-time", version.ref = "joda-time" } +junit-junit = { module = "junit:junit", version.ref = "junit" } +langchain4j-cohere = { module = "dev.langchain4j:langchain4j-cohere", version.ref = "langchain4j" } +langchain4j-core = { module = "dev.langchain4j:langchain4j-core", version.ref = "langchain4j" } +langchain4j-hugging-face = { module = "dev.langchain4j:langchain4j-hugging-face", version.ref = "langchain4j" } +langchain4j-mistral-ai = { module = "dev.langchain4j:langchain4j-mistral-ai", version.ref = "langchain4j" } +langchain4j-open-ai = { module = "dev.langchain4j:langchain4j-open-ai", version.ref = "langchain4j" } +lmax-disruptor = { module = "com.lmax:disruptor", version.ref = "lmax-disruptor" } +locationtech-spatial4j = { module = "org.locationtech.spatial4j:spatial4j", version.ref = "spatial4j" } +mockito-core = { module = "org.mockito:mockito-core", version.ref = "mockito" } +mockito-subclass = { module = "org.mockito:mockito-subclass", version.ref = "mockito" } +navsecurity-mockoauth2server = { module = "no.nav.security:mock-oauth2-server", version.ref = "navsecurity" } +netty-bom = { module = "io.netty:netty-bom", version.ref = "netty" } +netty-codechttp = { module = "io.netty:netty-codec-http", 
version.ref = "netty" } +# @keep transitive dependency for version alignment +netty-handler = { module = "io.netty:netty-handler", version.ref = "netty" } +# @keep transitive dependency for version alignment +netty-tcnative-boringssl = { module = "io.netty:netty-tcnative-boringssl-static", version.ref = "netty-tcnative" } +netty-tcnative-classes = { module = "io.netty:netty-tcnative-classes", version.ref = "netty-tcnative" } +# @keep transitive dependency for version alignment +netty-transport-classes-epoll = { module = "io.netty:netty-transport-classes-epoll", version.ref = "netty" } +netty-transport-native-epoll = { module = "io.netty:netty-transport-native-epoll", version.ref = "netty" } +nimbusds-josejwt = { module = "com.nimbusds:nimbus-jose-jwt", version.ref = "nimbusds-josejwt" } +openjdk-jmh-core = { module = "org.openjdk.jmh:jmh-core", version.ref = "openjdk-jmh" } +openjdk-jmh-generatorannprocess = { module = "org.openjdk.jmh:jmh-generator-annprocess", version.ref = "openjdk-jmh" } +opentelemetry-api = { module = "io.opentelemetry:opentelemetry-api", version.ref = "opentelemetry" } +opentelemetry-bom = { module = "io.opentelemetry:opentelemetry-bom", version.ref = "opentelemetry" } +opentelemetry-context = { module = "io.opentelemetry:opentelemetry-context", version.ref = "opentelemetry" } +opentelemetry-exporter-otlp = { module = "io.opentelemetry:opentelemetry-exporter-otlp", version.ref = "opentelemetry" } +opentelemetry-sdk = { module = "io.opentelemetry:opentelemetry-sdk", version.ref = "opentelemetry" } +opentelemetry-sdkextension-autoconfigure = { module = "io.opentelemetry:opentelemetry-sdk-extension-autoconfigure", version.ref = "opentelemetry" } +opentelemetry-sdktesting = { module = "io.opentelemetry:opentelemetry-sdk-testing", version.ref = "opentelemetry" } +opentelemetry-sdktrace = { module = "io.opentelemetry:opentelemetry-sdk-trace", version.ref = "opentelemetry" } +osgi-annotation = { module = "org.osgi:osgi.annotation", version.ref = "osgi-annotation" } +# @keep transitive dependency for version alignment +ow2-asm = { module = "org.ow2.asm:asm", version.ref = "ow2-asm" } +# @keep transitive dependency for version alignment +perfmark-api = { module = "io.perfmark:perfmark-api", version.ref = "perfmark" } +prometheus-metrics-expositionformats = { module = "io.prometheus:prometheus-metrics-exposition-formats", version.ref = "prometheus-metrics" } +prometheus-metrics-model = { module = "io.prometheus:prometheus-metrics-model", version.ref = "prometheus-metrics" } +prometheus-simpleclient = { module = "io.prometheus:simpleclient", version.ref = "prometheus-simpleclient" } +prometheus-simpleclient-httpserver = { module = "io.prometheus:simpleclient_httpserver", version.ref = "prometheus-simpleclient" } +quicktheories-quicktheories = { module = "org.quicktheories:quicktheories", version.ref = "quicktheories" } +semver4j-semver4j = { module = "org.semver4j:semver4j", version.ref = "semver4j" } +slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4j" } +slf4j-jcloverslf4j = { module = "org.slf4j:jcl-over-slf4j", version.ref = "slf4j" } +slf4j-jultoslf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } +spotbugs-annotations = { module = "com.github.spotbugs:spotbugs-annotations", version.ref = "spotbugs" } +squareup-okhttp3-mockwebserver = { module = "com.squareup.okhttp3:mockwebserver", version.ref = "squareup-okhttp3-mockwebserver" } +squareup-okhttp3-okhttp = { module = "com.squareup.okhttp3:okhttp", version.ref = "squareup-okhttp3-okhttp" } 
+stephenc-jcip-annotations = { module = "com.github.stephenc.jcip:jcip-annotations", version.ref = "stephenc-jcip" } +swagger3-annotations-jakarta = { module = "io.swagger.core.v3:swagger-annotations-jakarta", version.ref = "swagger3" } +swagger3-jaxrs2-jakarta = { module = "io.swagger.core.v3:swagger-jaxrs2-jakarta", version.ref = "swagger3" } +tdunning-tdigest = { module = "com.tdunning:t-digest", version.ref = "tdunning-tdigest" } +thisptr-jacksonjq = { module = "net.thisptr:jackson-jq", version.ref = "thisptr-jacksonjq" } +threeten-bp = { module = "org.threeten:threetenbp", version.ref = "threeten-bp" } +xerces-impl = { module = "xerces:xercesImpl", version.ref = "xerces" } +xerial-snappy-java = { module = "org.xerial.snappy:snappy-java", version.ref = "xerial-snappy" } diff --git a/gradle/lucene-dev/lucene-dev-repo-composite.gradle b/gradle/lucene-dev/lucene-dev-repo-composite.gradle index d612b29fe70..62274b32e6c 100644 --- a/gradle/lucene-dev/lucene-dev-repo-composite.gradle +++ b/gradle/lucene-dev/lucene-dev-repo-composite.gradle @@ -104,7 +104,7 @@ if (luceneDevRepo != null) { // We substitute the exact version of Lucene we currently have in versions.props across all the dependencies. // We can't just substitute all references without looking at the versoin because // plugin dependencies then also get substituted and everything crashes. - String luceneVersion = (file("${rootDir}/versions.props").getText("UTF-8") =~ /org.apache.lucene:\*=(.+)/)[0][1] + String luceneVersion = libs.versions.apache.lucene.get() logger.lifecycle("Local Lucene development repository will be used substituting ${luceneVersion}: ${luceneDevRepo}") // Include Lucene repository as a composite and substitute module names. diff --git a/gradle/maven/defaults-maven.gradle b/gradle/maven/defaults-maven.gradle index 96e82dcc1c4..3239c04acfc 100644 --- a/gradle/maven/defaults-maven.gradle +++ b/gradle/maven/defaults-maven.gradle @@ -25,6 +25,7 @@ configure(rootProject) { ext { published = [ ":solr:api", + ":solr:cross-dc-manager", ":solr:core", ":solr:solrj", ":solr:solrj-streaming", @@ -150,17 +151,6 @@ configure(subprojects.findAll { it.path in rootProject.published }) { prj -> artifact javadocJar pom(configurePom) - - pom({ - // LUCENE-9561: - // Remove dependencyManagement section created by a combination of - // Palantir and the publishing plugin. 
- // - // https://github.com/palantir/gradle-consistent-versions/issues/550 - withXml { - asNode().dependencyManagement.replaceNode {} - } - }) } } } diff --git a/gradle/node.gradle b/gradle/node.gradle index 3da3a51d40e..d585ab5f871 100644 --- a/gradle/node.gradle +++ b/gradle/node.gradle @@ -16,7 +16,7 @@ */ configure([project(":solr:packaging"), project(":solr:solr-ref-guide"), project(":solr:webapp")]) { - apply plugin: "com.github.node-gradle.node" + apply plugin: libs.plugins.nodegradle.node.get().pluginId def npmRegistry = "${ -> propertyOrEnvOrDefault("solr.npm.registry", "SOLR_NPM_REGISTRY", '') }" if (!npmRegistry.isEmpty()) { @@ -34,14 +34,14 @@ configure([project(":solr:packaging"), project(":solr:solr-ref-guide"), project( } } - ext { + project.ext { rootNodeDir = "$rootDir/.gradle/node" nodeProjectDir = file("$rootNodeDir/$project.name") } node { download = true - version = "16.20.2" // LTS + version = libs.versions.nodejs.get() def nodeDistUrl = "${ -> propertyOrEnvOrDefault("solr.node.distUrl", "SOLR_NODE_DIST_URL", '') }" if (!nodeDistUrl.isEmpty()) { diff --git a/gradle/solr/packaging.gradle b/gradle/solr/packaging.gradle index bb3fd5703ab..1b5325f908b 100644 --- a/gradle/solr/packaging.gradle +++ b/gradle/solr/packaging.gradle @@ -36,11 +36,11 @@ // I don't know how to untie these two cleanly. // -configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules:") || project.path.startsWith(":solr:prometheus-exporter") }) { +configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules:") || project.path == ":solr:prometheus-exporter" || project.path == ":solr:cross-dc-manager" }) { plugins.withType(JavaPlugin) { - ext { + project.ext { packagingDir = file("${buildDir}/packaging") - if (project.path.startsWith(":solr:prometheus-exporter")) { + if (project.path.startsWith(":solr:prometheus-exporter") || project.path.startsWith(":solr:cross-dc-manager")) { deps = packagingDir } else { deps = file("${packagingDir}/${project.name}") @@ -62,6 +62,12 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr:modules solrPlatformLibs project(":solr:solrj-zookeeper") // libExt has logging libs, which we don't want. Lets users decide what they want. solrPlatformLibs project(path: ":solr:server", configuration: 'libExt') + + // The cross-dc-manager uses the cross-dc Solr module libraries as well as the Jetty server jars + if (project.path == ":solr:cross-dc-manager") { + solrPlatformLibs project(":solr:modules:cross-dc") + solrPlatformLibs project(path: ":solr:server", configuration: 'serverLib') + } } // An aggregate that configures lib and test-lib in a temporary location. diff --git a/gradle/template.gradle.properties b/gradle/template.gradle.properties index c52e3048a77..79b18753f43 100644 --- a/gradle/template.gradle.properties +++ b/gradle/template.gradle.properties @@ -49,6 +49,11 @@ # tests.minheapsize=512m # tests.jvmargs=-XX:+UseParallelGC -XX:TieredStopAtLevel=1 -XX:ActiveProcessorCount=1 # +# If you want tests to produce an html report (which intellij provides a clickable link for +# at the end of a failed build) set this to true, defaults to false to save a few seconds. +# +# tests.html=false +# ################# # Gradle Daemon # ################# @@ -98,5 +103,8 @@ org.gradle.workers.max=@MAX_WORKERS@ # Maximum number of test JVMs forked per test task. tests.jvms=@TEST_JVMS@ +# By default skip html generation +tests.html=false + # Disable auto JVM provisioning (we don't use toolchains yet but want no surprises). 
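A note on the `apply plugin: libs.plugins.nodegradle.node.get().pluginId` form used in gradle/node.gradle earlier in this patch: catalog plugin aliases are designed for the plugins {} block of project build scripts, which script plugins applied via apply from: cannot use, so they fall back to resolving the plugin id from the alias. A sketch of both forms:

    // in a project build.gradle, the alias can be used directly:
    plugins {
        alias(libs.plugins.nodegradle.node)
    }

    // in a script plugin such as gradle/node.gradle, no plugins {} block is available:
    apply plugin: libs.plugins.nodegradle.node.get().pluginId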
org.gradle.java.installations.auto-download=false diff --git a/gradle/testing/alternative-jdk-support.gradle b/gradle/testing/alternative-jdk-support.gradle index 72cdabdab4b..97e5311ee09 100644 --- a/gradle/testing/alternative-jdk-support.gradle +++ b/gradle/testing/alternative-jdk-support.gradle @@ -50,7 +50,7 @@ if (jvmGradle != jvmCurrent) { doFirst { def jvmInfo = { JavaInfo javaInfo -> - JvmInstallationMetadata jvmMetadata = jvmDetector.getMetadata(new InstallationLocation(javaInfo.javaHome, "specific path")) + JvmInstallationMetadata jvmMetadata = jvmDetector.getMetadata(InstallationLocation.userDefined(javaInfo.javaHome, "specific path")) return "${jvmMetadata.languageVersion} (${jvmMetadata.displayName} ${jvmMetadata.runtimeVersion}, home at: ${jvmMetadata.javaHome})" } @@ -87,6 +87,6 @@ if (jvmGradle != jvmCurrent) { // Set up root project's properties. rootProject.ext.runtimeJavaHome = jvmCurrent.javaHome -rootProject.ext.runtimeJavaVersion = jvmDetector.getMetadata(new InstallationLocation(jvmCurrent.javaHome, "specific path")).getLanguageVersion() +rootProject.ext.runtimeJavaVersion = jvmDetector.getMetadata(InstallationLocation.userDefined(jvmCurrent.javaHome, "specific path")).getLanguageVersion() rootProject.ext.usesAltJvm = (jvmGradle != jvmCurrent); diff --git a/gradle/testing/beasting.gradle b/gradle/testing/beasting.gradle index 8934100ec10..67c20140ba8 100644 --- a/gradle/testing/beasting.gradle +++ b/gradle/testing/beasting.gradle @@ -27,7 +27,7 @@ def beastingMode = gradle.startParameter.taskNames.any{ name -> name == 'beast' allprojects { plugins.withType(JavaPlugin) { - ext { + project.ext { testOptions += [ [propName: 'tests.dups', value: 0, description: "Reiterate runs of entire test suites ('beast' task)."] ] diff --git a/gradle/testing/defaults-tests.gradle b/gradle/testing/defaults-tests.gradle index d291ca85a40..9241720e8c3 100644 --- a/gradle/testing/defaults-tests.gradle +++ b/gradle/testing/defaults-tests.gradle @@ -18,7 +18,6 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.apache.tools.ant.types.Commandline import org.gradle.api.tasks.testing.logging.* -import org.apache.lucene.gradle.ErrorReportingTestListener def resources = scriptResources(buildscript) def verboseModeHookInstalled = false @@ -112,12 +111,6 @@ allprojects { ignoreFailures = resolvedTestOption("tests.haltonfailure").toBoolean() == false jvmArgs Commandline.translateCommandline(resolvedTestOption("tests.jvmargs")) - - // Up to JDK-15 we have to enforce --illegal-access=deny, because we want no code to access - // JDK internals; JDK-16 and later will default to deny, see https://openjdk.java.net/jeps/396: - if (rootProject.runtimeJavaVersion < JavaVersion.VERSION_16) { - jvmArgs '--illegal-access=deny' - } def loggingConfigFile = layout.projectDirectory.file("${resources}/logging.properties") def tempDir = layout.projectDirectory.dir(testsTmpDir.toString()) @@ -154,7 +147,7 @@ allprojects { } // Disable HTML report generation. The reports are big and slow to generate. - reports.html.required = false + reports.html.required = Boolean.parseBoolean(providers.gradleProperty("tests.html").getOrElse("false")) // Set up logging. 
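With reports.html.required now driven by the tests.html Gradle property (read through providers.gradleProperty above), the HTML test report can be re-enabled per invocation or persistently. For example (the task path is illustrative):

    // one-off, on the command line:
    //   ./gradlew :solr:core:test -Ptests.html=true
    // or persistently, via gradle.properties:
    //   tests.html=true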
testLogging { @@ -173,7 +166,7 @@ allprojects { } def spillDir = getTemporaryDir().toPath() - def listener = new ErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode) + def listener = buildinfra.newErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode) addTestOutputListener(listener) addTestListener(listener) diff --git a/gradle/testing/failed-tests-at-end.gradle b/gradle/testing/failed-tests-at-end.gradle index 5bffe9c9926..5b3381751d4 100644 --- a/gradle/testing/failed-tests-at-end.gradle +++ b/gradle/testing/failed-tests-at-end.gradle @@ -15,8 +15,6 @@ * limitations under the License. */ -import org.apache.lucene.gradle.ErrorReportingTestListener - // Display all failed tests at the end of the build. def failedTests = new LinkedHashSet() // for dedupe due to weird afterTest classMethod issue @@ -29,7 +27,7 @@ def genFailInfo(def task, TestDescriptor desc) { historyUrl += "&tests.test=$desc.name" historyUrl += " http://fucit.org/solr-jenkins-reports/history-trend-of-recent-failures.html#series/$name" } - def logName = ErrorReportingTestListener.getOutputLogName(desc.parent ?: desc) + def logName = buildinfra.getOutputLogName(desc.parent ?: desc) def output = file("${task.testOutputsDir}/${logName}") def repro = "./gradlew ${task.project.path}:test --tests \"${name}\" ${task.project.testOptionsForReproduceLine}" return ["name": name, "project": "${task.project.path}", "historyUrl": historyUrl, "output": output, "reproduce": repro] diff --git a/gradle/testing/profiling.gradle b/gradle/testing/profiling.gradle index 34b3efe59fa..8b1e5147efc 100644 --- a/gradle/testing/profiling.gradle +++ b/gradle/testing/profiling.gradle @@ -15,13 +15,11 @@ * limitations under the License. */ -import org.apache.lucene.gradle.ProfileResults; - def recordings = files() allprojects { plugins.withType(JavaPlugin) { - ext { + project.ext { testOptions += [ [propName: 'tests.profile', value: false, description: "Enable java flight recorder profiling."] ] @@ -48,7 +46,7 @@ allprojects { gradle.buildFinished { if (!recordings.isEmpty()) { - ProfileResults.printReport(recordings.getFiles().collect { it.toString() }, + buildinfra.profileResultsClass().printReport(recordings.getFiles().collect { it.toString() }, propertyOrDefault(ProfileResults.MODE_KEY, ProfileResults.MODE_DEFAULT) as String, Integer.parseInt(propertyOrDefault(ProfileResults.STACKSIZE_KEY, ProfileResults.STACKSIZE_DEFAULT)), Integer.parseInt(propertyOrDefault(ProfileResults.COUNT_KEY, ProfileResults.COUNT_DEFAULT)), diff --git a/gradle/testing/randomization.gradle b/gradle/testing/randomization.gradle index 9c809fc69e9..d3ae962c144 100644 --- a/gradle/testing/randomization.gradle +++ b/gradle/testing/randomization.gradle @@ -30,7 +30,7 @@ buildscript { } dependencies { - classpath 'com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.7.9' + classpath libs.carrotsearch.randomizedtesting.runner } } @@ -79,7 +79,7 @@ allprojects { // Configure test property defaults and their descriptions. allprojects { plugins.withType(JavaPlugin) { - ext { + project.ext { testOptions += [ // seed, repetition and amplification. [propName: 'tests.seed', value: { -> rootSeed }, description: "Sets the master randomization seed."], @@ -124,7 +124,7 @@ allprojects { // Add Solr-specific test configs settings. 
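Each entry registered in testOptions above is resolved through a project property of the same name, so individual runs can override them with -P flags. A sketch of typical invocations, assuming standard task paths (exact flags may vary):

    // reproduce a test run with a fixed randomization seed:
    //   ./gradlew :solr:core:test -Ptests.seed=DEADBEEF
    // repeat a suite with the 'beast' task:
    //   ./gradlew beast -Ptests.dups=5 --tests "SomeTest"
    // enable Java Flight Recorder profiling:
    //   ./gradlew :solr:core:test -Ptests.profile=true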
configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) { plugins.withType(JavaPlugin) { - ext { + project.ext { testOptions += [ [propName: 'tests.src.home', value: null, description: "See SOLR-14023."], [propName: 'solr.tests.use.numeric.points', value: null, description: "Point implementation to use (true=numerics, false=trie)."], @@ -137,14 +137,14 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) { allprojects { plugins.withType(JavaPlugin) { afterEvaluate { - ext.testOptionsResolved = testOptions.findAll { opt -> + project.ext.testOptionsResolved = testOptions.findAll { opt -> propertyOrDefault(opt.propName, opt.value) != null }.collectEntries { opt -> [(opt.propName): Objects.toString(resolvedTestOption(opt.propName))] } // Compute the "reproduce with" string. - ext.testOptionsForReproduceLine = testOptions.findAll { opt -> + project.ext.testOptionsForReproduceLine = testOptions.findAll { opt -> if (opt["includeInReproLine"] == false) { return false } @@ -199,7 +199,12 @@ allprojects { // Enable security manager, if requested. We could move the selection of security manager and security policy // to each project's build/ configuration but it seems compact enough to keep it here for now. - if (Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"])) { + def useSecurityManager = Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"]); + // Allow the project to override this + if (project.ext.has("useSecurityManager")) { + useSecurityManager = project.ext.get("useSecurityManager") + } + if (useSecurityManager) { def commonSolrDir = project(':solr').layout.projectDirectory def javaSecurityPolicy = layout.projectDirectory.file("${resources}/policies/solr-tests.policy") jvmArgumentProviders.add( diff --git a/gradle/testing/randomization/policies/solr-tests.policy b/gradle/testing/randomization/policies/solr-tests.policy index dae3f218ec3..4d61f7985c0 100644 --- a/gradle/testing/randomization/policies/solr-tests.policy +++ b/gradle/testing/randomization/policies/solr-tests.policy @@ -50,12 +50,17 @@ grant { permission java.net.SocketPermission "127.0.0.1:4", "connect,resolve"; permission java.net.SocketPermission "127.0.0.1:6", "connect,resolve"; permission java.net.SocketPermission "127.0.0.1:8", "connect,resolve"; + // Used as an invalid ZK host + permission java.net.SocketPermission "----------:33332", "connect,resolve"; // Basic permissions needed for Lucene to work: permission java.util.PropertyPermission "*", "read,write"; // needed by randomizedtesting runner to identify test methods. 
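The useSecurityManager override added in gradle/testing/randomization.gradle above is a plain extra property, so an individual module can opt out of the test security manager in its own build script. A minimal sketch (the module path is hypothetical):

    // hypothetical solr/modules/<name>/build.gradle
    // disable the test security manager for this module only
    project.ext.useSecurityManager = false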
permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.langchain4j.model.cohere"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.ai4j.openai4j"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.dev.langchain4j.model.huggingface"; permission java.lang.RuntimePermission "accessDeclaredMembers"; // needed by certain tests to redirect sysout/syserr: permission java.lang.RuntimePermission "setIO"; @@ -85,10 +90,12 @@ grant { // needed by bytebuddy permission java.lang.RuntimePermission "defineClass"; permission java.lang.RuntimePermission "net.bytebuddy.createJavaDispatcher"; + permission java.lang.RuntimePermission "net.bytebuddy.agent.getInstrumentation"; permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.method"; permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.type"; permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.dynamic.loading"; permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.utility"; + // needed by mockito permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; permission java.lang.RuntimePermission "reflectionFactoryAccess"; @@ -96,7 +103,7 @@ grant { permission java.lang.RuntimePermission "closeClassLoader"; // needed by HttpSolrClient permission java.lang.RuntimePermission "getFileSystemAttributes"; - // needed by hadoop auth (TODO: there is a cleaner way to handle this) + // needed by hadoop hdfs (TODO: there is a cleaner way to handle this) permission java.lang.RuntimePermission "loadLibrary.jaas"; permission java.lang.RuntimePermission "loadLibrary.jaas_unix"; permission java.lang.RuntimePermission "loadLibrary.jaas_nt"; @@ -107,6 +114,8 @@ grant { permission java.lang.RuntimePermission "writeFileDescriptor"; // needed by hadoop http permission java.lang.RuntimePermission "getProtectionDomain"; + // SolrProcessMgr to list processes + permission java.lang.RuntimePermission "manageProcess"; // These two *have* to be spelled out a separate permission java.lang.management.ManagementPermission "control"; @@ -129,17 +138,19 @@ grant { permission javax.management.MBeanServerPermission "findMBeanServer"; permission javax.management.MBeanServerPermission "releaseMBeanServer"; permission javax.management.MBeanTrustPermission "register"; - - // needed by hadoop auth + + // needed by hadoop hdfs permission javax.security.auth.AuthPermission "getSubject"; permission javax.security.auth.AuthPermission "modifyPrincipals"; permission javax.security.auth.AuthPermission "doAs"; - permission javax.security.auth.AuthPermission "getLoginConfiguration"; - permission javax.security.auth.AuthPermission "setLoginConfiguration"; permission javax.security.auth.AuthPermission "modifyPrivateCredentials"; permission javax.security.auth.AuthPermission "modifyPublicCredentials"; permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials * \"*\"", "read"; + // needed by crossdc + permission javax.security.auth.AuthPermission "getLoginConfiguration"; + permission javax.security.auth.AuthPermission "setLoginConfiguration"; + // needed by hadoop security permission java.security.SecurityPermission "putProviderProperty.SaslPlainServer"; permission java.security.SecurityPermission "insertProvider"; @@ -153,6 +164,12 @@ grant { // needed by s3mock permission java.lang.RuntimePermission 
"accessClassInPackage.sun.nio.fs"; + // needed for kafka mockito + permission java.lang.RuntimePermission "manageProcess"; + permission java.io.FilePermission "${/}proc${/}self${/}io", "read"; + permission java.io.FilePermission "${java.home}${/}bin${/}java", "execute"; + permission java.io.FilePermission "${java.home}${/}bin${/}java.exe", "execute"; + // SSL related properties for Solr tests permission javax.net.ssl.SSLPermission "setDefaultSSLContext"; @@ -242,6 +259,11 @@ grant { // expanded to a wildcard if set, allows all networking everywhere permission java.net.SocketPermission "${solr.internal.network.permission}", "accept,listen,connect,resolve"; + + // Run java + permission java.io.FilePermission "${java.home}${/}-", "execute"; + // Required by SolrProcessManager on Windows to find Solr processes, used by StatusTool (CLI) + permission java.io.FilePermission "<>", "execute"; }; // Grant all permissions to Gradle test runner classes. diff --git a/gradle/testing/slowest-tests-at-end.gradle b/gradle/testing/slowest-tests-at-end.gradle index eaf9cd1a2f1..d24e523394d 100644 --- a/gradle/testing/slowest-tests-at-end.gradle +++ b/gradle/testing/slowest-tests-at-end.gradle @@ -22,7 +22,7 @@ def allSuites = [] allprojects { plugins.withType(JavaPlugin) { - ext { + project.ext { testOptions += [ [propName: 'tests.slowestTests', value: true, description: "Print the summary of the slowest tests."], [propName: 'tests.slowestSuites', value: true, description: "Print the summary of the slowest suites."] diff --git a/gradle/validation/check-environment.gradle b/gradle/validation/check-environment.gradle index d9ea66b694e..f56e9fa4e78 100644 --- a/gradle/validation/check-environment.gradle +++ b/gradle/validation/check-environment.gradle @@ -22,7 +22,7 @@ import org.gradle.util.GradleVersion configure(rootProject) { ext { - expectedGradleVersion = '8.4' + expectedGradleVersion = libs.versions.gradle.get() } wrapper { @@ -31,6 +31,7 @@ configure(rootProject) { } def currentJavaVersion = JavaVersion.current() + def minJavaVersion = JavaVersion.toVersion(libs.versions.java.min.get()) if (currentJavaVersion < minJavaVersion) { throw new GradleException("At least Java ${minJavaVersion} is required, you are running Java ${currentJavaVersion} " + "[${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]") diff --git a/gradle/validation/dependencies.gradle b/gradle/validation/dependencies.gradle new file mode 100644 index 00000000000..cfb78ee15a2 --- /dev/null +++ b/gradle/validation/dependencies.gradle @@ -0,0 +1,346 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// Helper function for specifying stable versions for dependency updates +// https://github.com/ben-manes/gradle-versions-plugin +def isNonStable = { String version -> + def stableKeyword = ['RELEASE', 'FINAL', 'GA'].any { it -> version.toUpperCase().contains(it) } + def regex = /^[0-9,.v-]+(-r)?$/ + return !stableKeyword && !(version ==~ regex) +} + +// Configure sanity check for conflicting dependencies across certain configurations +allprojects { + apply plugin: libs.plugins.carrotsearch.dependencychecks.get().pluginId + + def consolidatedConfigurations = project.configurations.matching { + it.name in [ + "annotationProcessor", + "compileClasspath", + "libExt", + "packaging", + "runtimeClasspath", + "runtimeLibs", + "server", + "serverLib", + "solrCore", + "solrFullTgz", + "solrPlatformLibs", + "solrSlimTgz", + "testCompileClasspath", + "testRuntimeClasspath", + ] + } + + dependencyVersionChecks { + lockFileComment = "An inventory of resolved dependency versions. Do not edit this file directly." + + configurationGroups { + // consolidated_dependencies is a configuration group that is used + // to check for conflicting versions of the included configurations + consolidated_dependencies { + include consolidatedConfigurations + } + } + } + + dependencies { + modules { + module("org.hamcrest:hamcrest-core") { + replacedBy("org.hamcrest:hamcrest", "hamcrest-core was renamed to hamcrest") + } + } + + constraints { handler -> + consolidatedConfigurations.configureEach { Configuration conf -> + // Add BOMs as they resolve many dependency conflicts + handler.add(conf.name, libs.amazon.awssdk.bom, { + because 'version alignment with known BOM for consistency across project' + }) + handler.add(conf.name, libs.google.cloud.bom, { + because 'version alignment with known BOM for consistency across project' + }) + handler.add(conf.name, libs.fasterxml.jackson.bom, { + because 'version alignment with known BOM for consistency across project' + }) + handler.add(conf.name, libs.opentelemetry.bom, { + because 'version alignment with known BOM for consistency across project' + }) + handler.add(conf.name, libs.grpc.bom, { + because 'version alignment with known BOM for consistency across project' + }) + handler.add(conf.name, libs.netty.bom, { + because 'version alignment with known BOM for consistency across project' + }) + + // Add known dependencies that have multiple versions as constraints + // to align versions + handler.add(conf.name, libs.google.guava, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.google.errorprone.annotations, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.commons.exec, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.xerial.snappy.java, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.grpc.context, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.commonscli.commonscli, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.commonscodec.commonscodec, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.commonsio.commonsio, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.junit.junit, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, 
libs.grpc.core, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.grpc.protobuf, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.jakarta.annotation.api, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.commons.lang3, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.grpc.stub, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.eclipse.jetty.server, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.fasterxml.woodstox.core, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.slf4j.api, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.slf4j.jultoslf4j, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.commons.compress, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.benmanes.caffeine, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.carrotsearch.hppc, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.log4j.api, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.grpc.api, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.slf4j.jcloverslf4j, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.netty.codechttp, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.bc.jose4j, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.lmax.disruptor, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.httpcomponents.httpclient, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.httpcomponents.httpcore, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.httpcomponents.httpmime, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.zookeeper.zookeeper, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.apache.zookeeper.jute, { + because 'version alignment for consistency across project' + }) + handler.add(conf.name, libs.hamcrest.hamcrest, { + because 'version alignment for consistency across project' + }) + + // Add transitive dependencies as constraints to align versions + handler.add(conf.name, libs.checkerframework.qual, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.google.guava)} uses 3.42.0" + + "\n- ${getFullName(libs.benmanes.caffeine)} uses 3.37.0" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 3.44.0" + }) + handler.add(conf.name, libs.ow2.asm, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.lucene.expressions)} uses 7.2" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 9.3" + }) + handler.add(conf.name, libs.google.protobuf.java, { + because "transitive version alignment for consistency across 
project" + + "\n- ${getFullName(libs.google.errorprone.core)} uses 3.19.6" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 3.21.5" + + "\n- ${getFullName(libs.apache.calcite.avatica.core)} uses 3.21.9" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 3.25.3" + + "\n- ${getFullName(libs.google.cloud.core)} uses 3.25.3" + }) + handler.add(conf.name, libs.google.gson, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.grpc.core)} uses 2.10.1" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.9.1" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 2.11.0 and 2.10.1" + + "\n- ${getFullName(libs.google.protobuf.java)} uses 2.8.9" + + "\n- ${getFullName(libs.google.cloud.core)} uses 2.8.9 and 2.10.1" + + "\n- ${getFullName(libs.google.auth.oauth2http)} uses 2.10.1" + }) + handler.add(conf.name, libs.google.autovalue.annotations, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 1.10.4" + + "\n- ${getFullName(libs.google.cloud.corehttp)} uses 1.10.4" + + "\n- ${getFullName(libs.google.cloud.core)} uses 1.10.4" + + "\n- ${getFullName(libs.google.api.gax)} uses 1.10.4" + + "\n- ${getFullName(libs.google.auth.oauth2http)} uses 1.10.4" + + "\n- ${getFullName(libs.google.cloud.bom)} uses 1.10.4" + + "\n- ${getFullName(libs.google.errorprone.core)} uses 1.9" + }) + handler.add(conf.name, libs.apache.commons.text, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.calcite.core)} uses 1.11.0" + + "\n- ${getFullName(libs.apache.commons.configuration2)} uses 1.12.0" + }) + handler.add(conf.name, libs.grpc.util, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 1.62.2" + + "\n- ${getFullName(libs.grpc.netty)} uses 1.65.1" + }) + handler.add(conf.name, libs.jodatime.jodatime, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.amazon.awssdk.sdkcore)} uses 2.8.1" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.2" + }) + handler.add(conf.name, libs.google.api.grpc.proto, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.grpc.protobuf.asProvider())} uses 2.29.0" + + "\n- ${getFullName(libs.google.api.gax)} uses 2.41.0" + + "\n- ${getFullName(libs.google.api.grpc.proto)} uses 2.41.0" + + "\n- ${getFullName(libs.google.cloud.core)} uses 2.41.0" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 2.41.0" + }) + handler.add(conf.name, libs.netty.handler, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final" + + "\n- ${getFullName(libs.netty.codechttp)} uses 4.1.112.Final" + }) + handler.add(conf.name, libs.grpc.protobuf.lite, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 1.62.2" + + "\n- ${getFullName(libs.grpc.protobuf.asProvider())} uses 1.65.1" + }) + handler.add(conf.name, libs.jaxb.runtime, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 2.3.5" + + "\n- ${getFullName(libs.adobe.testing.s3mock.testsupportcommon)} uses 2.3.8" + }) + handler.add(conf.name, libs.perfmark.api, { + because "transitive version alignment for consistency across project" + + 
"\n- ${getFullName(libs.grpc.core)} uses 0.26.0" + + "\n- ${getFullName(libs.grpc.netty)} uses 0.26.0" + + "\n- ${getFullName(libs.google.cloud.storage)} uses 0.27.0" + }) + handler.add(conf.name, libs.netty.tcnative.boringssl, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.netty.bom)} uses 2.0.66.Final" + + "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 2.0.61.Final" + }) + handler.add(conf.name, libs.netty.transport.classes.epoll, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.netty.bom)} uses 4.1.114.Final" + + "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final" + }) + handler.add(conf.name, libs.netty.transport.native.epoll, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.netty.bom)} uses 4.1.114.Final" + + "\n- ${getFullName(libs.apache.zookeeper.zookeeper)} uses 4.1.105.Final" + }) + handler.add(conf.name, libs.google.j2objc.annotations, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.google.guava)} uses 3.0.0" + + "\n- ${getFullName(libs.google.protobuf.javautils)} uses 2.8" + }) + handler.add(conf.name, libs.apiguardian.api, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.calcite.core)} uses 1.1.2" + + "\n- ${getFullName(libs.junit.junit)} (api) uses 1.1.0" + }) + handler.add(conf.name, libs.hk2.locator, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.jersey.inject.hk2)} uses 3.0.6" + + "\n- ${getFullName(libs.hk2.api)} uses 3.1.1" + }) + } + } + } +} + +static def getFullName(Provider dependency) { + def resolvedDep = dependency.get() + return "${resolvedDep.module.group}:${resolvedDep.module.name}" +} + +// Configure version catalog cleanups plugin. 
+configure(rootProject) { + apply plugin: libs.plugins.littlerobots.versioncatalogupdate.get().pluginId + + versionCatalogUpdate { + sortByKey = true + } + + tasks.matching { it.name == "tidy" }.configureEach { + it.dependsOn(":versionCatalogFormat") + } + + tasks.matching { + it.path in [":versionCatalogUpdate"] + }.configureEach { + it.interactive = true + } + + tasks.register("updateLibs", { + dependsOn ":versionCatalogUpdate" + }) + + // on dependencyUpdates get only stable versions recommended if current version is stable + // https://github.com/ben-manes/gradle-versions-plugin + tasks.named("dependencyUpdates").configure { + checkConstraints = true + checkBuildEnvironmentConstraints = true + rejectVersionIf { + isNonStable(it.candidate.version) && !isNonStable(it.currentVersion) + } + } +} diff --git a/gradle/validation/dependency-analyze.gradle b/gradle/validation/dependency-analyze.gradle index 1f35012ecf2..92125aba11c 100644 --- a/gradle/validation/dependency-analyze.gradle +++ b/gradle/validation/dependency-analyze.gradle @@ -20,7 +20,7 @@ allprojects { prj -> plugins.withId("java", { - prj.apply plugin: 'ca.cutterslade.analyze' + prj.apply plugin: libs.plugins.cutterslade.analyze.get().pluginId analyzeClassesDependencies { warnUsedUndeclared = false // means fail build if UsedUndeclared found diff --git a/gradle/validation/ecj-lint.gradle b/gradle/validation/ecj-lint.gradle index f47f70587a1..86f30cd5f1f 100644 --- a/gradle/validation/ecj-lint.gradle +++ b/gradle/validation/ecj-lint.gradle @@ -23,7 +23,7 @@ configure(rootProject) { } dependencies { - ecjDeps "org.eclipse.jdt:ecj:${scriptDepVersions['ecj']}" + ecjDeps libs.eclipse.jdt.ecj } } diff --git a/gradle/validation/ecj-lint/ecj.javadocs.prefs b/gradle/validation/ecj-lint/ecj.javadocs.prefs index 975707055ff..74278547699 100644 --- a/gradle/validation/ecj-lint/ecj.javadocs.prefs +++ b/gradle/validation/ecj-lint/ecj.javadocs.prefs @@ -5,8 +5,8 @@ org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annota org.eclipse.jdt.core.compiler.annotation.nonnullisdefault=disabled org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform=11 -org.eclipse.jdt.core.compiler.compliance=11 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=17 +org.eclipse.jdt.core.compiler.compliance=17 org.eclipse.jdt.core.compiler.doc.comment.support=enabled org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error org.eclipse.jdt.core.compiler.problem.assertIdentifier=error @@ -93,4 +93,4 @@ org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disa org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error -org.eclipse.jdt.core.compiler.source=11 +org.eclipse.jdt.core.compiler.source=17 diff --git a/gradle/validation/error-prone.gradle b/gradle/validation/error-prone.gradle index 00e14ed0eab..b7242b566c0 100644 --- a/gradle/validation/error-prone.gradle +++ b/gradle/validation/error-prone.gradle @@ -37,24 +37,25 @@ if (skipReason) { allprojects { prj -> plugins.withType(JavaPlugin) { - // LUCENE-9650: Errorprone on master/gradle does not work when running as plugin - // inside a forked Javac process. Javac running inside Gradle works, because we have - // additional module system opens in place. 
- // This is a hack to keep the dependency (so that palantir's version check doesn't complain) - // but don't include the plugin (which fails on JDK16+). + // LUCENE-9650: Errorprone does not work when running as a plugin inside a forked Javac process. + // Javac running inside Gradle works, because we have additional module system opens in place. if (skipReason) { tasks.withType(JavaCompile) { task -> task.dependsOn ":errorProneSkipped" } + + // Error prone plugin adds error prone to test classpath. We need to add it here too + // (manually) so that versions.lock is consistent with or without error prone. configurations { errorprone } dependencies { - errorprone("com.google.errorprone:error_prone_core") + errorprone libs.google.errorprone.core } + configurations.annotationProcessor.extendsFrom(configurations.errorprone) } else { - prj.apply plugin: 'net.ltgt.errorprone' + prj.apply plugin: libs.plugins.ltgt.errorprone.get().pluginId dependencies { - errorprone("com.google.errorprone:error_prone_core") + errorprone libs.google.errorprone.core } tasks.withType(JavaCompile) { task -> @@ -179,6 +180,7 @@ allprojects { prj -> '-Xep:MathRoundIntLong:ERROR', // '-Xep:MislabeledAndroidString:OFF', // we don't use android '-Xep:MisplacedScopeAnnotations:ERROR', + // '-Xep:MissingRuntimeRetention:ERROR', // todo check if useful or comment why not // '-Xep:MissingSuperCall:OFF', // we don't use this annotation // '-Xep:MissingTestCall:OFF', // we don't use this annotation '-Xep:MisusedDayOfYear:ERROR', @@ -218,12 +220,15 @@ allprojects { prj -> '-Xep:RandomCast:ERROR', '-Xep:RandomModInteger:ERROR', // '-Xep:RectIntersectReturnValueIgnored:OFF', // we don't use android + // '-Xep:RedundantSetterCall:ERROR', // todo check if useful or comment why not // '-Xep:RequiredModifiers:OFF', // we don't use this annotation // '-Xep:RestrictedApiChecker:OFF', // we don't use this annotation // '-Xep:ReturnValueIgnored:OFF', // todo there are problems that should be fixed + // '-Xep:SelfAssertion:ERROR', // todo check if useful or comment why not '-Xep:SelfAssignment:ERROR', '-Xep:SelfComparison:ERROR', '-Xep:SelfEquals:ERROR', + // '-Xep:SetUnrecognized:ERROR', // todo check if useful or comment why not // '-Xep:ShouldHaveEvenArgs:OFF', // we don't use truth '-Xep:SizeGreaterThanOrEqualsZero:ERROR', '-Xep:StreamToString:ERROR', @@ -236,7 +241,6 @@ allprojects { prj -> // '-Xep:ThrowIfUncheckedKnownChecked:OFF', // we don't use this annotation '-Xep:ThrowNull:ERROR', '-Xep:TreeToString:ERROR', - // '-Xep:TruthSelfEquals:OFF', // we don't use truth '-Xep:TryFailThrowable:ERROR', '-Xep:TypeParameterQualifier:ERROR', '-Xep:UnicodeDirectionalityCharacters:ERROR', @@ -265,6 +269,7 @@ allprojects { prj -> '-Xep:AssertionFailureIgnored:WARN', '-Xep:AssistedInjectAndInjectOnSameConstructor:WARN', '-Xep:AttemptedNegativeZero:WARN', + // '-Xep:AutoValueBoxedValues:WARN', // todo check if useful or comment why not // '-Xep:AutoValueFinalMethods:OFF', // we don't use autovalue // '-Xep:AutoValueImmutableFields:OFF', // we don't use autovalue // '-Xep:AutoValueSubclassLeaked:OFF', // we don't use autovalue @@ -285,6 +290,7 @@ allprojects { prj -> '-Xep:ChainedAssertionLosesContext:WARN', '-Xep:CharacterGetNumericValue:WARN', '-Xep:ClassCanBeStatic:WARN', + // '-Xep:ClassInitializationDeadlock:WARN', // todo check if useful or comment why not '-Xep:ClassNewInstance:WARN', // '-Xep:CloseableProvides:OFF', // we don't use this annotation '-Xep:ClosingStandardOutputStreams:WARN', @@ -296,6 +302,8 @@ allprojects { prj -> 
'-Xep:DateChecker:WARN', '-Xep:DateFormatConstant:WARN', // '-Xep:DefaultCharset:OFF', // we have forbiddenapis for that + //'-Xep:DeeplyNested:WARN', // todo check if useful or comment why not + //'-Xep:DefaultLocale:WARN', // todo check if useful or comment why not '-Xep:DefaultPackage:WARN', '-Xep:DeprecatedVariable:WARN', '-Xep:DirectInvocationOnMock:WARN', @@ -309,6 +317,7 @@ allprojects { prj -> '-Xep:EmptyBlockTag:WARN', // '-Xep:EmptyCatch:OFF', // todo check if useful or comment why not - might be handled by ECJ? // '-Xep:EmptySetMultibindingContributions:OFF', // we don't use this annotation + // '-Xep:EnumOrdinal:WARN', // todo check if useful or comment why not '-Xep:EqualsGetClass:WARN', '-Xep:EqualsIncompatibleType:WARN', '-Xep:EqualsUnsafeCast:WARN', @@ -330,6 +339,7 @@ allprojects { prj -> // '-Xep:FragmentNotInstantiable:OFF', // we don't use android // '-Xep:FutureReturnValueIgnored:OFF', // todo there are problems that should be fixed '-Xep:GetClassOnEnum:WARN', + // '-Xep:GuiceNestedCombine:WARN', // todo check if useful or comment why not '-Xep:HidingField:WARN', '-Xep:ICCProfileGetInstance:WARN', '-Xep:IdentityHashMapUsage:WARN', @@ -383,6 +393,7 @@ allprojects { prj -> '-Xep:JodaPlusMinusLong:WARN', '-Xep:JodaTimeConverterManager:WARN', '-Xep:JodaWithDurationAddedLong:WARN', + // '-Xep:JUnitIncompatibleType:WARN', // todo check if useful or comment why not // '-Xep:LabelledBreakTarget:OFF', // stylistic '-Xep:LiteEnumValueOf:WARN', '-Xep:LiteProtoToString:WARN', @@ -403,10 +414,12 @@ allprojects { prj -> // '-Xep:MissingSummary:OFF', // style preference that we don't want to enforce // '-Xep:MixedMutabilityReturnType:OFF', // todo check if useful or comment why not '-Xep:MockNotUsedInProduction:WARN', + // '-Xep:MockitoDoSetup:WARN', // todo check if useful or comment why not '-Xep:ModifiedButNotUsed:WARN', '-Xep:ModifyCollectionInEnhancedForLoop:WARN', '-Xep:ModifySourceCollectionInStream:WARN', '-Xep:MultimapKeys:WARN', + // '-Xep:MultipleNullnessAnnotations:WARN', // todo check if useful or comment why not '-Xep:MultipleParallelOrSequentialCalls:WARN', '-Xep:MultipleUnaryOperatorsInMethodCall:WARN', // '-Xep:MutableGuiceModule:OFF', // we don't use guice @@ -428,7 +441,9 @@ allprojects { prj -> '-Xep:NullableOptional:WARN', // '-Xep:NullablePrimitive:OFF', // we don't use this annotation // '-Xep:NullablePrimitiveArray:OFF', // we don't use this annotation + // '-Xep:NullableTypeParameter:WARN', // todo check if useful or comment why not // '-Xep:NullableVoid:OFF', // we don't use this annotation + // '-Xep:NullableWildcard:WARN', // todo check if useful or comment why not '-Xep:ObjectEqualsForPrimitives:WARN', // '-Xep:ObjectToString:OFF', // todo check if useful or comment why not '-Xep:ObjectsHashCodePrimitive:WARN', @@ -442,6 +457,7 @@ allprojects { prj -> '-Xep:Overrides:WARN', // '-Xep:OverridesGuiceInjectableMethod:OFF', // we don't use guice '-Xep:ParameterName:WARN', + '-Xep:PatternMatchingInstanceof:WARN', '-Xep:PreconditionsCheckNotNullRepeated:WARN', '-Xep:PrimitiveAtomicReference:WARN', '-Xep:ProtectedMembersInFinalClass:WARN', @@ -459,6 +475,7 @@ allprojects { prj -> // '-Xep:SameNameButDifferent:OFF', // todo check if useful or comment why not '-Xep:SelfAlwaysReturnsThis:WARN', // '-Xep:ShortCircuitBoolean:OFF', // todo check if useful or comment why not + // '-Xep:StatementSwitchToExpressionSwitch:WARN', // todo check if useful or comment why not // '-Xep:StaticAssignmentInConstructor:OFF', // we assign SolrTestCaseJ4.configString in many tests, 
difficult to untangle '-Xep:StaticAssignmentOfThrowable:WARN', // '-Xep:StaticGuardedByInstance:OFF', // todo check if useful or comment why not @@ -469,9 +486,12 @@ allprojects { prj -> '-Xep:StringCharset:WARN', '-Xep:StringFormatWithLiteral:WARN', // '-Xep:StringSplitter:OFF', // todo check if useful or comment why not - might be able to use forbidden-apis for this? + // '-Xep:SunApi:WARN', // todo check if useful or comment why not + // '-Xep:SuperCallToObjectMethod:WARN', // todo check if useful or comment why not '-Xep:SuperEqualsIsObjectEquals:WARN', // '-Xep:SwigMemoryLeak:OFF', // we don't use swig // '-Xep:SynchronizeOnNonFinalField:OFF', // todo check if useful or comment why not + // '-Xep:SystemConsoleNull:WARN', // todo check if useful or comment why not // '-Xep:ThreadJoinLoop:OFF', // todo check if useful or comment why not // '-Xep:ThreadLocalUsage:OFF', // todo check if useful or comment why not // '-Xep:ThreadPriorityCheck:OFF', // todo check if useful or comment why not @@ -493,6 +513,7 @@ allprojects { prj -> // '-Xep:UnicodeEscape:OFF', // can't enable since Lucene/Solr tests use unicode a bunch // '-Xep:UnnecessaryAssignment:OFF', // we don't use these annotations '-Xep:UnnecessaryAsync:WARN', + // '-Xep:UnnecessaryBreakInSwitch:WARN', // todo check if useful or comment why not '-Xep:UnnecessaryLambda:WARN', '-Xep:UnnecessaryLongToIntConversion:WARN', '-Xep:UnnecessaryMethodInvocationMatcher:WARN', @@ -513,6 +534,7 @@ allprojects { prj -> // '-Xep:UseBinds:OFF', // we don't use this annotation // '-Xep:UseCorrectAssertInTests:OFF', // we inherit from LuceneTestCase which extends Assert '-Xep:VariableNameSameAsType:WARN', + // '-Xep:VoidUsed:WARN', // todo check if useful or comment why not // '-Xep:WaitNotInLoop:OFF', // todo check if useful or comment why not // '-Xep:WakelockReleasedDangerously:OFF', // we don't use android // '-Xep:WithSignatureDiscouraged:OFF', // we aren't using this error-prone internal api diff --git a/gradle/validation/forbidden-apis.gradle b/gradle/validation/forbidden-apis.gradle index 0ffb3a3a272..2a09ff49dda 100644 --- a/gradle/validation/forbidden-apis.gradle +++ b/gradle/validation/forbidden-apis.gradle @@ -23,7 +23,7 @@ def resources = scriptResources(buildscript) // Only apply forbidden-apis to java projects. allprojects { prj -> plugins.withId("java", { - prj.apply plugin: 'de.thetaphi.forbiddenapis' + prj.apply plugin: libs.plugins.thetaphi.forbiddenapis.get().pluginId // This helper method appends signature files based on a set of true // dependencies from a given configuration. 
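As a quick illustration of the version filtering above: the isNonStable helper defined at the top of gradle/dependencies.gradle earlier in this patch drives the dependencyUpdates task's rejectVersionIf rule. A minimal, self-contained Groovy sketch (the version strings are hypothetical examples, not taken from the catalog):

    // Copied from gradle/dependencies.gradle above; classifies a candidate version string.
    def isNonStable = { String version ->
      def stableKeyword = ['RELEASE', 'FINAL', 'GA'].any { it -> version.toUpperCase().contains(it) }
      def regex = /^[0-9,.v-]+(-r)?$/
      return !stableKeyword && !(version ==~ regex)
    }

    assert !isNonStable('9.12.1')          // purely numeric          -> stable
    assert !isNonStable('2.0.0-RELEASE')   // contains "RELEASE"      -> stable
    assert isNonStable('10.0.0-SNAPSHOT')  // no keyword, not numeric -> non-stable

With rejectVersionIf { isNonStable(it.candidate.version) && !isNonStable(it.currentVersion) }, a non-stable candidate is only suggested when the currently used version is itself non-stable.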
diff --git a/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt new file mode 100644 index 00000000000..469fef8238f --- /dev/null +++ b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt @@ -0,0 +1,16 @@ +@defaultMessage Use a org.apache.commons.cli.Option instead of a String value +org.apache.commons.cli.CommandLine#hasOption(java.lang.String) +org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String) +org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String, java.lang.String) +org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String, java.lang.Object) +org.apache.commons.cli.CommandLine#hasOption(char) +org.apache.commons.cli.CommandLine#getOptionValue(char) +org.apache.commons.cli.CommandLine#getOptionValue(char, java.lang.String) +#org.apache.commons.cli.CommandLine#getOptionValue(char, Supplier) +org.apache.commons.cli.CommandLine#getOptionValues(char) +org.apache.commons.cli.CommandLine#getOptionValues(java.lang.String) +org.apache.commons.cli.CommandLine#getParsedOptionValue(char) +# org.apache.commons.cli.CommandLine#getParsedOptionValue(char, Supplier) +org.apache.commons.cli.CommandLine#getParsedOptionValue(char, java.lang.Object) +org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String) +# org.apache.commons.cli.CommandLine#getParsedOptionValue(String, Supplier) diff --git a/gradle/validation/git-status.gradle b/gradle/validation/git-status.gradle index b34cf831ef7..8a43e7c7b3d 100644 --- a/gradle/validation/git-status.gradle +++ b/gradle/validation/git-status.gradle @@ -33,7 +33,7 @@ buildscript { } dependencies { - classpath "org.eclipse.jgit:org.eclipse.jgit:${scriptDepVersions['jgit']}" + classpath libs.eclipse.jgit.jgit } } diff --git a/gradle/validation/jar-checks.gradle b/gradle/validation/jar-checks.gradle index d416a9561fd..650a3b3337b 100644 --- a/gradle/validation/jar-checks.gradle +++ b/gradle/validation/jar-checks.gradle @@ -1,3 +1,5 @@ +import java.util.stream.Collectors + /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -24,8 +26,6 @@ // Because of this all tasks here must always execute together, so they cannot define task outputs. // TODO: Rewrite the internal state to use state files containing the ext.jarInfos and its referencedFiles -import org.apache.commons.codec.digest.DigestUtils - // This should be false only for debugging. def failOnError = true @@ -36,7 +36,7 @@ buildscript { } dependencies { - classpath "commons-codec:commons-codec:${scriptDepVersions['commons-codec']}" + classpath libs.commonscodec.commonscodec } } @@ -76,14 +76,14 @@ subprojects { // Configure jarValidation configuration for all projects. Any dependency // declared on this configuration (or any configuration it extends from) will // be verified. 
- configurations { + project.configurations { jarValidation } // For Java projects, add all dependencies from the following configurations // to jar validation plugins.withType(JavaPlugin) { - configurations { + project.configurations { jarValidation { extendsFrom runtimeClasspath extendsFrom compileClasspath @@ -109,16 +109,24 @@ subprojects { } def excludeRules = configurations.jarValidation.excludeRules + List> excludeRuleMaps; + if (excludeRules && excludeRules.size() > 0) { + excludeRuleMaps = excludeRules.stream().map {rule -> + if (rule.module != null) { + Map.of("group", rule.group, "module", rule.module) + } else { + Map.of("group", rule.group) + } + }.collect(Collectors.toList()) + } ArrayDeque queue = new ArrayDeque<>() configurations.jarValidation.extendsFrom.each { conf -> - if (excludeRules) { + if (excludeRules && excludeRules.size() > 0) { conf = conf.copyRecursive() conf.canBeResolved = true conf.canBeConsumed = true - def newConfExcludeRules = new HashSet<>(conf.excludeRules) - newConfExcludeRules.addAll(excludeRules) - conf.excludeRules = newConfExcludeRules + excludeRuleMaps.forEach {conf.exclude(it)} } if (conf.canBeResolved) { queue.addAll(conf.resolvedConfiguration.firstLevelModuleDependencies) @@ -152,7 +160,7 @@ subprojects { jarName : file.toPath().getFileName().toString(), path : file, module : resolvedArtifact.moduleVersion, - checksum : provider { new DigestUtils(DigestUtils.sha1Digest).digestAsHex(file).trim() }, + checksum : provider { buildinfra.sha1Digest().digestAsHex(file).trim() }, // We keep track of the files referenced by this dependency (sha, license, notice, etc.) // so that we can determine unused dangling files later on. referencedFiles: [] diff --git a/gradle/validation/owasp-dependency-check.gradle b/gradle/validation/owasp-dependency-check.gradle index eb5961e8269..f6352877dda 100644 --- a/gradle/validation/owasp-dependency-check.gradle +++ b/gradle/validation/owasp-dependency-check.gradle @@ -26,7 +26,7 @@ configure(rootProject) { dependencyCheck { failBuildOnCVSS = propertyOrDefault("validation.owasp.threshold", 7) as Integer formats = ['ALL'] - skipProjects = [':solr:solr-ref-guide', ':solr-missing-doclet'] + skipProjects = [':solr:solr-ref-guide', ':missing-doclet'] skipConfigurations = ['unifiedClasspath', 'permitUnusedDeclared'] suppressionFile = file("${resources}/exclusions.xml") analyzers { diff --git a/gradle/validation/precommit.gradle b/gradle/validation/precommit.gradle index 8c2fe6cfd98..cc298b8771f 100644 --- a/gradle/validation/precommit.gradle +++ b/gradle/validation/precommit.gradle @@ -23,8 +23,6 @@ configure(rootProject) { description = "All precommit checks" // Root-level validation tasks. - dependsOn ":verifyLocks" - dependsOn ":versionsPropsAreSorted" dependsOn ":checkWorkingCopyClean" // Solr validation tasks. diff --git a/gradle/validation/rat-sources.gradle b/gradle/validation/rat-sources.gradle index 91f2278e249..5b600f956c0 100644 --- a/gradle/validation/rat-sources.gradle +++ b/gradle/validation/rat-sources.gradle @@ -24,7 +24,7 @@ configure(rootProject) { } dependencies { - ratDeps "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}" + ratDeps libs.apache.rat.rat } } @@ -96,20 +96,16 @@ allprojects { exclude "dev-tools/scripts/README.md" exclude "dev-tools/scripts/create_line_file_docs.py" - // The root project also includes patterns for the boostrap (buildSrc) and composite + // The root project also includes patterns for the include composite // projects. Include their sources in the scan. 
- include "buildSrc/src/**" - include "dev-tools/solr-missing-doclet/src/**" + include "build-tools/build-infra/src/**" + include "build-tools/missing-doclet/src/**" break case ":solr:modules:clustering": exclude "src/test-files/META-INF/services/*" break - case ":solr:modules:hadoop-auth": - exclude "src/test-files/**/*.conf" - break - case ":solr:modules:hdfs": exclude "src/test-files/**/*.aff" exclude "src/test-files/**/*.dic" diff --git a/gradle/validation/spotless.gradle b/gradle/validation/spotless.gradle index 95607c67327..32234c93a4a 100644 --- a/gradle/validation/spotless.gradle +++ b/gradle/validation/spotless.gradle @@ -20,13 +20,11 @@ * spotless and Google Java Format. */ -def resources = scriptResources(buildscript) - -configure(project(":solr").subprojects) { prj -> +configure(allprojects) { prj -> plugins.withType(JavaPlugin) { - prj.apply plugin: 'com.diffplug.spotless' + prj.apply plugin: libs.plugins.diffplug.spotless.get().pluginId - ext { + project.ext { spotlessJavaSetup = (Action){ it.toggleOffOn() // obviously, only to be used sparingly. // TODO: Work out how to support multiple different header files (we have @@ -36,7 +34,7 @@ configure(project(":solr").subprojects) { prj -> // it.licenseHeaderFile(file("${resources}/asl-header.txt"), '^(\\s*package)') it.setLineEndings(Enum.valueOf(rootProject.buildscript.classLoader.loadClass("com.diffplug.spotless.LineEnding"), "UNIX")) it.endWithNewline() - it.googleJavaFormat('1.18.1') + it.googleJavaFormat(libs.versions.google.javaformat.get()) it.custom('Refuse wildcard imports', { line -> // Wildcard imports can't be resolved by spotless itself. @@ -95,23 +93,20 @@ configure(project(":solr").subprojects) { prj -> // Emit a custom message about how to fix formatting errors. tasks.matching { task -> task.name == "spotlessJavaCheck" }.configureEach { - runToFixMessage.set("\nIMPORTANT: run the top-level './gradlew tidy' to format code automatically (see help/formatting.txt for more info).") + it.runToFixMessage.set("\nIMPORTANT: run the top-level './gradlew tidy' to format code automatically (see help/formatting.txt for more info).") } - // Add an alias to 'spotlessApply' simply called 'tidy' and wire up - // spotlessCheck to convention's check. - task tidy() { - description "Applies formatters and cleanups to sources." - group "verification" + // Hook up spotless to tidy and check tasks. 
+ + tasks.matching { it.name == "tidy" }.configureEach { v -> + v.dependsOn tasks.matching { it.name == "spotlessApply" } } - tasks.matching { task -> task.name == "spotlessApply" }.configureEach { v -> - tidy.dependsOn v - v.dependsOn ":checkJdkInternalsExportedToGradle" + tasks.matching { it.name == "check" }.configureEach { v -> + v.dependsOn tasks.matching { it.name == "spotlessCheck" } } - tasks.matching { task -> task.name == "spotlessCheck" }.configureEach { v -> - check.dependsOn v + tasks.matching { task -> task.name in ["spotlessApply", "spotlessCheck"] }.configureEach { v -> v.dependsOn ":checkJdkInternalsExportedToGradle" } } diff --git a/gradle/validation/validate-log-calls.gradle b/gradle/validation/validate-log-calls.gradle index fb1a81c5403..44bc82f9ac1 100644 --- a/gradle/validation/validate-log-calls.gradle +++ b/gradle/validation/validate-log-calls.gradle @@ -118,7 +118,7 @@ class ValidateLogCallsTask extends DefaultTask { if (hasPlus) { cause = "hasPlus: " + hasPlus - violation = true + violation = level != "error" } if (violation == false) { def m = stripped =~ "\\(.*?\\)" diff --git a/gradle/validation/validate-source-patterns.gradle b/gradle/validation/validate-source-patterns.gradle index ec44c804a9b..9c4c93353e0 100644 --- a/gradle/validation/validate-source-patterns.gradle +++ b/gradle/validation/validate-source-patterns.gradle @@ -29,7 +29,7 @@ buildscript { } dependencies { - classpath "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}" + classpath libs.apache.rat.rat } } diff --git a/gradle/wrapper/gradle-wrapper.jar.sha256 b/gradle/wrapper/gradle-wrapper.jar.sha256 index f78f56fee75..67dead8f441 100644 --- a/gradle/wrapper/gradle-wrapper.jar.sha256 +++ b/gradle/wrapper/gradle-wrapper.jar.sha256 @@ -1 +1 @@ -0336f591bc0ec9aa0c9988929b93ecc916b3c1d52aed202c7381db144aa0ef15 +2db75c40782f5e8ba1fc278a5574bab070adccb2d21ca5a6e5ed840888448046 diff --git a/gradle/wrapper/gradle-wrapper.jar.version b/gradle/wrapper/gradle-wrapper.jar.version index a2f28f43be3..dd78a707858 100644 --- a/gradle/wrapper/gradle-wrapper.jar.version +++ b/gradle/wrapper/gradle-wrapper.jar.version @@ -1 +1 @@ -8.4.0 +8.10.2 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 744c64d1277..9355b415575 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-bin.zip networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index c0f76e91038..c8ad2977471 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. 
# # You can find Gradle at https://github.com/gradle/gradle/. @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -158,10 +161,10 @@ fi GRADLE_WRAPPER_JAR="$APP_HOME/gradle/wrapper/gradle-wrapper.jar" if [ ! -e "$GRADLE_WRAPPER_JAR" ]; then - "$JAVACMD" $JAVA_OPTS "$APP_HOME/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "$GRADLE_WRAPPER_JAR" + "$JAVACMD" $JAVA_OPTS "$APP_HOME/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "$GRADLE_WRAPPER_JAR" WRAPPER_STATUS=$? if [ "$WRAPPER_STATUS" -eq 1 ]; then - echo "ERROR: Something went wrong. Make sure you're using Java version between 11 and 21." + echo "ERROR: Something went wrong. Make sure you're using Java version between 21 and 23." exit $WRAPPER_STATUS elif [ "$WRAPPER_STATUS" -ne 0 ]; then exit $WRAPPER_STATUS @@ -173,7 +176,7 @@ CLASSPATH=$GRADLE_WRAPPER_JAR # START OF LUCENE CUSTOMIZATION # Generate gradle.properties if they don't exist if [ ! -e "$APP_HOME/gradle.properties" ]; then - "$JAVACMD" $JAVA_OPTS "$APP_HOME/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "$APP_HOME/gradle/template.gradle.properties" "$APP_HOME/gradle.properties" + "$JAVACMD" $JAVA_OPTS "$APP_HOME/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "$APP_HOME/gradle/template.gradle.properties" "$APP_HOME/gradle.properties" GENERATOR_STATUS=$? if [ "$GENERATOR_STATUS" -ne 0 ]; then exit $GENERATOR_STATUS diff --git a/gradlew.bat b/gradlew.bat index 172618e3ea4..ff65d8f6012 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## @@ -48,11 +50,11 @@ set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -62,11 +64,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -76,7 +78,7 @@ goto fail @rem LUCENE-9266: verify and download the gradle wrapper jar if we don't have one. 
set GRADLE_WRAPPER_JAR=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar IF NOT EXIST "%GRADLE_WRAPPER_JAR%" ( - "%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "%GRADLE_WRAPPER_JAR%" + "%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "%GRADLE_WRAPPER_JAR%" IF %ERRORLEVEL% EQU 1 goto failWithJvmMessage IF %ERRORLEVEL% NEQ 0 goto fail ) @@ -89,7 +91,7 @@ set CLASSPATH=%GRADLE_WRAPPER_JAR% IF NOT EXIST "%APP_HOME%\gradle.properties" ( @rem local expansion is needed to check ERRORLEVEL inside control blocks. setlocal enableDelayedExpansion - "%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "%APP_HOME%\gradle\template.gradle.properties" "%APP_HOME%\gradle.properties" + "%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "%APP_HOME%\gradle\template.gradle.properties" "%APP_HOME%\gradle.properties" IF %ERRORLEVEL% NEQ 0 goto fail endlocal ) @@ -108,7 +110,7 @@ goto fail :failWithJvmMessage @rem https://github.com/apache/lucene/pull/819 -echo Error: Something went wrong. Make sure you're using Java version between 11 and 21. +echo Error: Something went wrong. Make sure you're using Java version between 21 and 23. :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of diff --git a/help/dependencies.txt b/help/dependencies.txt index c1c81c560b0..fd1bc68b711 100644 --- a/help/dependencies.txt +++ b/help/dependencies.txt @@ -7,7 +7,7 @@ and each configuration can have dependencies attached to it. There are some standard conventions so, for example, the Java plugin adds standard configurations such as "api", "implementation", "testImplementation" and others. These configurations can also inherit -from each other; more about this typic can be found here: +from each other; more about this topic can be found here: https://docs.gradle.org/current/userguide/dependency_management_for_java_projects.html#dependency_management_for_java_projects https://docs.gradle.org/current/userguide/java_library_plugin.html#sec:java_library_separation @@ -29,60 +29,126 @@ testImplementation - makes a dependency only available for test classes. Adding a library dependency --------------------------- -Let's say we wish to add a dependency on library "foo.bar:baz" in +Let's say we wish to add a new dependency on library "foo.bar:baz" in version 1.2 to :solr:core. Let's assume this library is only -used internally by the project. The :solr:core project is configured -by solr/core/build.gradle and we would add (or modify) the dependency -block as follows: +used internally by the project. For new dependencies, we would add +the dependency and its version to gradle/libs.versions.toml first: + +[versions] +... +foo-bar-baz = "1.2" +... + +[libraries] +... +foo-bar-baz = { module = "foo.bar:baz", version.ref = "foo-bar-baz" } + +Note that the used names separated by dashes are later referenced with dots +instead of dashes, but more on that later. + +The chosen name for the module should more or less reflect the module's +group name and module id in a way that it groups related dependencies under +the same "prefix" (see below). There is no specific convention here and +group prefixes for domain names like "com" and "io" are avoided, as they +do not add any value and increase the size of the reference / alias name. 
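As a small, hypothetical illustration of that dash-to-dot mapping (based on the foo-bar-baz entry above, not on a real catalog entry): the generated type-safe accessors expose both the library and its version, the latter as a Provider that can be read with get():

    //   library alias "foo-bar-baz"  ->  libs.foo.bar.baz
    //   version key   "foo-bar-baz"  ->  libs.versions.foo.bar.baz   (a Provider<String>)
    def bazVersion = libs.versions.foo.bar.baz.get()   // "1.2"

This is the same mechanism used elsewhere in this change, for example gradle/validation/spotless.gradle reads the Google Java Format version via libs.versions.google.javaformat.get().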
+ +The :solr:core project is configured by solr/core/build.gradle, and we would +add (or modify) the dependency block as follows: dependencies { - implementation "foo.bar:baz" + implementation libs.foo.bar.baz } +In the project we use the default name "libs" to reference +the version catalog gradle/libs.versions.toml. + The "implementation" here is a named configuration; we don't need to declare it because it is declared for us by the java-library plugin. -In "normal" gradle the version of the dependency would be present -directly inside the declaration but we use a plugin -(palantir-consistent-versions) to manage all dependency versions -from the top-level (so that conflicts can be resolved globally). +In case the IDE does not auto-complete the reference, you may have +to sync your project so that the newly added library is found. -If this is the first time "foo.bar:baz" is added to the project, we'd have -to add its version to "versions.props" file at the top level of the -checkout: +As mentioned before, we can use dashes to group related libraries +together. So let's assume we have another dependency on "foo.bar:biz", +which is part of the same project as "foo.bar:baz" and therefore shares +the same version. -foo.bar:baz=1.2 +In this case we would want to use the same version for both libraries +and add them as follows to the version catalog gradle/libs.versions.toml: -and then regenerate the "versions.lock" file using the following -command: +[versions] +... +foo-bar = "1.2" # Use a shared name for both libraries +... -gradlew --write-locks +[libraries] +... +foo-bar-biz = { module = "foo.bar:biz", version.ref = "foo-bar" } +foo-bar-baz = { module = "foo.bar:baz", version.ref = "foo-bar" } -IMPORTANT: The versions.lock file will contain the actual version -of the dependency picked based on other project dependencies and -their transitive dependencies. This selected version may be -different from what each of these actually requires (the highest -version number will be typically selected). To see which dependencies -require which version of the library use: +This way, both libraries use the same version reference and updates +would affect both. -gradlew why --hash=... +Adding new libraries requires additional actions. The first thing you want +to do is run versionCatalogFormat to sort the version catalog. -where the hash code comes from versions.lock file. For example, at -the time of writing, jackson-databind has the following entry: +This command also removes unused libraries. You can use "# @keep" +with a reason why the library should not be removed. This is sometimes +necessary if the usage of a library is not identified by the plugin, +like when using it with "classpath [dependency]". -com.fasterxml.jackson.core:jackson-databind:2.10.0 (3 constraints: 931a7796) +Next, regenerate the "versions.lock" file using the +following command: -and "gradlew why --hash=931a7796" prints: +gradlew writeLocks -com.fasterxml.jackson.core:jackson-databind:2.10.0 - projects -> 2.10.0 - net.thisptr:jackson-jq -> 2.7.0 - org.carrot2:carrot2-mini -> 2.9.9.3 +Since we are responsible for providing and maintaining the versions of +libraries, the lock file will reflect the versions of the version +catalog. -Once the dependency is added it always makes sense to see the -tree of all module dependencies and maybe exclude transitive -dependencies of foo.bar:baz that we won't need. +The locking will fail if multiple versions of the same dependency are found.
+This may be the case if other libraries pull in an already used library as a transitive +dependency with a different version. If that is the case, you have to add +a constraint to the modules in gradle/dependencies.gradle with a reason +explaining why the constraint is applied. The example below adds a constraint for +"foo.bar:baz" with the given version from the version catalog, enforcing +the version for all transitive dependencies as well: +dependencies { + ... + constraints { handler -> + consolidatedConfigurations.configureEach { Configuration conf -> + ... + handler.add(conf.name, libs.foo.bar.baz, { + because 'version alignment for consistency across project' + }) + } + } +} + +Because the constraints have to be maintained by the contributors and cleaned +up manually, you should always provide additional information on why the constraint +is needed. This information may include the current versions of the libraries +that transitively pull in the constrained dependency. This helps others later +find and clean up constraints if they update dependencies that use newer versions +of the conflicting dependency. For example: + +handler.add(conf.name, libs.ow2.asm, { + because "transitive version alignment for consistency across project" + + "\n- ${getFullName(libs.apache.lucene.expressions)} uses 7.2" + + "\n- ${getFullName(libs.apache.tika.parsers)} uses 9.3" + }) + +In this case, the module org.ow2.asm:asm is used by both org.apache.lucene:lucene-expressions +and org.apache.tika:tika-parsers, but with completely different versions. +The constraint syncs the transitive dependency and lets the maintainers know +which dependencies use it. So if at some point someone updates +lucene-expressions to a newer version that uses asm version 9.3, the constraint +would be obsolete and could be removed. + +The hashes from the versions.lock file can be used to look up +which modules use a specific library. Simply look up the hash in the +versions.lock and you will find a group of modules that use it. Update Lucene prerelease ------------------------ @@ -100,12 +166,12 @@ If you want to upgrade Lucene to a newer build proceed like the following: queued) - remember the build number of Jenkins (left side, first build in list, prefixed by '#') -- Edit ./versions.props and change Lucene's version to '9.0.0-prereleaseX', +- Edit gradle/libs.versions.toml and change Lucene's version to '9.0.0-prereleaseX', with 'X' is the jenkins build number - Edit ./gradle/globals.gradle and change jenkins build number, too (this directs the repository to the one created by latest build): def lucenePrereleaseBuild = 'X' -- Run: gradlew --write-locks (as described before) +- Run: gradlew writeLocks (as described before) Lucene local dependency substitution @@ -173,7 +239,7 @@ crucial for the functioning of "foo.bar:baz". We can exclude it by adding an exclusion block to the original declaration: dependencies { - implementation("foo.bar:baz", { + implementation(libs.foo.bar.biz, { exclude group: "foo.bar", module: "irrelevant" }) } @@ -194,3 +260,31 @@ gradlew licenses To update JAR checksums (sha1) for licenses use: gradlew updateLicenses + +Note that this Gradle task only updates the checksums and does not make +any changes to license or notice files. + +Whenever the library's LICENSE or NOTICE file changes, these changes should +be reflected in our copies under solr/licenses. Currently it is necessary to +manually review and update these files. This process is subject to change, +progress can be tracked in SOLR-15929.
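Putting the steps above together, a typical sequence when adding or upgrading a dependency looks roughly like this (a sketch; not every step is needed for every change):

    gradlew versionCatalogFormat   # sort/format gradle/libs.versions.toml (also runs as part of "tidy")
    gradlew writeLocks             # regenerate versions.lock from the catalog
    gradlew updateLicenses         # refresh the checksums under solr/licenses
    gradlew licenses               # verify licenses, notices and checksums

Any new or changed LICENSE/NOTICE files under solr/licenses still need to be reviewed and updated by hand, as described below.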
+ +License and notice files may be picked from the libraries' repositories. +When looking up the libraries in Maven Central (https://search.maven.org) +almost all projects have a direct reference to the source code (right side), +usually a GitHub repository, where you can find the License and Notice file +in the root directory. + +Remember to check out the correct tag / release before copying any license +or notice file. Some multi-module projects that publish multiple artifacts +may have subdirectories for each artifact. These directories sometimes +hold a different license for that specific artifact, so make sure to copy +the right license file. Other multi-module projects may have only a single +license and notice file for all modules, like netty, so multiple dependencies +from the same group may reference the same license and notice file. + +Other places where you may find a license and notice file are in the pom.xml +file as a URL under a <licenses> tag if there is no reference to a repository +in Maven Central, or in the artifact downloaded by Maven when the library +is added as a dependency (in IntelliJ IDEA the libraries can be found +in the project view under External Libraries at the bottom). diff --git a/settings-gradle.lockfile b/settings-gradle.lockfile new file mode 100644 index 00000000000..709a43f74f8 --- /dev/null +++ b/settings-gradle.lockfile @@ -0,0 +1,4 @@ +# This is a Gradle generated file for dependency locking. +# Manual edits can break the build and are not advised. +# This file is expected to be part of source control. +empty=incomingCatalogForLibs0 diff --git a/settings.gradle b/settings.gradle index c4812ea0478..1cd64f9a04a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -20,10 +20,12 @@ pluginManagement { mavenCentral() gradlePluginPortal() } + + includeBuild("build-tools/build-infra") } plugins { - id 'com.gradle.develocity' version '3.17.6' + id 'com.gradle.develocity' version '3.18.1' id 'com.gradle.common-custom-user-data-gradle-plugin' version '2.0.2' } @@ -31,23 +33,25 @@ apply from: file('gradle/develocity.gradle') rootProject.name = "solr-root" -includeBuild("dev-tools/solr-missing-doclet") +includeBuild("build-tools/missing-doclet") include "solr:api" include "solr:solrj" include "solr:solrj-zookeeper" include "solr:solrj-streaming" include "solr:core" +include "solr:cross-dc-manager" include "solr:server" include "solr:modules:analysis-extras" include "solr:modules:clustering" +include "solr:modules:cross-dc" include "solr:modules:opentelemetry" include "solr:modules:extraction" include "solr:modules:gcs-repository" -include "solr:modules:hadoop-auth" include "solr:modules:hdfs" include "solr:modules:jwt-auth" include "solr:modules:langid" +include "solr:modules:llm" include "solr:modules:ltr" include "solr:modules:s3-repository" include "solr:modules:scripting" diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index a8b54deb3ab..b7291e9a23e 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -10,6 +10,10 @@ New Features --------------------- * SOLR-14496: Solr CLI commands now can interact with a Solr secured using Basic Authentication. (Eric Pugh) +* SOLR-17467: Solr CLI bin/solr start defaults to starting Solr in Cloud mode, use --user-managed switch for User Managed (aka Standalone) mode. (Eric Pugh) + +* SOLR-14673: Solr CLI now has bin/solr stream tool that executes streaming expressions via command line, either locally or on a Solr cluster.
(Eric Pugh) + Improvements --------------------- @@ -21,9 +25,19 @@ Improvements * SOLR-17077: When a shard rejoins leader election, leave previous election only once to save unneeded calls to Zookeeper. (Pierre Salagnac) +* SOLR-16116: Apache Curator is now used to manage all Solr Zookeeper interactions. This should provide more stability in the Solr-Zookeeper interactions. + The solrj-zookeeper module, now has a dependency on curator. (Houston Putman, Kevin Risden, Mike Drob, David Smiley) + +* SOLR-17544: Solr CLI will now stop when you combine mutually exclusive options. Combining -s and -z options is a common example. (Eric Pugh, Christos Malliaridis) + +* SOLR-17495: Change Solr CLI delete command to not delete configs by default. Decouple lifecycle of collections from configsets. (Eric Pugh) + +* SOLR-17516: `LBHttp2SolrClient` is now generic, adding support for `HttpJdkSolrClient`. (James Dyer) + Optimizations --------------------- -(No changes) +* SOLR-17568: The CLI bin/solr export tool now contacts the appropriate nodes directly for data instead of proxying through one. + (David Smiley) Bug Fixes --------------------- @@ -60,7 +74,7 @@ Deprecation Removals in WordBreakSolrSpellChecker (Andrey Bozhko via Eric Pugh) * SOLR-14763: Remove deprecated asynchronous request methods from `Http2SolrClient`, `HttpJdkSolrClient` and `LBHttp2SolrClient` - in favor of the new CompletableFuture based methods. Remove the related deprecated interfaces `AsyncListener` and ``Cancellable` + in favor of the new CompletableFuture based methods. Remove the related deprecated interfaces `AsyncListener` and `Cancellable` (James Dyer) * SOLR-14115: Remove deprecated zkcli script in favour of equivalent bin/solr sub commmands. (Eric Pugh) @@ -71,6 +85,23 @@ Deprecation Removals * SOLR-17400: Remove deprecated script snapshotcli.sh. bin/solr snapshot-* commands have replaced this. (Eric Pugh) +* SOLR-17494: Remove language specific writer types (i.e wt= ruby, python, php, and phps). (Eric Pugh) + +* SOLR-17352: Remove deprecated Solr CLI options. Run bin/solr yourcommand -h to see current options. (Eric Pugh, Christos Malliardis) + +* SOLR-17256: Previously deprecated `SolrRequest` methods `setBasePath` and `getBasePath` have been removed. SolrJ users + wishing to temporarily override an HTTP client's base URL may use `Http2SolrClient.requestWithBaseUrl` instead. (Jason Gerlowski) + +* SOLR-17564: Remove code in Assign used for backwards compatibility with Collections created prior to 7.0 (Paul McArthur) + +* SOLR-17576: Remove deprecated master/slave option language from ReplicationHandler. (Eric Pugh) + +* SOLR-16781: Support for `` directives (used in solrconfig.xml to add JARs on a core-by-core basis) has been removed. Users + looking for similar functionality can use Solr's package manager. Users that don't need to vary JAR access on a per-core basis + have many options, including the `` tag and directly modifying Solr's classpath prior to JVM startup. (Jason Gerlowski) + +* SOLR-17540: Removed the Hadoop Auth module, and thus Kerberos authentication and other exotic options. (Eric Pugh) + Dependency Upgrades --------------------- (No changes) @@ -102,13 +133,69 @@ Other Changes * SOLR-17279: Introduce SecurityJson.java file to Test Framework to consolidate setting up authentication in tests. (Rudy Seitz via Eric Pugh) -================== 9.8.0 ================== +* SOLR-17285: SolrJ RemoteSolrException moved to SolrClient. 
(@samuelrivascoding) + +* SOLR-17321: Minimum Java version for Apache Solr is now 21, and for SolrJ, it is 17. (Sanjay Dutt, David Smiley) + +* SOLR-16903: Update CLI tools to use java.nio.file.Path instead of java.io.File (Andrey Bozhko) + +* SOLR-17568: SolrCloud no longer reroutes/proxies a core request to another node if not found locally. (David Smiley) + +* SOLR-17548: Switch all public Java APIs from File to Path. (Matthew Biscocho via Eric Pugh) + +================== 9.9.0 ================== New Features --------------------- +* SOLR-17582: The CLUSTERSTATUS API will now stream each collection's status to the response, + fetching and computing it on the fly. To avoid a backwards compatibilty concern, this won't work + for wt=javabin. (Matthew Biscocho, David Smiley) + +Improvements +--------------------- +* SOLR-15751: The v2 API now has parity with the v1 "COLSTATUS" and "segments" APIs, which can be used to fetch detailed information about + specific collections or cores. Collection information can be fetched by a call to `GET /api/collections/collectionName`, and core + information with a call to `GET /api/cores/coreName/segments`. (Jason Gerlowski) + +* SOLR-16396: All v2 configset APIs have been moved to the slightly different path: `/api/configsets`, to better align with the design of + other v2 APIs. SolrJ now offers (experimental) SolrRequest implementations for all v2 configset APIs in + `org.apache.solr.client.solrj.request.ConfigsetsApi`. (Jason Gerlowski) + +Optimizations +--------------------- +* SOLR-17578: Remove ZkController internal core supplier, for slightly faster reconnection after Zookeeper session loss. (Pierre Salagnac) + +Bug Fixes +--------------------- (No changes) +Dependency Upgrades +--------------------- +* SOLR-17471: Upgrade Lucene to 9.12.1. (Pierre Salagnac, Christine Poerschke) + +Other Changes +--------------------- +* SOLR-17579: Remove unused code and other refactorings in ReplicationHandler and tests. Removed unused public + LOCAL_ACTIVITY_DURING_REPLICATION variable. (Eric Pugh) + +================== 9.8.0 ================== +New Features +--------------------- +* SOLR-17065: The Solr Cross-DC Project has graduated from the sandbox repository. + It will now be released as a fully supported Solr feature. + This feature closes SIP-13: Cross Data Center Replication. (Mark Miller, Andrzej Bialecki, Jason Gerlowski, Houston Putman) + +* SOLR-17150: Implement `memAllowed` parameter to limit per-thread memory allocations during request processing. (Andrzej Bialecki, Gus Heck) + +* SOLR-17525: Added knn_text_to_vector query parser to encode text to vector at query time through external LLM services. (Alessandro Benedetti) + Improvements --------------------- +* SOLR-17158: Users using query limits (timeAllowed, cpuTimeAllowed) for whom partial results are uninteresting + may set partialResults=false. This parameter has been enhanced to reduce time spent processing partial results + and omit partialResults from the response. Since this is requested behavior, no exception is thrown and the + partialResults response header will always exist if the result was short circuited. + (Gus Heck, Andrzej Bialecki, hossman) + * SOLR-17397: SkipExistingDocumentsProcessor now functions correctly with child documents. (Tim Owens via Eric Pugh) * SOLR-17180: Deprecate snapshotscli.sh in favour of bin/solr snapshot sub commands. Now able to manage Snapshots from the CLI. 
HDFS module specific snapshot script now ships as part of that module in the modules/hdfs/bin directory. (Eric Pugh) @@ -117,9 +204,32 @@ Improvements which may help reduce distributed-search latency in collections with many shards, especially when PKI is used between nodes. (Jason Gerlowski) -* SOLR-17382: Deprecate -a and -addlopts in favour of --jvm-opts for passing options into the JVM in bin/solr. (Eric Pugh, Christos Malliaridis) +* SOLR-17383: Resolved overlapping arguments in the Solr CLI. Removed duplicative but differing arguments, + consolidated use of short form arguments -v to not have differing meanings based on tool. Provide deprecation warning + in command line when deprecated arguments are used. (Eric Pugh, Christos Malliaridis) + +* SOLR-17256: Deprecate SolrRequest `setBasePath` and `getBasePath` methods. SolrJ users wishing to temporarily + override an HTTP client's base URL may use `Http2SolrClient.requestWithBaseUrl` instead. (Jason Gerlowski, + Sanjay Dutt, David Smiley) + +* SOLR-17414: When searching with multiThreaded=true, the internal tasks may now block instead of + enqueuing with a risk of rejection. Solr will use less resources under stress but to get the most + of your machine, you may want to increase the thread pool. (David Smiley) + +* SOLR-17528: Introduce -y short option to bin/solr start --no-prompt option. Aligns with bin/solr package tool. (Eric Pugh) + +* SOLR-17390: EmbeddedSolrServer now considers the ResponseParser (David Smiley) + +* SOLR-16390: v2 "cluster prop" APIs have been updated to be more REST-ful. Cluster prop creation/update are now available + at `PUT /api/cluster/properties/somePropName`. Deletion is now available at `DELETE /api/cluster/properties/somePropName`. + New APIs for listing-all and fetching-single cluster props are also now available at `GET /api/cluster/properties` and + `GET /api/cluster/properties/somePropName`, respectively. (Carlos Ugarte via Jason Gerlowski) + +* SOLR-16470: Replication "fetch file" API now has a v2 equivalent, available at `GET /api/cores/coreName/replication/files/fileName` + (Matthew Biscocho via Jason Gerlowski) + +* SOLR-17554: Suppress printing out of password to console when using auth CLI command. (Christos Malliaridis via Eric Pugh) -* SOLR-17431: Deprecate -p parameter where it doesn't refer to a port in bin/solr. (Eric Pugh, Christos Malliaridis) Optimizations --------------------- @@ -131,7 +241,9 @@ Optimizations that which consumes almost no memory, saving 1MB of memory per SolrCore. (David Smiley) * SOLR-17381: Make CLUSTERSTATUS request configurable to improve performance by allowing retrieval of specific information, - reducing unnecessary data fetching. (Aparna Suresh, David Smiley) + reducing unnecessary data fetching. Enhanced CloudSolrClient's HTTP ClusterStateProvider to use it, and to scale to + more collections better as well. + (Aparna Suresh, David Smiley) * SOLR-17396: Reduce thread contention in ZkStateReader.getCollectionProperties(). (Aparna Suresh, David Smiley, Paul McArthur) @@ -140,6 +252,13 @@ Optimizations * SOLR-3913: Optimize PostTool to call just optimize when both commit and optimize requested. (Eric Pugh) +* SOLR-17441: Improve system metrics collection by skipping unreadable MXBean properties, making /admin/info/system calls faster (Haythem Khiri) + +* SOLR-17592: Switched from HTTP1 to HTTP2 in SolrCloudManager via HttpClient change from Apache to Jetty. 
+ (Sanjay Dutt, David Smiley) + +* SOLR-17453: Leverage waitForState() instead of busy waiting in CREATE, MIGRATE, REINDEXCOLLECTION, MOVEREPLICA commands, and in some tests. (Pierre Salagnac) + Bug Fixes --------------------- * SOLR-12429: Uploading a configset with a symbolic link produces a IOException. Now a error message to user generated instead. (Eric Pugh) @@ -151,6 +270,26 @@ Bug Fixes * SOLR-16254: Clarify when a bin/solr create needs to be run on the same server as Solr. (Eric Pugh) +* SOLR-6962: bin/solr stop/start/restart should complain about missing value for options that expect a value. (Eric Pugh, Rahul Goswami) + +* SOLR-17464: Fixed Http2SolrClient bug in that 'requestAsync' triggered NPE when using a shared Jetty client (Jason Gerlowski, James Dyer) + +* SOLR-17413: Fixed UpdateLog replay bug that shared thread-unsafe SolrQueryRequest objects across threads (Jason Gerlowski, David Smiley, Houston Putman) + +* SOLR-11191: Splitting shards now routes child-docs with their _root_ field when available so they maintain parent relationship. (Zack Kendall) + +* SOLR-16976: Remove log4j-jul jar and use slf4j bridge for JUL to prevent exception from being logged when remote JMX + is enabled (Shawn Heisey, Stephen Zhou, Eric Pugh, Christine Poerschke, David Smiley) + +* SOLR-17575: Fixed broken backwards compatibility with the legacy "langid.whitelist" config in Solr Langid. (Jan Høydahl, Alexander Zagniotov) + +* SOLR-17574: Fix AllowListUrlChecker when liveNodes changes. Remove ClusterState.getHostAllowList (Bruno Roustant, David Smiley) + +* SOLR-17595: Fix two issues in Solr CLI that prevent Solr from starting with the techproducts example and from + correctly parsing arguments on Windows that start with -D and have multiple values separated by "," or spaces. (Christos Malliaridis) + +* SOLR-17306: fix replication problem on follower restart (Martin Anzinger and Peter Kroiss via Eric Pugh) + Dependency Upgrades --------------------- (No changes) @@ -163,8 +302,35 @@ Other Changes * SOLR-17142: Fix Gradle build sometimes gives spurious "unreferenced license file" warnings. (Uwe Schindler) +* SOLR-17448: Fixed inadvertent suppression of exceptions in the background tasks across the codebase. For certain +tasks that were scheduled via ExecutorService#submit, the results of the task execution were never examined which +led to the suppression of exceptions. (Andrey Bozhko) + * SOLR-11318: Introduce unit testing for AssertTool. (Eric Pugh, Jason Gerlowski) +* SOLR-17534: Introduce ClusterState.getCollectionNames, a convenience method (David Smiley) + +* SOLR-17535: Introduce ClusterState.collectionStream to replace getCollectionStates, getCollectionsMap, + and forEachCollection, which are now deprecated. (David Smiley) + +* SOLR-17545: Upgrade to Gradle 8.10 (Houston Putman) + +* SOLR-17504: CoreContainer calls UpdateHandler.commit when closing a read-only core (Bruno Roustant) + +* SOLR-17556: "home" and "data" directories used by Solr examples have been updated to align with documented best practices. (Eric Pugh, Houston Putman) + +* SOLR-17577: Remove "solr.indexfetcher.sotimeout" system property that was for optimizing replication tests. It was disabled, but not removed. (Eric Pugh) + +* SOLR-14680: NamedList: deprecating methods: forEachEntry, forEachKey, abortableForEachKey, abortableForEach, + asMap (no-arg only), get(key, default). Added getOrDefault. 
Deprecated the SimpleMap interface as well as the + entirety of the SolrJ package org.apache.solr.cluster.api, which wasn't used except for SimpleMap. (David Smiley) + +================== 9.7.1 ================== +Bug Fixes +--------------------- +* SOLR-17530: Metrics: The new Prometheus response writer wasn't detecting TLOG or PULL replicas properly. + (Matthew Biscocho) + ================== 9.7.0 ================== New Features --------------------- @@ -315,7 +481,7 @@ Other Changes * GITHUB#2454: Refactor preparePutOrPost method in HttpJdkSolrClient (Andy Webb) -* SOLR-16503: Use Jetty HTTP2 for SyncStrategy and PeerSyncWithLeader for "recovery" operations (Sanjay Dutt, David Smiley) +* SOLR-17290: Use Jetty HTTP2 for SyncStrategy and PeerSyncWithLeader for "recovery" operations (Sanjay Dutt, David Smiley) * SOLR-16796: Include cyclonedx SBOMs with maven artifacts (Arnout Engelen, Houston Putman, Kevin Risden) @@ -2643,6 +2809,18 @@ Bug Fixes * SOLR-16164: ConfigSet API returns error if untrusted user creates from _default configset (Eric Pugh, Kevin Risden) +================== 8.11.4 ================== + +Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. + +Bug Fixes +--------------------- +* SOLR-17168: Add netty-transport-classes-epoll dependency (Colvin Cowie) + +* SOLR-17417: Remove unnecessary code in PKIAuthPlugin and HttpSolrCall (Houston Putman, janhoy, Liu Huajin) + +* SOLR-17418: Streamline ConfigSet modification logic. (Houston Putman, Liu Huajin) + ================== 8.11.3 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/README.adoc b/solr/README.adoc index 61d0b8d96e6..8ddabdffa92 100644 --- a/solr/README.adoc +++ b/solr/README.adoc @@ -26,6 +26,9 @@ Benchmarking module for Solr. link:bin/[]:: Scripts to start up, manage and interact with Solr instances. +link:cross-dc-manager/[]:: +Contains a separate application to manage Cross-Datacenter indexing. + link:core/[]:: Base Solr code.
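A minimal client-side sketch of the new v2 cluster-property endpoints noted in the CHANGES entries above (SOLR-16390) and defined below in ClusterPropertyApis. It uses the JDK's java.net.http client against an assumed local, unauthenticated node at http://localhost:8983; "urlScheme" is only an example property name, and the single-field {"value": ...} body is an assumption mirroring SetClusterPropertyRequestBody, so verify it against the generated OpenAPI spec.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ClusterPropertySketch {
  public static void main(String[] args) throws Exception {
    String api = "http://localhost:8983/api"; // assumed local Solr node
    HttpClient http = HttpClient.newHttpClient();

    // Create or update a single cluster property: PUT /api/cluster/properties/{propertyName}
    HttpRequest set = HttpRequest.newBuilder(URI.create(api + "/cluster/properties/urlScheme"))
        .header("Content-Type", "application/json")
        .PUT(HttpRequest.BodyPublishers.ofString("{\"value\":\"https\"}")) // assumed body shape
        .build();
    System.out.println(http.send(set, HttpResponse.BodyHandlers.ofString()).body());

    // List all cluster properties: GET /api/cluster/properties
    HttpRequest list = HttpRequest.newBuilder(URI.create(api + "/cluster/properties")).GET().build();
    System.out.println(http.send(list, HttpResponse.BodyHandlers.ofString()).body());

    // Delete the property again: DELETE /api/cluster/properties/{propertyName}
    HttpRequest del = HttpRequest.newBuilder(URI.create(api + "/cluster/properties/urlScheme"))
        .DELETE().build();
    System.out.println(http.send(del, HttpResponse.BodyHandlers.ofString()).body());
  }
}

Raw HTTP is used only to keep the sketch self-contained; SolrJ request classes can drive the same endpoints.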
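Likewise, the v2 status APIs described in SOLR-15751 (defined further down in CollectionStatusApi and SegmentsApi) are plain GET endpoints whose query parameters (coreInfo, segments, fieldInfo, sizeInfo, ...) match the annotations in those interfaces. A minimal sketch, again assuming a local unauthenticated node; "techproducts" and the replica core name are placeholders:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CollectionStatusSketch {
  public static void main(String[] args) throws Exception {
    String api = "http://localhost:8983/api"; // assumed local Solr node
    HttpClient http = HttpClient.newHttpClient();

    // Collection metadata, including per-shard-leader segment details:
    // GET /api/collections/{collectionName}?coreInfo=true&segments=true
    HttpRequest collectionStatus = HttpRequest.newBuilder(
            URI.create(api + "/collections/techproducts?coreInfo=true&segments=true"))
        .GET().build();
    System.out.println(http.send(collectionStatus, HttpResponse.BodyHandlers.ofString()).body());

    // Segment metadata for a single core: GET /api/cores/{coreName}/segments
    HttpRequest coreSegments = HttpRequest.newBuilder(
            URI.create(api + "/cores/techproducts_shard1_replica_n1/segments?fieldInfo=true"))
        .GET().build();
    System.out.println(http.send(coreSegments, HttpResponse.BodyHandlers.ofString()).body());
  }
}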
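The relocated configset APIs (SOLR-16396), now rooted at /api/configsets per the ConfigsetsApi interface below, can be exercised the same way. In this sketch the configset name "myconf", the local zip path, and the application/octet-stream content type are assumptions; the overwrite query parameter comes straight from the Upload interface definition:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class ConfigsetSketch {
  public static void main(String[] args) throws Exception {
    String api = "http://localhost:8983/api"; // assumed local Solr node
    HttpClient http = HttpClient.newHttpClient();

    // List the configsets available to the cluster: GET /api/configsets
    HttpRequest list = HttpRequest.newBuilder(URI.create(api + "/configsets")).GET().build();
    System.out.println(http.send(list, HttpResponse.BodyHandlers.ofString()).body());

    // Upload a whole configset as a zip archive: PUT /api/configsets/{configSetName}?overwrite=true
    HttpRequest upload = HttpRequest.newBuilder(URI.create(api + "/configsets/myconf?overwrite=true"))
        .header("Content-Type", "application/octet-stream") // assumed; the endpoint reads a raw InputStream
        .PUT(HttpRequest.BodyPublishers.ofFile(Path.of("/tmp/myconf.zip")))
        .build();
    System.out.println(http.send(upload, HttpResponse.BodyHandlers.ofString()).body());
  }
}

SolrJ also ships experimental request classes for these endpoints in org.apache.solr.client.solrj.request.ConfigsetsApi, as noted in the SOLR-16396 entry.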
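Finally, the v2 replication endpoints (SOLR-16470, defined below in ReplicationApis) follow the same pattern: list the index files for a generation, then stream an individual file. The core name, file name, and literal generation value here are placeholders; in practice the generation would be taken from the /indexversion response.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class ReplicationFilesSketch {
  public static void main(String[] args) throws Exception {
    String api = "http://localhost:8983/api"; // assumed local Solr node
    String replication = api + "/cores/techproducts_shard1_replica_n1/replication"; // placeholder core
    HttpClient http = HttpClient.newHttpClient();

    // Current index version and generation: GET /api/cores/{coreName}/replication/indexversion
    HttpRequest indexVersion = HttpRequest.newBuilder(URI.create(replication + "/indexversion")).GET().build();
    System.out.println(http.send(indexVersion, HttpResponse.BodyHandlers.ofString()).body());

    // Files making up a generation: GET .../replication/files?generation={gen}
    long generation = 1; // placeholder; read it from the indexversion response in real code
    HttpRequest files = HttpRequest.newBuilder(URI.create(replication + "/files?generation=" + generation))
        .GET().build();
    System.out.println(http.send(files, HttpResponse.BodyHandlers.ofString()).body());

    // Stream a single file to disk: GET .../replication/files/{filePath} ("segments_1" is a placeholder name)
    HttpRequest oneFile = HttpRequest.newBuilder(URI.create(replication + "/files/segments_1")).GET().build();
    http.send(oneFile, HttpResponse.BodyHandlers.ofFile(Path.of("/tmp/segments_1")));
  }
}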
diff --git a/solr/api/build.gradle b/solr/api/build.gradle index 1f8118116c6..adad0302602 100644 --- a/solr/api/build.gradle +++ b/solr/api/build.gradle @@ -16,8 +16,8 @@ */ plugins { - id 'io.swagger.core.v3.swagger-gradle-plugin' version '2.2.2' - id "org.openapi.generator" version "6.0.1" + alias(libs.plugins.swagger3.core) + alias(libs.plugins.openapi.generator) } apply plugin: 'java-library' @@ -58,18 +58,18 @@ resolve { } dependencies { - runtimeOnly 'org.slf4j:slf4j-api' + runtimeOnly libs.slf4j.api - implementation 'jakarta.ws.rs:jakarta.ws.rs-api' - implementation 'com.fasterxml.jackson.core:jackson-annotations' - api 'io.swagger.core.v3:swagger-annotations-jakarta' - implementation 'org.semver4j:semver4j' + implementation libs.jakarta.ws.rsapi + implementation libs.fasterxml.jackson.core.annotations + api libs.swagger3.annotations.jakarta + implementation libs.semver4j.semver4j testImplementation project(':solr:test-framework') testImplementation project(':solr:api') - testImplementation 'org.apache.lucene:lucene-test-framework' + testImplementation libs.apache.lucene.testframework - swaggerBuild 'io.swagger.core.v3:swagger-jaxrs2-jakarta' + swaggerBuild libs.swagger3.jaxrs2.jakarta } // Non-Java client generation tasks below: diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterPropertyApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterPropertyApis.java new file mode 100644 index 00000000000..5c75eec0c60 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterPropertyApis.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.parameters.RequestBody; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import java.util.Map; +import org.apache.solr.client.api.model.ListClusterPropertiesResponse; +import org.apache.solr.client.api.model.SetClusterPropertyRequestBody; +import org.apache.solr.client.api.model.SolrJerseyResponse; + +/** Definitions for v2 JAX-RS cluster properties APIs. 
*/ +@Path("/cluster/properties") +public interface ClusterPropertyApis { + @GET + @Operation( + summary = "List all cluster properties in this Solr cluster.", + tags = {"cluster-properties"}) + ListClusterPropertiesResponse listClusterProperties(); + + @GET + @Path("/{propertyName}") + @Operation( + summary = "Get a cluster property in this Solr cluster.", + tags = {"cluster-properties"}) + SolrJerseyResponse getClusterProperty( + @Parameter(description = "The name of the property being retrieved.", required = true) + @PathParam("propertyName") + String propertyName); + + @PUT + @Path("/{propertyName}") + @Operation( + summary = "Set a single new or existing cluster property in this Solr cluster.", + tags = {"cluster-properties"}) + SolrJerseyResponse createOrUpdateClusterProperty( + @Parameter(description = "The name of the property being set.", required = true) + @PathParam("propertyName") + String propertyName, + @RequestBody(description = "Value to set for the property", required = true) + SetClusterPropertyRequestBody requestBody) + throws Exception; + + @PUT + @Operation( + summary = "Set nested cluster properties in this Solr cluster.", + tags = {"cluster-properties"}) + SolrJerseyResponse createOrUpdateNestedClusterProperty( + @RequestBody(description = "Property/ies to be set", required = true) + Map propertyValuesByName) + throws Exception; + + @DELETE + @Path("/{propertyName}") + @Operation( + summary = "Delete a cluster property in this Solr cluster.", + tags = {"cluster-properties"}) + SolrJerseyResponse deleteClusterProperty( + @Parameter(description = "The name of the property being deleted.", required = true) + @PathParam("propertyName") + String propertyName); +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionBackupApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionBackupApi.java new file mode 100644 index 00000000000..5c6c9d3bbbe --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionBackupApi.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import org.apache.solr.client.api.model.CreateCollectionBackupRequestBody; +import org.apache.solr.client.api.model.RestoreCollectionRequestBody; +import org.apache.solr.client.api.model.SolrJerseyResponse; +import org.apache.solr.client.api.model.SubResponseAccumulatingJerseyResponse; + +/** + * V2 API definition for creating a new "backup" of a specified collection + * + *
<p>
This API is analogous to the v1 /admin/collections?action=BACKUP command. + */ +public interface CollectionBackupApi { + + @Path("/collections/{collectionName}/backups/{backupName}/versions") + interface Create { + @POST + @Operation( + summary = "Creates a new backup point for a collection", + tags = {"collection-backups"}) + SolrJerseyResponse createCollectionBackup( + @PathParam("collectionName") String collectionName, + @PathParam("backupName") String backupName, + CreateCollectionBackupRequestBody requestBody) + throws Exception; + } + + @Path("/backups/{backupName}/restore") + interface Restore { + @POST + @Operation( + summary = "Restores an existing backup point to a (potentially new) collection.", + tags = {"collection-backups"}) + SubResponseAccumulatingJerseyResponse restoreCollection( + @PathParam("backupName") String backupName, RestoreCollectionRequestBody requestBody) + throws Exception; + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionSnapshotApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionSnapshotApis.java new file mode 100644 index 00000000000..21c1dc44224 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionSnapshotApis.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.parameters.RequestBody; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import org.apache.solr.client.api.model.CreateCollectionSnapshotRequestBody; +import org.apache.solr.client.api.model.CreateCollectionSnapshotResponse; +import org.apache.solr.client.api.model.DeleteCollectionSnapshotResponse; +import org.apache.solr.client.api.model.ListCollectionSnapshotsResponse; + +/** V2 API definitions for creating, accessing, and deleting collection-level snapshots. 
*/ +public interface CollectionSnapshotApis { + + @Path("/collections/{collName}/snapshots") + interface Create { + @POST + @Path("/{snapshotName}") + @Operation( + summary = "Creates a new snapshot of the specified collection.", + tags = {"collection-snapshots"}) + CreateCollectionSnapshotResponse createCollectionSnapshot( + @Parameter(description = "The name of the collection.", required = true) + @PathParam("collName") + String collName, + @Parameter(description = "The name of the snapshot to be created.", required = true) + @PathParam("snapshotName") + String snapshotName, + @RequestBody(description = "Contains user provided parameters", required = true) + CreateCollectionSnapshotRequestBody requestBody) + throws Exception; + } + + @Path("/collections/{collName}/snapshots/{snapshotName}") + interface Delete { + @DELETE + @Operation( + summary = "Delete an existing collection-snapshot by name.", + tags = {"collection-snapshots"}) + DeleteCollectionSnapshotResponse deleteCollectionSnapshot( + @Parameter(description = "The name of the collection.", required = true) + @PathParam("collName") + String collName, + @Parameter(description = "The name of the snapshot to be deleted.", required = true) + @PathParam("snapshotName") + String snapshotName, + @Parameter(description = "A flag that treats the collName parameter as a collection alias.") + @DefaultValue("false") + @QueryParam("followAliases") + boolean followAliases, + @QueryParam("async") String asyncId) + throws Exception; + } + + @Path("/collections/{collName}/snapshots") + interface List { + @GET + @Operation( + summary = "List the snapshots available for a specified collection.", + tags = {"collection-snapshots"}) + ListCollectionSnapshotsResponse listSnapshots( + @Parameter(description = "The name of the collection.", required = true) + @PathParam("collName") + String collName) + throws Exception; + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java new file mode 100644 index 00000000000..d07982cab76 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import org.apache.solr.client.api.model.CollectionStatusResponse; + +/** + * V2 API definition for fetching collection metadata + * + *
<p>
This API (GET /v2/collections/collectionName) is analogous to the v1 + * /admin/collections?action=COLSTATUS command. + */ +@Path("/collections/{collectionName}") +public interface CollectionStatusApi { + + // TODO Query parameters currently match those offered by the v1 + // /admin/collections?action=COLSTATUS. Should param names be updated/clarified? + @GET + @Operation( + summary = "Fetches metadata about the specified collection", + tags = {"collections"}) + CollectionStatusResponse getCollectionStatus( + @Parameter(description = "The name of the collection return metadata for", required = true) + @PathParam("collectionName") + String collectionName, + @Parameter(description = SegmentsApi.CORE_INFO_PARAM_DESC) @QueryParam("coreInfo") + Boolean coreInfo, + @Parameter( + description = + "Boolean flag to include metadata and statistics about the segments used by each shard leader. Implicitly set to true by 'fieldInfo' and 'sizeInfo'") + @QueryParam("segments") + Boolean segments, + @Parameter( + description = + SegmentsApi.FIELD_INFO_PARAM_DESC + + " Implicitly sets the 'segments' flag to 'true'") + @QueryParam("fieldInfo") + Boolean fieldInfo, + @Parameter(description = SegmentsApi.RAW_SIZE_PARAM_DESC) @QueryParam("rawSize") + Boolean rawSize, + @Parameter(description = SegmentsApi.RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary") + Boolean rawSizeSummary, + @Parameter(description = SegmentsApi.RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails") + Boolean rawSizeDetails, + @Parameter(description = SegmentsApi.RAW_SIZE_SAMPLING_PERCENT_DESC) + @QueryParam("rawSizeSamplingPercent") + Float rawSizeSamplingPercent, + @Parameter( + description = + SegmentsApi.SIZE_INFO_PARAM_DESC + + ". Implicitly sets the 'segment' flag to 'true'") + @QueryParam("sizeInfo") + Boolean sizeInfo) + throws Exception; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java new file mode 100644 index 00000000000..9961b4c9f28 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.parameters.RequestBody; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import java.io.IOException; +import java.io.InputStream; +import org.apache.solr.client.api.model.CloneConfigsetRequestBody; +import org.apache.solr.client.api.model.ListConfigsetsResponse; +import org.apache.solr.client.api.model.SolrJerseyResponse; + +public interface ConfigsetsApi { + + /** V2 API definition for listing the configsets available to this SolrCloud cluster. */ + @Path("/configsets") + interface List { + @GET + @Operation( + summary = "List the configsets available to Solr.", + tags = {"configsets"}) + ListConfigsetsResponse listConfigSet() throws Exception; + } + + /** + * V2 API definition for creating a (possibly slightly modified) copy of an existing configset + * + *
<p>
Equivalent to the existing v1 API /admin/configs?action=CREATE + */ + @Path("/configsets") + interface Clone { + @POST + @Operation( + summary = "Create a new configset modeled on an existing one.", + tags = {"configsets"}) + SolrJerseyResponse cloneExistingConfigSet(CloneConfigsetRequestBody requestBody) + throws Exception; + } + + /** + * V2 API definition for deleting an existing configset. + * + *
<p>
Equivalent to the existing v1 API /admin/configs?action=DELETE + */ + @Path("/configsets/{configSetName}") + interface Delete { + @DELETE + @Operation(summary = "Delete an existing configset.", tags = "configsets") + SolrJerseyResponse deleteConfigSet(@PathParam("configSetName") String configSetName) + throws Exception; + } + + /** + * V2 API definitions for uploading a configset, in whole or part. + * + *
<p>
Equivalent to the existing v1 API /admin/configs?action=UPLOAD + */ + @Path("/configsets/{configSetName}") + interface Upload { + @PUT + @Operation(summary = "Create a new configset.", tags = "configsets") + SolrJerseyResponse uploadConfigSet( + @PathParam("configSetName") String configSetName, + @QueryParam("overwrite") Boolean overwrite, + @QueryParam("cleanup") Boolean cleanup, + @RequestBody(required = true) InputStream requestBody) + throws IOException; + + @PUT + @Path("{filePath:.+}") + SolrJerseyResponse uploadConfigSetFile( + @PathParam("configSetName") String configSetName, + @PathParam("filePath") String filePath, + @QueryParam("overwrite") Boolean overwrite, + @QueryParam("cleanup") Boolean cleanup, + @RequestBody(required = true) InputStream requestBody) + throws IOException; + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateAliasApi.java similarity index 71% rename from solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java rename to solr/api/src/java/org/apache/solr/client/api/endpoint/CreateAliasApi.java index 7e0cf620b7f..78b9b4376c0 100644 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateAliasApi.java @@ -17,16 +17,16 @@ package org.apache.solr.client.api.endpoint; import io.swagger.v3.oas.annotations.Operation; -import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; -import org.apache.solr.client.api.model.ListConfigsetsResponse; +import org.apache.solr.client.api.model.CreateAliasRequestBody; +import org.apache.solr.client.api.model.SolrJerseyResponse; -/** V2 API definition for listing configsets. */ -@Path("/cluster/configs") -public interface ListConfigsetsApi { - @GET +@Path("/aliases") +public interface CreateAliasApi { + @POST @Operation( - summary = "List the configsets available to Solr.", - tags = {"configsets"}) - ListConfigsetsResponse listConfigSet() throws Exception; + summary = "Create a traditional or 'routed' alias", + tags = {"aliases"}) + SolrJerseyResponse createAlias(CreateAliasRequestBody requestBody) throws Exception; } diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionSnapshotApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionSnapshotApi.java deleted file mode 100644 index 70240a02e65..00000000000 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionSnapshotApi.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.client.api.endpoint; - -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.parameters.RequestBody; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import org.apache.solr.client.api.model.CreateCollectionSnapshotRequestBody; -import org.apache.solr.client.api.model.CreateCollectionSnapshotResponse; - -/** V2 API definition for creating a collection-level snapshot. */ -@Path("/collections/{collName}/snapshots") -public interface CreateCollectionSnapshotApi { - @POST - @Path("/{snapshotName}") - @Operation( - summary = "Creates a new snapshot of the specified collection.", - tags = {"collection-snapshots"}) - CreateCollectionSnapshotResponse createCollectionSnapshot( - @Parameter(description = "The name of the collection.", required = true) - @PathParam("collName") - String collName, - @Parameter(description = "The name of the snapshot to be created.", required = true) - @PathParam("snapshotName") - String snapshotName, - @RequestBody(description = "Contains user provided parameters", required = true) - CreateCollectionSnapshotRequestBody requestBody) - throws Exception; -} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/DeleteCollectionSnapshotApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/DeleteCollectionSnapshotApi.java deleted file mode 100644 index 8fed7eb4ff9..00000000000 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/DeleteCollectionSnapshotApi.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.solr.client.api.endpoint; - -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.QueryParam; -import org.apache.solr.client.api.model.DeleteCollectionSnapshotResponse; - -@Path("/collections/{collName}/snapshots/{snapshotName}") -public interface DeleteCollectionSnapshotApi { - - /** This API is analogous to V1's (POST /solr/admin/collections?action=DELETESNAPSHOT) */ - @DELETE - @Operation( - summary = "Delete an existing collection-snapshot by name.", - tags = {"collection-snapshots"}) - DeleteCollectionSnapshotResponse deleteCollectionSnapshot( - @Parameter(description = "The name of the collection.", required = true) - @PathParam("collName") - String collName, - @Parameter(description = "The name of the snapshot to be deleted.", required = true) - @PathParam("snapshotName") - String snapshotName, - @Parameter(description = "A flag that treats the collName parameter as a collection alias.") - @DefaultValue("false") - @QueryParam("followAliases") - boolean followAliases, - @QueryParam("async") String asyncId) - throws Exception; -} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/GetSchemaApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/GetSchemaApi.java index 8cec1d02f7a..119bf9d19ce 100644 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/GetSchemaApi.java +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/GetSchemaApi.java @@ -22,8 +22,16 @@ import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; import jakarta.ws.rs.QueryParam; +import org.apache.solr.client.api.model.SchemaGetDynamicFieldInfoResponse; +import org.apache.solr.client.api.model.SchemaGetFieldInfoResponse; +import org.apache.solr.client.api.model.SchemaGetFieldTypeInfoResponse; import org.apache.solr.client.api.model.SchemaInfoResponse; +import org.apache.solr.client.api.model.SchemaListCopyFieldsResponse; +import org.apache.solr.client.api.model.SchemaListDynamicFieldsResponse; +import org.apache.solr.client.api.model.SchemaListFieldTypesResponse; +import org.apache.solr.client.api.model.SchemaListFieldsResponse; import org.apache.solr.client.api.model.SchemaNameResponse; import org.apache.solr.client.api.model.SchemaSimilarityResponse; import org.apache.solr.client.api.model.SchemaUniqueKeyResponse; @@ -34,6 +42,67 @@ @Path(INDEX_PATH_PREFIX + "/schema") public interface GetSchemaApi { + @Path(INDEX_PATH_PREFIX + "/schema") + interface Fields { + + @GET + @Path("/fields") + @StoreApiParameters + @Operation( + summary = "List all non-dynamic fields in the schema of the specified core or collection", + tags = {"schema"}) + SchemaListFieldsResponse listSchemaFields(); + + @GET + @Path("/fields/{fieldName}") + @StoreApiParameters + @Operation( + summary = "Get detailed info about a single non-dynamic field", + tags = {"schema"}) + SchemaGetFieldInfoResponse getFieldInfo(@PathParam("fieldName") String fieldName); + + @GET + @Path("/copyfields") + @StoreApiParameters + @Operation( + summary = "List all copy-fields in the schema of the specified core or collection", + tags = {"schema"}) + SchemaListCopyFieldsResponse listCopyFields(); + + @GET + @Path("/dynamicfields") + @StoreApiParameters + @Operation( + summary = "List all dynamic-fields in the schema of the specified core or collection", 
+ tags = {"schema"}) + SchemaListDynamicFieldsResponse listDynamicFields(); + + @GET + @Path("/dynamicfields/{fieldName}") + @StoreApiParameters + @Operation( + summary = "Get detailed info about a single dynamic field", + tags = {"schema"}) + SchemaGetDynamicFieldInfoResponse getDynamicFieldInfo(@PathParam("fieldName") String fieldName); + + @GET + @Path("/fieldtypes") + @StoreApiParameters + @Operation( + summary = "List all field types in the schema used by the specified core or collection", + tags = {"schema"}) + SchemaListFieldTypesResponse listSchemaFieldTypes(); + + @GET + @Path("/fieldtypes/{fieldTypeName}") + @StoreApiParameters + @Operation( + summary = "Get detailed info about a single field type", + tags = {"schema"}) + SchemaGetFieldTypeInfoResponse getFieldTypeInfo( + @PathParam("fieldTypeName") String fieldTypeName); + } + @GET @StoreApiParameters @Operation( diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeLoggingApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeLoggingApis.java new file mode 100644 index 00000000000..c5b5cd95807 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeLoggingApis.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; +import java.util.List; +import org.apache.solr.client.api.model.ListLevelsResponse; +import org.apache.solr.client.api.model.LogLevelChange; +import org.apache.solr.client.api.model.LogMessagesResponse; +import org.apache.solr.client.api.model.LoggingResponse; +import org.apache.solr.client.api.model.SetThresholdRequestBody; + +@Path("/node/logging") +public interface NodeLoggingApis { + + @GET + @Path("/levels") + @Operation( + summary = "List all log-levels for the target node.", + tags = {"logging"}) + ListLevelsResponse listAllLoggersAndLevels(); + + @PUT + @Path("/levels") + @Operation( + summary = "Set one or more logger levels on the target node.", + tags = {"logging"}) + LoggingResponse modifyLocalLogLevel(List requestBody); + + @GET + @Path("/messages") + @Operation( + summary = "Fetch recent log messages on the targeted node.", + tags = {"logging"}) + LogMessagesResponse fetchLocalLogMessages(@QueryParam("since") Long boundingTimeMillis); + + @PUT + @Path("/messages/threshold") + @Operation( + summary = "Set a threshold level for the targeted node's log message watcher.", + tags = {"logging"}) + LoggingResponse setMessageThreshold(SetThresholdRequestBody requestBody); +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationApis.java new file mode 100644 index 00000000000..3fe5ac14f45 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationApis.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.endpoint; + +import static org.apache.solr.client.api.util.Constants.OMIT_FROM_CODEGEN_PROPERTY; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.extensions.Extension; +import io.swagger.v3.oas.annotations.extensions.ExtensionProperty; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.StreamingOutput; +import java.io.IOException; +import org.apache.solr.client.api.model.FileListResponse; +import org.apache.solr.client.api.model.IndexVersionResponse; +import org.apache.solr.client.api.util.CoreApiParameters; + +@Path("/cores/{coreName}/replication") +public interface ReplicationApis { + + @GET + @CoreApiParameters + @Path("/indexversion") + @Operation( + summary = "Return the index version of the specified core.", + tags = {"replication"}) + IndexVersionResponse fetchIndexVersion() throws IOException; + + @GET + @CoreApiParameters + @Path("/files") + @Operation( + summary = "Return the list of index file that make up the specified core.", + tags = {"replication"}) + FileListResponse fetchFileList( + @Parameter(description = "The generation number of the index", required = true) + @QueryParam("generation") + long gen); + + @GET + @CoreApiParameters + @Operation( + summary = "Get a stream of a specific file path of a core", + tags = {"core-replication"}, + extensions = { // TODO Remove as a part of SOLR-17562 + @Extension( + properties = {@ExtensionProperty(name = OMIT_FROM_CODEGEN_PROPERTY, value = "true")}) + }) + @Path("/files/{filePath}") + StreamingOutput fetchFile( + @PathParam("filePath") String filePath, + @Parameter( + description = + "Directory type for specific filePath (cf or tlogFile). Defaults to Lucene index (file) directory if empty", + required = true) + @QueryParam("dirType") + String dirType, + @Parameter(description = "Output stream read/write offset", required = false) + @QueryParam("offset") + String offset, + @Parameter(required = false) @QueryParam("len") String len, + @Parameter(description = "Compress file output", required = false) + @QueryParam("compression") + @DefaultValue("false") + Boolean compression, + @Parameter(description = "Write checksum with output stream", required = false) + @QueryParam("checksum") + @DefaultValue("false") + Boolean checksum, + @Parameter( + description = "Limit data write per seconds. 
Defaults to no throttling", + required = false) + @QueryParam("maxWriteMBPerSec") + double maxWriteMBPerSec, + @Parameter(description = "The generation number of the index", required = false) + @QueryParam("generation") + Long gen); +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionBackupApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationBackupApis.java similarity index 56% rename from solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionBackupApi.java rename to solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationBackupApis.java index 3e08bad6d45..ef28f902ecc 100644 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/CreateCollectionBackupApi.java +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ReplicationBackupApis.java @@ -17,27 +17,27 @@ package org.apache.solr.client.api.endpoint; import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.parameters.RequestBody; import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import org.apache.solr.client.api.model.CreateCollectionBackupRequestBody; -import org.apache.solr.client.api.model.SolrJerseyResponse; +import org.apache.solr.client.api.model.ReplicationBackupRequestBody; +import org.apache.solr.client.api.model.ReplicationBackupResponse; +import org.apache.solr.client.api.util.CoreApiParameters; /** - * V2 API definition for creating a new "backup" of a specified collection + * V2 endpoint for Backup API used for User-Managed clusters and Single-Node Installation. * - *
<p>
This API is analogous to the v1 /admin/collections?action=BACKUP command. + * @see ReplicationApis */ -@Path("/collections/{collectionName}/backups/{backupName}/versions") -public interface CreateCollectionBackupApi { +@Path("/cores/{coreName}/replication") +public interface ReplicationBackupApis { @POST + @CoreApiParameters + @Path("/backups") @Operation( - summary = "Creates a new backup point for a collection", - tags = {"collection-backups"}) - SolrJerseyResponse createCollectionBackup( - @PathParam("collectionName") String collectionName, - @PathParam("backupName") String backupName, - CreateCollectionBackupRequestBody requestBody) - throws Exception; + summary = "Create a backup of a single core using Solr's 'Replication Handler'", + tags = {"replication-backups"}) + ReplicationBackupResponse createBackup( + @RequestBody ReplicationBackupRequestBody backupReplicationPayload); } diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java new file mode 100644 index 00000000000..1f6f089642e --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; +import org.apache.solr.client.api.model.GetSegmentDataResponse; +import org.apache.solr.client.api.util.CoreApiParameters; + +/** + * V2 API definition for fetching metadata about a core's segments + * + *
<p>
This API (GET /v2/cores/coreName/segments) is analogous to the v1 + * /solr/coreName/admin/segments API + */ +@Path("/cores/{coreName}/segments") +public interface SegmentsApi { + + String CORE_INFO_PARAM_DESC = + "Boolean flag to include metadata (e.g. index an data directories, IndexWriter configuration, etc.) about each shard leader's core"; + String FIELD_INFO_PARAM_DESC = + "Boolean flag to include statistics about the indexed fields present on each shard leader."; + String RAW_SIZE_PARAM_DESC = + "Boolean flag to include simple estimates of the disk size taken up by each field (e.g. \"id\", \"_version_\") and by each index data structure (e.g. 'storedFields', 'docValues_numeric')."; + String RAW_SIZE_SUMMARY_DESC = + "Boolean flag to include more involved estimates of the disk size taken up by index data structures, on a per-field basis (e.g. how much data does the \"id\" field contribute to 'storedField' index files). More detail than 'rawSize', less detail than 'rawSizeDetails'."; + String RAW_SIZE_DETAILS_DESC = + "Boolean flag to include detailed statistics about the disk size taken up by various fields and data structures. More detail than 'rawSize' and 'rawSizeSummary'."; + String RAW_SIZE_SAMPLING_PERCENT_DESC = + "Percentage (between 0 and 100) of data to read when estimating index size and statistics. Defaults to 5.0 (i.e. 5%)."; + String SIZE_INFO_PARAM_DESC = + "Boolean flag to include information about the largest index files for each Lucene segment."; + + @GET + @CoreApiParameters + @Operation( + summary = "Fetches metadata about the segments in use by the specified core", + tags = {"segments"}) + GetSegmentDataResponse getSegmentData( + @Parameter(description = CORE_INFO_PARAM_DESC) @QueryParam("coreInfo") Boolean coreInfo, + @Parameter(description = FIELD_INFO_PARAM_DESC) @QueryParam("fieldInfo") Boolean fieldInfo, + @Parameter(description = RAW_SIZE_PARAM_DESC) @QueryParam("rawSize") Boolean rawSize, + @Parameter(description = RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary") + Boolean rawSizeSummary, + @Parameter(description = RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails") + Boolean rawSizeDetails, + @Parameter(description = RAW_SIZE_SAMPLING_PERCENT_DESC) @QueryParam("rawSizeSamplingPercent") + Float rawSizeSamplingPercent, + @Parameter(description = SIZE_INFO_PARAM_DESC) @QueryParam("sizeInfo") Boolean sizeInfo) + throws Exception; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ZooKeeperReadApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ZooKeeperReadApis.java new file mode 100644 index 00000000000..f41e8de3d63 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ZooKeeperReadApis.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.endpoint; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.MediaType; +import org.apache.solr.client.api.model.ZooKeeperFileResponse; +import org.apache.solr.client.api.model.ZooKeeperListChildrenResponse; + +/** V2 API definitions for Solr's ZooKeeper ready-proxy endpoint */ +@Path("/cluster/zookeeper/") +public interface ZooKeeperReadApis { + + @GET + @Path("/data{zkPath:.+}") + @Operation( + summary = "Return the data stored in a specified ZooKeeper node", + tags = {"zookeeper-read"}) + @Produces({"application/vnd.apache.solr.raw", MediaType.APPLICATION_JSON}) + ZooKeeperFileResponse readNode( + @Parameter(description = "The path of the node to read from ZooKeeper") @PathParam("zkPath") + String zkPath); + + // The 'Operation' annotation is omitted intentionally here to ensure this API isn't picked up in + // the OpenAPI spec and consequent code-generation. The server side needs this method to be + // different from 'readNode' above for security reasons (more privileges are needed to access + // security.json), but it's the same logical API expressed by the 'readNode' signature above. + @GET + @Path("/data/security.json") + @Produces({"application/vnd.apache.solr.raw", MediaType.APPLICATION_JSON}) + ZooKeeperFileResponse readSecurityJsonNode(); + + @GET + @Path("/children{zkPath:.*}") + @Produces({"application/json", "application/javabin"}) + @Operation( + summary = "List and stat all children of a specified ZooKeeper node", + tags = {"zookeeper-read"}) + ZooKeeperListChildrenResponse listNodes( + @Parameter(description = "The path of the ZooKeeper node to stat and list children of") + @PathParam("zkPath") + String zkPath, + @Parameter( + description = + "Controls whether stat information for child nodes is included in the response. 'true' by default.") + @QueryParam("children") + Boolean includeChildren) + throws Exception; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/CategoryRoutedAliasProperties.java b/solr/api/src/java/org/apache/solr/client/api/model/CategoryRoutedAliasProperties.java new file mode 100644 index 00000000000..c3882e0b8a3 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/CategoryRoutedAliasProperties.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class CategoryRoutedAliasProperties extends RoutedAliasProperties { + @JsonProperty("maxCardinality") + public Long maxCardinality; + + @JsonProperty("mustMatch") + public String mustMatch; +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java b/solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java similarity index 70% rename from solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java rename to solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java index 5f7f2e6687d..14e22225986 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java +++ b/solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java @@ -14,19 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.solr.client.solrj.request.beans; +package org.apache.solr.client.api.model; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Map; -import org.apache.solr.common.annotation.JsonProperty; -import org.apache.solr.common.util.ReflectMapWriter; -public class CreateConfigPayload implements ReflectMapWriter { - public static final String DEFAULT_CONFIGSET = - "_default"; // TODO Better location for this in SolrJ? +/** Request body for ConfigsetsApi.Clone */ +public class CloneConfigsetRequestBody { + public static final String DEFAULT_CONFIGSET = "_default"; @JsonProperty(required = true) public String name; - @JsonProperty public String baseConfigSet = DEFAULT_CONFIGSET; + @JsonProperty(defaultValue = DEFAULT_CONFIGSET) + public String baseConfigSet; + @JsonProperty public Map properties; } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ClusterPropertyDetails.java b/solr/api/src/java/org/apache/solr/client/api/model/ClusterPropertyDetails.java new file mode 100644 index 00000000000..9619e96ac1e --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ClusterPropertyDetails.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; + +public class ClusterPropertyDetails { + @JsonProperty("name") + @Schema(description = "The name of the cluster property.") + public String name; + + @JsonProperty("value") + @Schema(description = "The value of the cluster property.") + public Object value; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java new file mode 100644 index 00000000000..82109edb915 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Response of the CollectionStatusApi.getCollectionStatus() API + * + *
<p>
Note that the corresponding v1 API has a slightly different response format. Users should not + * attempt to convert a v1 response into this type. + */ +public class CollectionStatusResponse extends SolrJerseyResponse { + + @JsonProperty public String name; + @JsonProperty public Integer znodeVersion; + + // TODO - consider 'Instant' once SOLR-17608 is finished + @JsonProperty + @JsonFormat(shape = JsonFormat.Shape.NUMBER) + public Date creationTimeMillis; + + @JsonProperty public CollectionMetadata properties; + @JsonProperty public Integer activeShards; + @JsonProperty public Integer inactiveShards; + @JsonProperty public List schemaNonCompliant; + + @JsonProperty public Map shards; + + // Always present in response + public static class CollectionMetadata { + @JsonProperty public String configName; + @JsonProperty public Integer nrtReplicas; + @JsonProperty public Integer pullReplicas; + @JsonProperty public Integer tlogReplicas; + @JsonProperty public Map router; + @JsonProperty public Integer replicationFactor; + + private Map unknownFields = new HashMap<>(); + + @JsonAnyGetter + public Map unknownProperties() { + return unknownFields; + } + + @JsonAnySetter + public void setUnknownProperty(String field, Object value) { + unknownFields.put(field, value); + } + } + + // Always present in response + public static class ShardMetadata { + @JsonProperty public String state; // TODO Make this an enum? + @JsonProperty public String range; + @JsonProperty public ReplicaSummary replicas; + @JsonProperty public LeaderSummary leader; + } + + // Always present in response + public static class ReplicaSummary { + @JsonProperty public Integer total; + @JsonProperty public Integer active; + @JsonProperty public Integer down; + @JsonProperty public Integer recovering; + + @JsonProperty("recovery_failed") + public Integer recoveryFailed; + } + + // Always present in response unless otherwise specified + public static class LeaderSummary { + @JsonProperty public String coreNode; + @JsonProperty public String core; + @JsonProperty public Boolean leader; + + @JsonProperty("node_name") + public String nodeName; + + @JsonProperty("base_url") + public String baseUrl; + + @JsonProperty public String state; // TODO Make this an enum? + @JsonProperty public String type; // TODO Make this an enum? + + @JsonProperty("force_set_state") + public Boolean forceSetState; + + // Present with coreInfo=true || sizeInfo=true unless otherwise specified + @JsonProperty public SegmentInfo segInfos; + + private Map unknownFields = new HashMap<>(); + + @JsonAnyGetter + public Map unknownProperties() { + return unknownFields; + } + + @JsonAnySetter + public void setUnknownProperty(String field, Object value) { + unknownFields.put(field, value); + } + } + + // Present with segments=true || coreInfo=true || sizeInfo=true || fieldInfo=true unless otherwise + // specified + + /** + * Same properties as {@link GetSegmentDataResponse}, but uses a different class to avoid + * inheriting "responseHeader", etc. 
+ */ + public static class SegmentInfo { + @JsonProperty public GetSegmentDataResponse.SegmentSummary info; + + @JsonProperty public Map runningMerges; + + // Present with segments=true || sizeInfo=true || fieldInfo=true + @JsonProperty public Map segments; + + // Present with rawSize=true + @JsonProperty public GetSegmentDataResponse.RawSize rawSize; + + // Present only with fieldInfo=true + @JsonProperty public List fieldInfoLegend; + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/CreateAliasRequestBody.java b/solr/api/src/java/org/apache/solr/client/api/model/CreateAliasRequestBody.java new file mode 100644 index 00000000000..f4fee3a2d39 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/CreateAliasRequestBody.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; +import java.util.List; + +public class CreateAliasRequestBody { + @JsonProperty(required = true) + public String name; + + @JsonProperty("collections") + public List collections; + + @JsonProperty("async") + public String async; + + @JsonProperty("routers") + public List routers; + + @Schema( + description = + "Parameters to be used for any collections created by this alias. 
Only used for 'routed' aliases", + name = "collCreationParameters") + @JsonProperty("create-collection") + public CreateCollectionRequestBody collCreationParameters; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/DeleteCollectionSnapshotResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/DeleteCollectionSnapshotResponse.java index 569d4fbd096..905b0937f1d 100644 --- a/solr/api/src/java/org/apache/solr/client/api/model/DeleteCollectionSnapshotResponse.java +++ b/solr/api/src/java/org/apache/solr/client/api/model/DeleteCollectionSnapshotResponse.java @@ -21,12 +21,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.swagger.v3.oas.annotations.media.Schema; -import org.apache.solr.client.api.endpoint.DeleteCollectionSnapshotApi; -/** - * The Response for {@link DeleteCollectionSnapshotApi#deleteCollectionSnapshot(String, String, - * boolean, String)} - */ +/** The Response for {@link org.apache.solr.client.api.endpoint.CollectionSnapshotApis.Delete} */ public class DeleteCollectionSnapshotResponse extends AsyncJerseyResponse { @Schema(description = "The name of the collection.") @JsonProperty(COLLECTION) diff --git a/solr/core/src/java/org/apache/solr/jersey/ExperimentalResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ExperimentalResponse.java similarity index 92% rename from solr/core/src/java/org/apache/solr/jersey/ExperimentalResponse.java rename to solr/api/src/java/org/apache/solr/client/api/model/ExperimentalResponse.java index 7824b7bc314..c460537e3fb 100644 --- a/solr/core/src/java/org/apache/solr/jersey/ExperimentalResponse.java +++ b/solr/api/src/java/org/apache/solr/client/api/model/ExperimentalResponse.java @@ -15,10 +15,9 @@ * limitations under the License. */ -package org.apache.solr.jersey; +package org.apache.solr.client.api.model; import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.solr.client.api.model.SolrJerseyResponse; /** * {@link SolrJerseyResponse} implementation with a warning field indicating that the format may diff --git a/solr/api/src/java/org/apache/solr/client/api/model/FileListResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/FileListResponse.java new file mode 100644 index 00000000000..d42260449a4 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/FileListResponse.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
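
A hedged usage sketch for CreateAliasRequestBody above: a plain Jackson ObjectMapper can produce the JSON body for a simple, non-routed alias. The alias and collection names are placeholders, and nothing beyond the fields declared above is assumed:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;
    import org.apache.solr.client.api.model.CreateAliasRequestBody;

    public class CreateAliasBodySketch {
      public static void main(String[] args) throws Exception {
        CreateAliasRequestBody body = new CreateAliasRequestBody();
        body.name = "products";                                        // required
        body.collections = List.of("products_2024", "products_2025");  // a "regular" alias
        // 'routers' and the 'create-collection' parameters stay null here; they only
        // apply to routed aliases.

        // With default mapper settings the null fields are serialized too; configuring
        // JsonInclude.Include.NON_NULL would drop them from the output.
        System.out.println(new ObjectMapper().writeValueAsString(body));
      }
    }
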
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Response body for the `GET /api/cores/coreName/replication/files` API */ +public class FileListResponse extends SolrJerseyResponse { + @JsonProperty("filelist") + public List fileList; + + @JsonProperty("confFiles") + public List confFiles; + + @JsonProperty("status") + public String status; + + @JsonProperty("message") + public String message; + + @JsonProperty("exception") + public Exception exception; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/FileMetaData.java b/solr/api/src/java/org/apache/solr/client/api/model/FileMetaData.java new file mode 100644 index 00000000000..79f4d659021 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/FileMetaData.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class FileMetaData { + + @JsonProperty("size") + public long size; + + @JsonProperty("name") + public String name; + + @JsonProperty("checksum") + public long checksum; + + @JsonProperty("alias") + public String alias; + + public FileMetaData() {} + + public FileMetaData(long size, String name, long checksum) { + this.size = size; + this.name = name; + this.checksum = checksum; + } +} diff --git a/dev-tools/solr-missing-doclet/build.gradle b/solr/api/src/java/org/apache/solr/client/api/model/GetClusterPropertyResponse.java similarity index 69% rename from dev-tools/solr-missing-doclet/build.gradle rename to solr/api/src/java/org/apache/solr/client/api/model/GetClusterPropertyResponse.java index e85f0a037e4..3ebdd74ef60 100644 --- a/dev-tools/solr-missing-doclet/build.gradle +++ b/solr/api/src/java/org/apache/solr/client/api/model/GetClusterPropertyResponse.java @@ -15,18 +15,13 @@ * limitations under the License. 
*/ -plugins { - id 'java-library' -} - -version = "1.0.0-SNAPSHOT" -group = "org.apache.solr.tools" -description = 'Doclet-based javadoc validation' +package org.apache.solr.client.api.model; -sourceCompatibility = JavaVersion.VERSION_11 -targetCompatibility = JavaVersion.VERSION_11 +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; -tasks.withType(JavaCompile) { - options.compilerArgs += ["--release", targetCompatibility.toString()] - options.encoding = "UTF-8" +public class GetClusterPropertyResponse extends SolrJerseyResponse { + @JsonProperty("clusterProperty") + @Schema(description = "The requested cluster property.") + public ClusterPropertyDetails clusterProperty; } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java new file mode 100644 index 00000000000..b5e3714bfd3 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
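
GetClusterPropertyResponse above is a thin wrapper around ClusterPropertyDetails, so binding the "get single cluster property" payload is a one-liner. A sketch under the assumption that Jackson is used directly; the JSON string is a hand-written stand-in for the real HTTP response:

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.GetClusterPropertyResponse;

    public class GetClusterPropertySketch {
      public static void main(String[] args) throws Exception {
        String json = "{\"clusterProperty\":{\"name\":\"urlScheme\",\"value\":\"https\"}}";

        // Tolerate any extra fields the live response may carry beyond what this sketch supplies.
        ObjectMapper mapper =
            new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

        GetClusterPropertyResponse rsp = mapper.readValue(json, GetClusterPropertyResponse.class);
        System.out.println(rsp.clusterProperty.name + " = " + rsp.clusterProperty.value);
      }
    }
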
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Response for {@link org.apache.solr.client.api.endpoint.SegmentsApi#getSegmentData(Boolean, + * Boolean, Boolean, Boolean, Boolean, Float, Boolean)} API + */ +public class GetSegmentDataResponse extends SolrJerseyResponse { + @JsonProperty public SegmentSummary info; + + @JsonProperty public Map runningMerges; + + @JsonProperty public Map segments; + + // Present only with fieldInfo=true + @JsonProperty public List fieldInfoLegend; + + // Present with rawSize=true + @JsonProperty public RawSize rawSize; + + // Always present in response + public static class SegmentSummary { + @JsonProperty public String minSegmentLuceneVersion; + @JsonProperty public String commitLuceneVersion; + @JsonProperty public Integer numSegments; + @JsonProperty public String segmentsFileName; + @JsonProperty public Integer totalMaxDoc; + // Typically keys are 'commitCommandVer' and 'commitTimeMSec' + @JsonProperty public Map userData; + + // Present for coreInfo=true only + @JsonProperty public CoreSummary core; + } + + // Always present in response, provided that the specified core has segments + public static class SingleSegmentData { + @JsonProperty public String name; + @JsonProperty public Integer delCount; + @JsonProperty public Integer softDelCount; + @JsonProperty public Boolean hasFieldUpdates; + @JsonProperty public Long sizeInBytes; + @JsonProperty public Integer size; + + // TODO - consider 'Instant' once SOLR-17608 is finished + @JsonProperty + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "YYYY-MM-DD'T'hh:mm:ss.S'Z'") + public Date age; + + @JsonProperty public String source; + @JsonProperty public String version; + @JsonProperty public Integer createdVersionMajor; + @JsonProperty public String minVersion; + @JsonProperty public SegmentDiagnosticInfo diagnostics; + @JsonProperty public Map attributes; + // Only present when index-sorting is in use + @JsonProperty public String sort; + @JsonProperty public Boolean mergeCandidate; + + // Present only when fieldInfo=true + @JsonProperty public Map fields; + + // Present only when sizeInfo=true + @JsonProperty("largestFiles") + public Map largestFilesByName; + } + + // Always present in response, provided that the specified core has segments + public static class SegmentSingleFieldInfo { + @JsonProperty public String flags; + @JsonProperty public Integer docCount; + @JsonProperty public Long termCount; + @JsonProperty public Long sumDocFreq; + @JsonProperty public Long sumTotalTermFreq; + @JsonProperty public String schemaType; + @JsonProperty public Map nonCompliant; + } + + // Always present in response + public static class SegmentDiagnosticInfo { + @JsonProperty("os.version") + public String osVersion; + + @JsonProperty("lucene.version") + public String luceneVersion; + + @JsonProperty public String source; + + // TODO - consider 'Instant' once SOLR-17608 is finished + @JsonProperty + @JsonFormat(shape = JsonFormat.Shape.NUMBER) + public Date timestamp; + + @JsonProperty("java.runtime.version") + public String javaRuntimeVersion; + + @JsonProperty public String os; + + @JsonProperty("java.vendor") + public String javaVendor; + + @JsonProperty("os.arch") + public String 
osArchitecture; + + private Map additionalDiagnostics = new HashMap<>(); + + @JsonAnyGetter + public Map getAdditionalDiagnostics() { + return additionalDiagnostics; + } + + @JsonAnySetter + public void getAdditionalDiagnostics(String field, Object value) { + additionalDiagnostics.put(field, value); + } + } + + // Present with coreInfo=true unless otherwise specified + public static class CoreSummary { + @JsonProperty public String startTime; + @JsonProperty public String dataDir; + @JsonProperty public String indexDir; + @JsonProperty public Double sizeInGB; + @JsonProperty public IndexWriterConfigSummary indexWriterConfig; + } + + // Present with coreInfo=true unless otherwise specified + + /** A serializable representation of Lucene's "LiveIndexWriterConfig" */ + public static class IndexWriterConfigSummary { + @JsonProperty public String analyzer; + @JsonProperty public Double ramBufferSizeMB; + @JsonProperty public Integer maxBufferedDocs; + @JsonProperty public String mergedSegmentWarmer; + @JsonProperty public String delPolicy; + @JsonProperty public String commit; + @JsonProperty public String openMode; + @JsonProperty public String similarity; + @JsonProperty public String mergeScheduler; + @JsonProperty public String codec; + @JsonProperty public String infoStream; + @JsonProperty public String mergePolicy; + @JsonProperty public Boolean readerPooling; + @JsonProperty public Integer perThreadHardLimitMB; + @JsonProperty public Boolean useCompoundFile; + @JsonProperty public Boolean commitOnClose; + @JsonProperty public String indexSort; + @JsonProperty public Boolean checkPendingFlushOnUpdate; + @JsonProperty public String softDeletesField; + @JsonProperty public Long maxFullFlushMergeWaitMillis; + @JsonProperty public String leafSorter; + @JsonProperty public String eventListener; + @JsonProperty public String parentField; + @JsonProperty public String writer; + } + + // Present with rawSize=true unless otherwise specified + public static class RawSize { + @JsonProperty public Map fieldsBySize; + @JsonProperty public Map typesBySize; + + // Present with rawSizeDetails=true + @JsonProperty public Object details; + + // Present with rawSizeSummary=true + @JsonProperty public Map summary; + } +} diff --git a/gradle/validation/versions-props-sorted.gradle b/solr/api/src/java/org/apache/solr/client/api/model/IndexVersionResponse.java similarity index 56% rename from gradle/validation/versions-props-sorted.gradle rename to solr/api/src/java/org/apache/solr/client/api/model/IndexVersionResponse.java index 3282faf8391..0d5633ca6e8 100644 --- a/gradle/validation/versions-props-sorted.gradle +++ b/solr/api/src/java/org/apache/solr/client/api/model/IndexVersionResponse.java @@ -14,22 +14,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.apache.solr.client.api.model; -// This ensures 'versions.props' file is sorted lexicographically. 
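
The @JsonAnyGetter/@JsonAnySetter pair used by SegmentDiagnosticInfo (and by several other classes in this change) gives the models a catch-all for keys the POJO does not declare, which keeps deserialization from breaking when Lucene adds new diagnostics entries. A small sketch of that round trip; the diagnostics keys are invented purely for illustration:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.GetSegmentDataResponse;

    public class DiagnosticsCatchAllSketch {
      public static void main(String[] args) throws Exception {
        // "os" and "source" are declared fields; "mergeFactor" is not and lands in the catch-all map.
        String json = "{\"os\":\"Linux\",\"source\":\"merge\",\"mergeFactor\":\"10\"}";

        ObjectMapper mapper = new ObjectMapper();
        GetSegmentDataResponse.SegmentDiagnosticInfo diag =
            mapper.readValue(json, GetSegmentDataResponse.SegmentDiagnosticInfo.class);

        System.out.println(diag.os);                          // Linux
        System.out.println(diag.getAdditionalDiagnostics());  // {mergeFactor=10}

        // On the way back out, @JsonAnyGetter folds the map entries into the top-level object again.
        System.out.println(mapper.writeValueAsString(diag));
      }
    }

The design choice is forward compatibility: unrecognized keys survive a deserialize/serialize round trip instead of being dropped or causing failures.
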
+import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.stream.Collectors +/** Response body for the `GET /api/cores/coreName/replication/indexversion` API */ +public class IndexVersionResponse extends SolrJerseyResponse { -configure(rootProject) { - task versionsPropsAreSorted() { - doFirst { - def versionsProps = file('versions.props') - // remove # commented lines and blank lines - def lines = versionsProps.readLines("UTF-8").stream().filter(l -> !l.matches(/^(#.*|\s*)$/)).collect(Collectors.toList()) - def sorted = lines.toSorted() + @JsonProperty("indexversion") + public Long indexVersion; - if (!Objects.equals(lines, sorted)) { - throw new GradleException("${versionsProps} file is not sorted lexicographically.") - } - } + @JsonProperty("generation") + public Long generation; + + @JsonProperty("status") + public String status; + + public IndexVersionResponse() {} + + public IndexVersionResponse(Long indexVersion, Long generation, String status) { + this.indexVersion = indexVersion; + this.generation = generation; + this.status = status; } } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ListClusterPropertiesResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ListClusterPropertiesResponse.java new file mode 100644 index 00000000000..46504fb23f1 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ListClusterPropertiesResponse.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; +import java.util.List; + +public class ListClusterPropertiesResponse extends SolrJerseyResponse { + @JsonProperty("clusterProperties") + @Schema(description = "The list of cluster properties.") + public List clusterProperties; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ListCollectionSnapshotsResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ListCollectionSnapshotsResponse.java new file mode 100644 index 00000000000..962cfda7014 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ListCollectionSnapshotsResponse.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; +import java.util.Map; + +/** The Response for the v2 "list collection snapshots" API */ +public class ListCollectionSnapshotsResponse extends AsyncJerseyResponse { + + // TODO In practice, map values are of the CollectionSnapshotMetaData type, but that cannot be + // used here until the class is made into more of a POJO and can join the 'api' module here + @Schema(description = "The snapshots for the collection.") + @JsonProperty("snapshots") + public Map snapshots; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ListLevelsResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ListLevelsResponse.java new file mode 100644 index 00000000000..b7ad63ac4c1 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ListLevelsResponse.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Response format for the 'GET /api/node/logging/levels' API. */ +public class ListLevelsResponse extends LoggingResponse { + @JsonProperty public List levels; + @JsonProperty public List loggers; +} diff --git a/solr/core/src/java/org/apache/solr/servlet/BaseSolrServlet.java b/solr/api/src/java/org/apache/solr/client/api/model/LogLevelChange.java similarity index 65% rename from solr/core/src/java/org/apache/solr/servlet/BaseSolrServlet.java rename to solr/api/src/java/org/apache/solr/client/api/model/LogLevelChange.java index 85aca08cd86..31443a5dc8d 100644 --- a/solr/core/src/java/org/apache/solr/servlet/BaseSolrServlet.java +++ b/solr/api/src/java/org/apache/solr/client/api/model/LogLevelChange.java @@ -14,19 +14,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.solr.servlet; +package org.apache.solr.client.api.model; -import javax.servlet.http.HttpServlet; +import com.fasterxml.jackson.annotation.JsonProperty; -/** - * All Solr servlets available to the user's webapp should extend this class and not {@link - * HttpServlet}. 
This class ensures that the logging configuration is correct before any Solr - * specific code is executed. - */ -@SuppressWarnings("serial") -abstract class BaseSolrServlet extends HttpServlet { +/** A user-requested modification in the level that a specified logger reports at. */ +public class LogLevelChange { + public LogLevelChange() {} - static { - CheckLoggingConfiguration.check(); + public LogLevelChange(String logger, String level) { + this.logger = logger; + this.level = level; } + + @JsonProperty public String logger; + @JsonProperty public String level; } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/LogLevelInfo.java b/solr/api/src/java/org/apache/solr/client/api/model/LogLevelInfo.java new file mode 100644 index 00000000000..9bde4b1657a --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/LogLevelInfo.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Representation of a single logger and its current state. */ +public class LogLevelInfo { + public LogLevelInfo() {} + + public LogLevelInfo(String name, String level, boolean set) { + this.name = name; + this.level = level; + this.set = set; + } + + @JsonProperty("name") + public String name; + + @JsonProperty("level") + public String level; + + @JsonProperty("set") + public boolean set; +} diff --git a/solr/modules/hadoop-auth/src/test/org/apache/solr/security/hadoop/HadoopAuthFakeGroupMapping.java b/solr/api/src/java/org/apache/solr/client/api/model/LogMessageInfo.java similarity index 61% rename from solr/modules/hadoop-auth/src/test/org/apache/solr/security/hadoop/HadoopAuthFakeGroupMapping.java rename to solr/api/src/java/org/apache/solr/client/api/model/LogMessageInfo.java index 053f3e55061..7596cc28f68 100644 --- a/solr/modules/hadoop-auth/src/test/org/apache/solr/security/hadoop/HadoopAuthFakeGroupMapping.java +++ b/solr/api/src/java/org/apache/solr/client/api/model/LogMessageInfo.java @@ -14,22 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
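
LogLevelChange above is the element type used when asking a node to adjust its log levels. A small sketch of building and serializing such a list with Jackson; the logger names and levels are examples only, and the exact endpoint that accepts this body is not assumed here:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;
    import org.apache.solr.client.api.model.LogLevelChange;

    public class LogLevelChangeSketch {
      public static void main(String[] args) throws Exception {
        // Two example changes; constructor arguments are (logger, level).
        List<LogLevelChange> changes =
            List.of(
                new LogLevelChange("org.apache.solr.core", "DEBUG"),
                new LogLevelChange("org.apache.zookeeper", "WARN"));

        // Serialized form: [{"logger":"org.apache.solr.core","level":"DEBUG"}, ...]
        System.out.println(new ObjectMapper().writeValueAsString(changes));
      }
    }
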
*/ -package org.apache.solr.security.hadoop; +package org.apache.solr.client.api.model; -import java.util.Collections; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; -import org.apache.hadoop.security.GroupMappingServiceProvider; -/** Fake mapping for Hadoop to prevent falling back to Shell group provider */ -public class HadoopAuthFakeGroupMapping implements GroupMappingServiceProvider { - @Override - public List getGroups(String user) { - return Collections.singletonList("supergroup"); - } +/** Metadata about the log messages returned by the 'GET /api/node/logging/messages' API */ +public class LogMessageInfo { + @JsonProperty("since") + public Long boundingTimeMillis; - @Override - public void cacheGroupsRefresh() {} + @JsonProperty public Boolean found; + @JsonProperty public List levels; - @Override - public void cacheGroupsAdd(List groups) {} + @JsonProperty("last") + public long lastRecordTimestampMillis; + + @JsonProperty public int buffer; + @JsonProperty public String threshold; } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/LogMessagesResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/LogMessagesResponse.java new file mode 100644 index 00000000000..fb979afdfeb --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/LogMessagesResponse.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Response format for the 'GET /api/node/logging/messages' API. */ +public class LogMessagesResponse extends LoggingResponse { + @JsonProperty public LogMessageInfo info; + + // TODO Make this declaration more specific. Value on the server side is currently a + // SolrDocumentList, which cannot live in 'api' + @JsonProperty("history") + public Object docs; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/LoggingResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/LoggingResponse.java new file mode 100644 index 00000000000..1ba4be2c0d3 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/LoggingResponse.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Generic logging response that includes the name of the log watcher (e.g. "Log4j2") */ +public class LoggingResponse extends SolrJerseyResponse { + @JsonProperty("watcher") + public String watcherName; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupRequestBody.java b/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupRequestBody.java new file mode 100644 index 00000000000..65b93a1d829 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupRequestBody.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; + +public class ReplicationBackupRequestBody { + + public ReplicationBackupRequestBody() {} + + public ReplicationBackupRequestBody( + String location, String name, int numberToKeep, String repository, String commitName) { + this.location = location; + this.name = name; + this.numberToKeep = numberToKeep; + this.repository = repository; + this.commitName = commitName; + } + + @Schema(description = "The path where the backup will be created") + @JsonProperty + public String location; + + @Schema(description = "The backup will be created in a directory called snapshot.<name>") + @JsonProperty + public String name; + + @Schema(description = "The number of backups to keep.") + @JsonProperty + public int numberToKeep; + + @Schema(description = "The name of the repository to be used for the backup.") + @JsonProperty + public String repository; + + @Schema( + description = + "The name of the commit which was used while taking a snapshot using the CREATESNAPSHOT command.") + @JsonProperty + public String commitName; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupResponse.java new file mode 100644 index 00000000000..15581fa734e --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ReplicationBackupResponse.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
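
ReplicationBackupRequestBody above mirrors the parameters of the replication "backup" command, so constructing the v2 request body is mostly a matter of filling in the same values. A sketch using the constructor defined above; the location and repository values are placeholders:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.ReplicationBackupRequestBody;

    public class ReplicationBackupBodySketch {
      public static void main(String[] args) throws Exception {
        // Constructor arguments: location, name, numberToKeep, repository, commitName.
        ReplicationBackupRequestBody body =
            new ReplicationBackupRequestBody("/backups/techproducts", "nightly", 3, "localfs", null);

        // The JSON keys follow the @JsonProperty field names, e.g.
        // {"location":"/backups/techproducts","name":"nightly","numberToKeep":3,...}
        System.out.println(new ObjectMapper().writeValueAsString(body));
      }
    }
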
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Response for the v2 "replication backup" API */ +public class ReplicationBackupResponse extends SolrJerseyResponse { + + @JsonProperty("result") + public Object result; + + @JsonProperty("status") + public String status; + + @JsonProperty("message") + public String message; + + @JsonProperty("exception") + public Exception exception; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/RestoreCollectionRequestBody.java b/solr/api/src/java/org/apache/solr/client/api/model/RestoreCollectionRequestBody.java new file mode 100644 index 00000000000..9a592d41e3b --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/RestoreCollectionRequestBody.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; + +/** Request body for the v2 "restore collection" API. */ +public class RestoreCollectionRequestBody { + + @JsonProperty(required = true) + public String collection; + + @JsonProperty public String location; + @JsonProperty public String repository; + @JsonProperty public Integer backupId; + + @Schema( + description = + "Parameters to be used for any collections created by this restore. 
Only used if the collection specified by the 'collection' property does not exist.", + name = "createCollectionParams") + @JsonProperty("create-collection") + public CreateCollectionRequestBody createCollectionParams; + + @JsonProperty public String async; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/RoutedAliasProperties.java b/solr/api/src/java/org/apache/solr/client/api/model/RoutedAliasProperties.java new file mode 100644 index 00000000000..8355937e70b --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/RoutedAliasProperties.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonSubTypes({ + @JsonSubTypes.Type(value = TimeRoutedAliasProperties.class, name = "time"), + @JsonSubTypes.Type(value = CategoryRoutedAliasProperties.class, name = "category") +}) +public abstract class RoutedAliasProperties { + @JsonProperty(required = true) + public String field; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetDynamicFieldInfoResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetDynamicFieldInfoResponse.java new file mode 100644 index 00000000000..626828c42cd --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetDynamicFieldInfoResponse.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
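
RestoreCollectionRequestBody above nests a CreateCollectionRequestBody under the "create-collection" key, which only takes effect when the target collection does not already exist. A hedged sketch of a restore body; the collection, location, and repository names are placeholders, and the nested creation parameters are left unset because their fields are defined elsewhere in the 'api' module:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.RestoreCollectionRequestBody;

    public class RestoreCollectionBodySketch {
      public static void main(String[] args) throws Exception {
        RestoreCollectionRequestBody body = new RestoreCollectionRequestBody();
        body.collection = "techproducts_restored";  // required
        body.location = "/backups";                 // backup location (placeholder)
        body.repository = "localfs";                // repository name (placeholder)
        body.backupId = 0;                          // which backup point to restore
        // body.createCollectionParams would carry collection-creation overrides when the
        // target collection does not yet exist; it is intentionally left null in this sketch.

        System.out.println(new ObjectMapper().writeValueAsString(body));
      }
    }
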
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class SchemaGetDynamicFieldInfoResponse extends SolrJerseyResponse { + + // TODO Server code sets this field as 'SimpleOrderedMap'; make this type declaration more + // specific once SOLR-12959 is completed + @JsonProperty("dynamicField") + public Object dynamicFieldInfo; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldInfoResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldInfoResponse.java new file mode 100644 index 00000000000..9b94dbda428 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldInfoResponse.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class SchemaGetFieldInfoResponse extends SolrJerseyResponse { + + // TODO Server code sets this field as 'SimpleOrderedMap'; make this type declaration more + // specific once SOLR-12959 is completed + @JsonProperty("field") + public Object fieldInfo; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldTypeInfoResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldTypeInfoResponse.java new file mode 100644 index 00000000000..8e243cc111c --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaGetFieldTypeInfoResponse.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class SchemaGetFieldTypeInfoResponse extends SolrJerseyResponse { + + // TODO Server code sets this field as 'SimpleOrderedMap'; make this type declaration more + // specific once SOLR-12959 is completed + @JsonProperty("fieldType") + public Object fieldTypeInfo; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaListCopyFieldsResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListCopyFieldsResponse.java new file mode 100644 index 00000000000..ca18e9fa711 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListCopyFieldsResponse.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +public class SchemaListCopyFieldsResponse extends SolrJerseyResponse { + @JsonProperty("copyFields") + public List copyFields; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaListDynamicFieldsResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListDynamicFieldsResponse.java new file mode 100644 index 00000000000..7c43baf6020 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListDynamicFieldsResponse.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +public class SchemaListDynamicFieldsResponse extends SolrJerseyResponse { + @JsonProperty("dynamicFields") + public List dynamicFields; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldTypesResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldTypesResponse.java new file mode 100644 index 00000000000..1b8033352a5 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldTypesResponse.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +public class SchemaListFieldTypesResponse extends SolrJerseyResponse { + @JsonProperty("fieldTypes") + public List fieldTypes; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldsResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldsResponse.java new file mode 100644 index 00000000000..4ab6f737c2d --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SchemaListFieldsResponse.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +public class SchemaListFieldsResponse extends SolrJerseyResponse { + @JsonProperty("fields") + public List fields; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SetClusterPropertyRequestBody.java b/solr/api/src/java/org/apache/solr/client/api/model/SetClusterPropertyRequestBody.java new file mode 100644 index 00000000000..057f4bcb1d5 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SetClusterPropertyRequestBody.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.media.Schema; + +public class SetClusterPropertyRequestBody { + @Schema(description = "The value to assign to the property.") + @JsonProperty("value") + public String value; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/SetThresholdRequestBody.java b/solr/api/src/java/org/apache/solr/client/api/model/SetThresholdRequestBody.java new file mode 100644 index 00000000000..3dd5b070798 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/SetThresholdRequestBody.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** The request body for the 'PUT /api/node/logging/messages/threshold' API. 
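
SetClusterPropertyRequestBody above is deliberately minimal: only the value travels in the body, with the property being set presumably addressed by the request path. A sketch of the resulting wire format, using an illustrative value:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.SetClusterPropertyRequestBody;

    public class SetClusterPropertySketch {
      public static void main(String[] args) throws Exception {
        SetClusterPropertyRequestBody body = new SetClusterPropertyRequestBody();
        body.value = "https";

        // Serializes to {"value":"https"}; the property name itself is not part of the body.
        System.out.println(new ObjectMapper().writeValueAsString(body));
      }
    }
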
*/ +public class SetThresholdRequestBody { + public SetThresholdRequestBody() {} + + public SetThresholdRequestBody(String level) { + this.level = level; + } + + @JsonProperty(required = true) + public String level; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/TimeRoutedAliasProperties.java b/solr/api/src/java/org/apache/solr/client/api/model/TimeRoutedAliasProperties.java new file mode 100644 index 00000000000..8a953cfaf94 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/TimeRoutedAliasProperties.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class TimeRoutedAliasProperties extends RoutedAliasProperties { + // Expected to be a date/time in ISO format, or 'NOW' + @JsonProperty(required = true) + public String start; + + // TODO Change this to 'timezone' or something less abbreviated + @JsonProperty("tz") + public String tz; + + @JsonProperty(required = true) + public String interval; + + @JsonProperty("maxFutureMs") + public Long maxFutureMs; + + @JsonProperty("preemptiveCreateMath") + public String preemptiveCreateMath; + + @JsonProperty("autoDeleteAge") + public String autoDeleteAge; +} diff --git a/buildSrc/scriptDepVersions.gradle b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperFileResponse.java similarity index 66% rename from buildSrc/scriptDepVersions.gradle rename to solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperFileResponse.java index a1d2bc9467d..d09302fa168 100644 --- a/buildSrc/scriptDepVersions.gradle +++ b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperFileResponse.java @@ -14,18 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.apache.solr.client.api.model; -// Declare script dependency versions outside of palantir's -// version unification control. These are not our main dependencies -// but are reused in buildSrc and across applied scripts. 
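
Because RoutedAliasProperties is annotated with @JsonTypeInfo/@JsonSubTypes, the concrete router class is selected by a "type" discriminator ("time" or "category") rather than by the Java type, so TimeRoutedAliasProperties above round-trips with that extra property. A sketch of the round trip, with illustrative field values:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.solr.client.api.model.RoutedAliasProperties;
    import org.apache.solr.client.api.model.TimeRoutedAliasProperties;

    public class RoutedAliasPolymorphismSketch {
      public static void main(String[] args) throws Exception {
        TimeRoutedAliasProperties router = new TimeRoutedAliasProperties();
        router.field = "timestamp_dt";
        router.start = "NOW";
        router.interval = "+1DAY";

        ObjectMapper mapper = new ObjectMapper();
        // Serializing through the abstract supertype adds {"type":"time", ...} to the JSON.
        String json = mapper.writerFor(RoutedAliasProperties.class).writeValueAsString(router);
        System.out.println(json);

        // ...and the same discriminator picks the subtype again on the way back in.
        RoutedAliasProperties parsed = mapper.readValue(json, RoutedAliasProperties.class);
        System.out.println(parsed instanceof TimeRoutedAliasProperties);  // true
      }
    }

The discriminator keeps the request body self-describing, so callers and the server agree on the router variant without any out-of-band signal.
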
+import com.fasterxml.jackson.annotation.JsonProperty; -ext { - scriptDepVersions = [ - "apache-rat": "0.15", - "commons-codec": "1.16.0", - "ecj": "3.33.0", - "javacc": "7.0.12", - "jgit": "6.7.0.202309050840-r", - "flexmark": "0.64.8", - ] +public class ZooKeeperFileResponse extends SolrJerseyResponse { + // TODO Should be switched over to using StreamingOutput as a part of SOLR-17562 + @JsonProperty("content") // A flag value that RawResponseWriter handles specially + public Object output; + + @JsonProperty("zkData") + public String zkData; } diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperListChildrenResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperListChildrenResponse.java new file mode 100644 index 00000000000..be7a69575dc --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperListChildrenResponse.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.HashMap; +import java.util.Map; + +public class ZooKeeperListChildrenResponse extends ExperimentalResponse { + @JsonProperty("stat") + public ZooKeeperStat stat; + + // TODO Currently the list response (when child information is fetched) consists primarily of an + // object with only one key - the name of the root node - with separate objects under there for + // each child. The additional nesting under the root node doesn't serve much purpose afaict + // and should be removed. + public Map> unknownFields = new HashMap<>(); + + @JsonAnyGetter + public Map> unknownProperties() { + return unknownFields; + } + + @JsonAnySetter + public void setUnknownProperty(String field, Map value) { + unknownFields.put(field, value); + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperStat.java b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperStat.java new file mode 100644 index 00000000000..302b3885095 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/ZooKeeperStat.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Represents the data returned by a ZooKeeper 'stat' call */ +public class ZooKeeperStat { + @JsonProperty("version") + public int version; + + @JsonProperty("aversion") + public int aversion; + + @JsonProperty("children") + public int children; + + @JsonProperty("ctime") + public long ctime; + + @JsonProperty("cversion") + public int cversion; + + @JsonProperty("czxid") + public long czxid; + + @JsonProperty("ephemeralOwner") + public long ephemeralOwner; + + @JsonProperty("mtime") + public long mtime; + + @JsonProperty("mzxid") + public long mzxid; + + @JsonProperty("pzxid") + public long pzxid; + + @JsonProperty("dataLength") + public int dataLength; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/util/Constants.java b/solr/api/src/java/org/apache/solr/client/api/util/Constants.java index b4ef56c2050..49e69d37fac 100644 --- a/solr/api/src/java/org/apache/solr/client/api/util/Constants.java +++ b/solr/api/src/java/org/apache/solr/client/api/util/Constants.java @@ -27,6 +27,8 @@ private Constants() { public static final String INDEX_PATH_PREFIX = "/{" + INDEX_TYPE_PATH_PARAMETER + ":cores|collections}/{" + INDEX_NAME_PATH_PARAMETER + "}"; + public static final String CORE_NAME_PATH_PARAMETER = "coreName"; + public static final String OMIT_FROM_CODEGEN_PROPERTY = "omitFromCodegen"; public static final String GENERIC_ENTITY_PROPERTY = "genericEntity"; diff --git a/solr/api/src/java/org/apache/solr/client/api/util/CoreApiParameters.java b/solr/api/src/java/org/apache/solr/client/api/util/CoreApiParameters.java new file mode 100644 index 00000000000..7151ee9eda7 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/util/CoreApiParameters.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.api.util; + +import static org.apache.solr.client.api.util.Constants.CORE_NAME_PATH_PARAMETER; + +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.enums.ParameterIn; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Concisely collects the parameters shared by APIs that interact with contents of a specific core. + * + *

Not to be used on APIs that apply to both cores AND collections. {@link StoreApiParameters} + * should be used in those cases. + * + *

Used primarily as a way to avoid duplicating these parameter definitions on each relevant + * interface method in {@link org.apache.solr.client.api.endpoint} + */ +@Target({ElementType.METHOD, ElementType.TYPE, ElementType.PARAMETER, ElementType.FIELD}) +@Retention(RetentionPolicy.RUNTIME) +@Parameter(name = CORE_NAME_PATH_PARAMETER, in = ParameterIn.PATH) +public @interface CoreApiParameters {} diff --git a/solr/api/src/java/org/apache/solr/client/api/util/StoreApiParameters.java b/solr/api/src/java/org/apache/solr/client/api/util/StoreApiParameters.java index b82f4cd107a..1d9a66bfa9c 100644 --- a/solr/api/src/java/org/apache/solr/client/api/util/StoreApiParameters.java +++ b/solr/api/src/java/org/apache/solr/client/api/util/StoreApiParameters.java @@ -29,8 +29,10 @@ import org.apache.solr.client.api.model.IndexType; /** - * Concisely collects the parameters shared by APIs that interact with contents of a specific - * core/collection. + * Concisely collects the parameters shared by APIs that interact with contents of a specific core + * OR collection. + * + *

Not to be used on APIs that are only available on cores or only on collections. * *

Used primarily as a way to avoid duplicating these parameter definitions on each relevant * interface method in {@link org.apache.solr.client.api.endpoint} diff --git a/solr/benchmark/README.md b/solr/benchmark/README.md index 7075ef111a7..9b1b8cdf623 100644 --- a/solr/benchmark/README.md +++ b/solr/benchmark/README.md @@ -1,356 +1,418 @@ -JMH-Benchmarks module -===================== +# Solr JMH Benchmark Module -This module contains benchmarks written using [JMH](https://openjdk.java.net/projects/code-tools/jmh/) from OpenJDK. -Writing correct micro-benchmarks in Java (or another JVM language) is difficult and there are many non-obvious -pitfalls (many due to compiler optimizations). JMH is a framework for running and analyzing benchmarks (micro or macro) -written in Java (or another JVM language). +![](https://user-images.githubusercontent.com/448788/140059718-de183e23-414e-4499-883a-34ec3cfbd2b6.png) -* [JMH-Benchmarks module](#jmh-benchmarks-module) - * [Running benchmarks](#running-benchmarks) - * [Using JMH with async profiler](#using-jmh-with-async-profiler) - * [Using JMH GC profiler](#using-jmh-gc-profiler) - * [Using JMH Java Flight Recorder profiler](#using-jmh-java-flight-recorder-profiler) - * [JMH Options](#jmh-options) - * [Writing benchmarks](#writing-benchmarks) - * [SolrCloud MiniCluster Benchmark Setup](#solrcloud-minicluster-benchmark-setup) - * [MiniCluster Metrics](#minicluster-metrics) - * [Benchmark Repeatability](#benchmark-repeatability) +**_`profile, compare and introspect`_** -## Running benchmarks +**A flexible, developer-friendly, microbenchmark framework** -If you want to set specific JMH flags or only run certain benchmarks, passing arguments via gradle tasks is cumbersome. -The process has been simplified by the provided `jmh.sh` script. +![](https://img.shields.io/badge/developer-tool-blue) -The default behavior is to run all benchmarks: +## Table Of Content -`./jmh.sh` +- [](#) + - [Table Of Content](#table-of-content) + - [Overview](#overview) + - [Getting Started](#getting-started) + - [Running `jmh.sh` with no Arguments](#running-jmhsh-with-no-arguments) + - [Pass a regex pattern or name after the command to select the benchmark(s) to run](#pass-a-regex-pattern-or-name-after-the-command-to-select-the-benchmarks-to-run) + - [The argument `-l` will list all the available benchmarks](#the-argument--l-will-list-all-the-available-benchmarks) + - [Check which benchmarks will run by entering a pattern after the -l argument](#check-which-benchmarks-will-run-by-entering-a-pattern-after-the--l-argument) + - [Further Pattern Examples](#further-pattern-examples) + - [`jmh.sh` accepts all the standard arguments that the standard JMH main-class handles](#jmhsh-accepts-all-the-standard-arguments-that-the-standard-jmh-main-class-handles) + - [Overriding Benchmark Parameters](#overriding-benchmark-parameters) + - [Format and Write Results to Files](#format-and-write-results-to-files) + - [JMH Command-Line Arguments](#jmh-command-line-arguments) + - [The JMH Command-Line Syntax](#the-jmh-command-line-syntax) + - [The Full List of JMH Arguments](#the-full-list-of-jmh-arguments) + - [Writing JMH benchmarks](#writing-jmh-benchmarks) + - [Continued Documentation](#continued-documentation) -Pass a pattern or name after the command to select the benchmarks: +--- -`./jmh.sh CloudIndexing` +## Overview -Check which benchmarks match the provided pattern: +JMH is a Java **microbenchmark** framework from some of the developers that work on +OpenJDK. 
Not surprisingly, OpenJDK is where you will find JMH's home today, alongside some +other useful little Java libraries such as JOL (Java Object Layout). -`./jmh.sh -l CloudIndexing` +The significant value in JMH is that you get to stand on the shoulders of some brilliant +engineers that have done some tricky groundwork that many an ambitious Java benchmark writer +has merrily wandered past. -Run a specific test and overrides the number of forks, iterations and sets warm-up iterations to `2`: +Rather than simply providing a boilerplate framework for driving iterations and measuring +elapsed times, which JMH does happily do, the focus is on the many forces that +deceive and disorient the earnest benchmark enthusiast. -`./jmh.sh -f 2 -i 2 -wi 2 CloudIndexing` +From spinning your benchmark into all new generated source code +in an attempt to avoid falling victim to undesirable optimizations, to offering +**BlackHoles** and a solid collection of convention and cleverly thought out yet +simple boilerplate, the goal of JMH is to lift the developer off the +microbenchmark floor and at least to their knees. -Run a specific test with async and GC profilers on Linux and flame graph output: +JMH reaches out a hand to both the best and most regular among us in a solid, cautious +effort to promote the willing into the real, often-obscured world of the microbenchmark. -`./jmh.sh -prof gc -prof async:libPath=/path/to/libasyncProfiler.so\;output=flamegraph\;dir=profile-results CloudIndexing` +## Code Organization Breakdown -### Using JMH with async profiler +![](https://img.shields.io/badge/data-...move-blue) -It's good practice to check profiler output for micro-benchmarks in order to verify that they represent the expected -application behavior and measure what you expect to measure. Some example pitfalls include the use of expensive mocks or -accidental inclusion of test setup code in the benchmarked code. JMH includes -[async-profiler](https://github.com/jvm-profiling-tools/async-profiler) integration that makes this easy: +- **JMH:** microbenchmark classes and some common base code to support them. -`./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;dir=profile-results` +- **Random Data:** a framework for easily generating specific and repeatable random data. + +## Getting Started + +Running **JMH** is handled via the `jmh.sh` shell script. This script uses Gradle to +extract the correct classpath and configures a handful of helpful Java +command prompt arguments and system properties. For the most part, `jmh.sh` script +will pass any arguments it receives directly to JMH. You run the script +from the root benchmark module directory (i.e. `solr/benchmark`). + +### Running `jmh.sh` with no Arguments -With flame graph output: - -`./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;output=flamegraph\;dir=profile-results` - -Simultaneous cpu, allocation and lock profiling with async profiler 2.0 and jfr output: - -`./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;output=jfr\;alloc\;lock\;dir=profile-results CloudIndexing` - -A number of arguments can be passed to configure async profiler, run the following for a description: - -`./jmh.sh -prof async:help` - -You can also skip specifying libPath if you place the async profiler lib in a predefined location, such as one of the -locations in the env variable `LD_LIBRARY_PATH` if it has been set (many Linux distributions set this env variable, Arch -by default does not), or `/usr/lib` should work. 
- -#### OS Permissions for Async Profiler - -Async Profiler uses perf to profile native code in addition to Java code. It will need the following for the necessary -access. - -```bash -echo 0 > /proc/sys/kernel/kptr_restrict -echo 1 > /proc/sys/kernel/perf_event_paranoid -``` - -or - -```bash -sudo sysctl -w kernel.kptr_restrict=0 -sudo sysctl -w kernel.perf_event_paranoid=1 -``` - -### Using JMH GC profiler - -You can run a benchmark with `-prof gc` to measure its allocation rate: - -`./jmh.sh -prof gc:dir=profile-results` - -Of particular importance is the `norm` alloc rates, which measure the allocations per operation rather than allocations -per second. - -### Using JMH Java Flight Recorder profiler - -JMH comes with a variety of built-in profilers. Here is an example of using JFR: - -`./jmh.sh -prof jfr:dir=profile-results\;configName=jfr-profile.jfc` - -In this example we point to the included configuration file with configName, but you could also do something like -settings=default or settings=profile. - -### Benchmark Outputs - -By default, output that benchmarks generate is created in the build/work directory. You can change this location by setting the workBaseDir system property like this: - - -jvmArgsAppend -DworkBaseDir=/data3/bench_work - -If a profiler generates output, it will generally be written to the current working directory - that is the benchmark module directory itself. You can usually change this via the dir option, for example: - - ./jmh.sh -prof jfr:dir=build/work/profile-results JsonFaceting - -### Using a Separate MiniCluster Base Directory - -If you have a special case MiniCluster you have generated, such as one you have prepared with very large indexes for a search benchmark run, you can change the base directory used by the profiler -for the MiniCluster with the miniClusterBaseDir system property. This is for search based benchmarks in general and the MiniCluster wil not be removed automatically by the benchmark. - -### JMH Options - -Some common JMH options are: - -```text +> +> ```zsh +> # run all benchmarks found in subdirectories +> ./jmh.sh +> ``` + +### Pass a regex pattern or name after the command to select the benchmark(s) to run + +> +> ```zsh +> ./jmh.sh BenchmarkClass +> ``` + +### The argument `-l` will list all the available benchmarks + +> +> ```zsh +> ./jmh.sh -l +> ``` + +### Check which benchmarks will run by entering a pattern after the -l argument + +Use the full benchmark class name, the simple class name, the benchmark +method name, or a substring. + +> +> ```zsh +> ./jmh.sh -l Ben +> ``` + +### Further Pattern Examples + +> +> ```shell +>./jmh.sh -l org.apache.solr.benchmark.search.BenchmarkClass +>./jmh.sh -l BenchmarkClass +>./jmh.sh -l BenchmarkClass.benchmethod +>./jmh.sh -l Bench +>./jmh.sh -l benchme + +### The JMH Script Accepts _ALL_ of the Standard JMH Arguments + +Here we tell JMH to run the trial iterations twice, forking a new JVM for each +trial. We also explicitly set the number of warmup iterations and the +measured iterations to 2. + +> +> ```zsh +> ./jmh.sh -f 2 -wi 2 -i 2 BenchmarkClass +> ``` + +### Overriding Benchmark Parameters + +> ![](https://img.shields.io/badge/overridable-params-blue) +> +> ```java +> @Param("1000") +> private int numDocs; +> ``` + +The state objects that can be specified in benchmark classes will often have a +number of input parameters that benchmark method calls will access. The notation +above will default numDocs to 1000 and also allow you to override that value +using the `-p` argument. 
A benchmark might also use a @Param annotation such as: + +> ![](https://img.shields.io/badge/sequenced-params-blue) +> +> ```java +> @Param("1000","5000","1000") +> private int numDocs; +> ``` + +By default, that would cause the benchmark +to be run enough times to use each of the specified values. If multiple input +parameters are specified this way, the number of runs needed will quickly +expand. You can pass multiple `-p` +arguments and each will completely replace the behavior of any default +annotation values. + +> +> ```zsh +> # use 2000 docs instead of 1000 +> ./jmh.sh BenchmarkClass -p numDocs=2000 +> +> +> # use 5 docs, then 50, then 500 +> ./jmh.sh BenchmarkClass -p numDocs=5,50,500 +> +> +> # run the benchmark enough times to satisfy every combination of two +> # multi-valued input parameters +> ./jmh.sh BenchmarkClass -p numDocs=10,20,30 -p docSize 250,500 +> ``` + +### Format and Write Results to Files + +Rather than just dumping benchmark results to the console, you can specify the +`-rf` argument to control the output format; for example, you can choose CSV or +JSON. The `-rff` argument will dictate the filename and output location. + +> +> ```zsh +> # format output to JSON and write the file to the `work` directory relative to +> # the JMH working directory. +> ./jmh.sh BenchmarkClass -rf json -rff work/jmh-results.json +> ``` +> +> 💡 **If you pass only the `-rf` argument, JMH will write out a file to the +> current working directory with the appropriate extension, e.g.,** `jmh-results.csv`. + +## JMH Command-Line Arguments + +### The JMH Command-Line Syntax + +> ![](https://img.shields.io/badge/Help-output-blue) +> +> ```zsh +> Usage: ./jmh.sh [regexp*] [options] +> [opt] means optional argument. +> means required argument. +> "+" means comma-separated list of values. +> "time" arguments accept time suffixes, like "100ms". +> +> Command-line options usually take precedence over annotations. +> ``` + +### The Full List of JMH Arguments + +```zsh Usage: ./jmh.sh [regexp*] [options] [opt] means optional argument. means required argument. - "+" means comma-separated list of values. + "+" means a comma-separated list of values. "time" arguments accept time suffixes, like "100ms". -Command line options usually take precedence over annotations. +Command-line options usually take precedence over annotations. [arguments] Benchmarks to run (regexp+). (default: .*) - -bm Benchmark mode. Available modes are: [Throughput/thrpt, - AverageTime/avgt, SampleTime/sample, SingleShotTime/ss, + -bm Benchmark mode. Available modes are: + [Throughput/thrpt, AverageTime/avgt, + SampleTime/sample, SingleShotTime/ss, All/all]. (default: Throughput) -bs Batch size: number of benchmark method calls per operation. Some benchmark modes may ignore this - setting, please check this separately. (default: - 1) + setting; please check this separately. + (default: 1) -e Benchmarks to exclude from the run. - -f How many times to fork a single benchmark. Use 0 to - disable forking altogether. Warning: disabling - forking may have detrimental impact on benchmark - and infrastructure reliability, you might want - to use different warmup mode instead. (default: - 5) - - -foe Should JMH fail immediately if any benchmark had - experienced an unrecoverable error? This helps - to make quick sanity tests for benchmark suites, - as well as make the automated runs with checking error + -f How many times to fork a single benchmark. Use 0 + to disable forking altogether. 
Warning: + disabling forking may have a detrimental impact on + benchmark and infrastructure reliability. You might + want to use a different warmup mode instead. (default: 1) + + -foe Should JMH fail immediately if any benchmark has + experienced an unrecoverable error? Failing fast + helps to make quick sanity tests for benchmark + suites and allows automated runs to do error + checking. codes. (default: false) -gc Should JMH force GC between iterations? Forcing - the GC may help to lower the noise in GC-heavy benchmarks, - at the expense of jeopardizing GC ergonomics decisions. + GC may help lower the noise in GC-heavy benchmarks + at the expense of jeopardizing GC ergonomics + decisions. Use with care. (default: false) - -h Display help, and exit. + -h Displays this help output and exits. - -i Number of measurement iterations to do. Measurement - iterations are counted towards the benchmark score. - (default: 1 for SingleShotTime, and 5 for all other - modes) + -i Number of measurement iterations to do. + Measurement + iterations are counted towards the benchmark + score. + (default: 1 for SingleShotTime, and 5 for all + other modes) - -jvm Use given JVM for runs. This option only affects forked - runs. + -jvm Use given JVM for runs. This option only affects + forked runs. - -jvmArgs Use given JVM arguments. Most options are inherited - from the host VM options, but in some cases you want - to pass the options only to a forked VM. Either single - space-separated option line, or multiple options - are accepted. This option only affects forked runs. + -jvmArgs Use given JVM arguments. Most options are + inherited from the host VM options, but in some + cases, you want to pass the options only to a forked + VM. Either single space-separated option line or + multiple options are accepted. This option only + affects forked runs. - -jvmArgsAppend Same as jvmArgs, but append these options after the - already given JVM args. + -jvmArgsAppend Same as jvmArgs, but append these options after + the already given JVM args. -jvmArgsPrepend Same as jvmArgs, but prepend these options before the already given JVM arg. - -l List the benchmarks that match a filter, and exit. + -l List the benchmarks that match a filter and exit. - -lp List the benchmarks that match a filter, along with + -lp List the benchmarks that match a filter, along + with parameters, and exit. - -lprof List profilers, and exit. + -lprof List profilers and exit. - -lrf List machine-readable result formats, and exit. + -lrf List machine-readable result formats and exit. -o Redirect human-readable output to a given file. - -opi Override operations per invocation, see @OperationsPerInvocation - Javadoc for details. (default: 1) + -opi Override operations per invocation, see + @OperationsPerInvocation Javadoc for details. + (default: 1) - -p Benchmark parameters. This option is expected to - be used once per parameter. Parameter name and parameter - values should be separated with equals sign. Parameter - values should be separated with commas. + -p Benchmark parameters. This option is expected to + be used once per parameter. The parameter name and + parameter values should be separated with an + equal sign. Parameter values should be separated + with commas. - -prof Use profilers to collect additional benchmark data. - Some profilers are not available on all JVMs and/or - all OSes. Please see the list of available profilers - with -lprof. + -prof Use profilers to collect additional benchmark + data. 
+ Some profilers are not available on all JVMs or + all OSes. '-lprof' will list the available + profilers that are available and that can run + with the current OS configuration and installed dependencies. - -r

+
+![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png)
+
+```Shell
+./jmh.sh -lprof
+```
+
+ + +
+
+In our case, we will start with very **minimal** Arch and Ubuntu clean installations, and so we already know there is _**no chance**_ that async-profiler or Perfasm
+are going to run.
+
+In fact, first we have to install a few project build requirements before thinking too much about JMH profiler support.
+
+We will run on **Arch/Manjaro**, but there should not be any difference from **Debian/Ubuntu** for this stage.
+
+
+![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png)
+
+```Shell
+sudo pacman -S wget jdk11-openjdk
+```
+
+ +
+ +Here we give **async-profiler** a try on **Arch** anyway and observe the failure indicating that we need to obtain the async-profiler library and +put it in the correct location at a minimum. + +
+ + +```Shell +./jmh.sh BenchMark -prof async +``` + +
+     Profilers failed to initialize, exiting.
+
+    Unable to load async-profiler. Ensure asyncProfiler library is on LD_LIBRARY_PATH (Linux)
+    DYLD_LIBRARY_PATH (Mac OS), or -Djava.library.path.
+
+    Alternatively, point to explicit library location with: '-prof async:libPath={path}'
+
+    no asyncProfiler in java.library.path: [/usr/java/packages/lib, /usr/lib64, /lib64, /lib, /usr/lib]
+    
+ +
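+
+As the error output above notes, another option is to point JMH directly at wherever the library lives instead of installing it into a system path. This is only a sketch; the library path is a placeholder to substitute with the real location:
+
+```Shell
+# point the JMH async-profiler integration at an explicit library location
+./jmh.sh BenchMark -prof async:libPath=/path/to/libasyncProfiler.so
+```
+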
+ +### Async-Profiler + +#### Install async-profiler + +
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +wget -c https://github.com/jvm-profiling-tools/async-profiler/releases/download/v2.5/async-profiler-2.5-linux-x64.tar.gz -O - | tar -xz +sudo mkdir -p /usr/java/packages/lib +sudo cp async-profiler-2.5-linux-x64/build/* /usr/java/packages/lib +``` + +
+ +
+ +That should work out better, but there is still an issue that will prevent a successful profiling run. async-profiler relies on Linux's perf, +and in any recent Linux kernel, perf is restricted from doing its job without some configuration loosening. + +Manjaro should have perf available, but you may need to install it in the other cases. + +
+ +![](https://user-images.githubusercontent.com/448788/137563908-738a7431-88db-47b0-96a4-baaed7e5024b.png) + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +apt-get install linux-tools-common linux-tools-generic linux-tools-`uname -r` +``` + +
+ +
+ +![](https://user-images.githubusercontent.com/448788/137563725-0195a732-da40-4c8b-a5e8-fd904a43bb79.png) + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +pacman -S perf +``` + +
+ + +
+
+And now the permissions issue. The following changes will not persist across restarts, and that is likely how you should leave things.
+
+ +```zsh +sudo sysctl -w kernel.kptr_restrict=0 +sudo sysctl -w kernel.perf_event_paranoid=1 +``` + +
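+
+If you ever do want these settings to survive a reboot, a drop-in file under `/etc/sysctl.d/` is the usual route. This is only a sketch; the file name here is arbitrary, and the values simply mirror the ones set above:
+
+```Shell
+# persist the perf-related settings across reboots (optional)
+printf 'kernel.kptr_restrict = 0\nkernel.perf_event_paranoid = 1\n' | sudo tee /etc/sysctl.d/99-jmh-perf.conf
+
+# re-apply all sysctl configuration files immediately
+sudo sysctl --system
+```
+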
+ +
+ +Now we **should** see some success: + +
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +./jmh.sh FuzzyQuery -prof async:output=flamegraph +``` + +
+ +
+ +![](https://user-images.githubusercontent.com/448788/138650315-82adeb18-54cd-43ee-810e-24f1e22719c7.png) + +
+ +But you will also find an important _warning_ if you look closely at the logs. + +
+ +![](https://user-images.githubusercontent.com/448788/137613526-a188ff03-545c-465d-928d-bc433d2d204f.png) +[WARN] `Install JVM debug symbols to improve profile accuracy` + +
+
+Ensuring that **debug symbols** remain available gives the best profiling accuracy and heap-analysis experience.
+
+And it also turns out that if we use async-profiler's **alloc** option to sample and create flamegraphs for heap usage, the **debug** symbols
+are _required_.
+
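+
+For reference, an allocation-profiling run looks roughly like the following; it reuses the `\;alloc` flag that the async-profiler examples elsewhere in these docs use, and the exact option combination is an assumption rather than a prescribed recipe:
+
+```Shell
+# CPU flamegraph plus allocation profiling; accurate heap data needs the debug symbols installed below
+./jmh.sh FuzzyQuery -prof async:output=flamegraph\;alloc
+```
+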
+
+#### Install Java Debug Symbols
+
+---
+
+##### Ubuntu
+
+![](https://user-images.githubusercontent.com/448788/137563908-738a7431-88db-47b0-96a4-baaed7e5024b.png)
+
+Grab the debug package of OpenJDK using your package manager for the correct Java version.
+
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +sudo apt update +sudo apt upgrade +sudo apt install openjdk-11-dbg +``` + +
+
+---
+
+##### Arch
+
+![](https://user-images.githubusercontent.com/448788/137563725-0195a732-da40-4c8b-a5e8-fd904a43bb79.png)
+
+On the **Arch** side we will rebuild the Java 11 package, but turn off the option that strips debug symbols. Large OS package and Java repositories often originated in SVN and can be a bit of a bear to wrestle with git when you only want a fraction
+of the repository, so we use the GitHub API as a more efficient workaround.
+
+
+![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png)
+
+```Shell
+sudo pacman -S dkms base-devel linux-headers git vi jq --needed --noconfirm
+
+curl -sL "https://api.github.com/repos/archlinux/svntogit-packages/contents/java11-openjdk/repos/extra-x86_64" \
+| jq -r '.[] | .download_url' | xargs -n1 wget
+```
+
+ +
+
+Now we need to change that option in PKGBUILD. Choose your favorite editor (nano, vim, emacs, ne, nvim, tilde, etc.).
+
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +vi PKGBUILD +``` + +
+ +
+ +Insert a single option line: + +
+ +```Diff +arch=('x86_64') +url='https://openjdk.java.net/' +license=('custom') ++ options=('debug' '!strip') +makedepends=('java-environment>=10' 'java-environment<12' 'cpio' 'unzip' 'zip' 'libelf' 'libcups' 'libx11' 'libxrender' 'libxtst' 'libxt' 'libxext' 'libxrandr' 'alsa-lib' 'pandoc' +``` + +
+ +
+ +Then build and install. (`-s: --syncdeps -i: --install -f: --force`) + +
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +makepkg -sif +``` + +
+ +
+ +When that is done, if everything went well, we should be able to successfully run async-profiler in alloc mode to generate a flame graph based on memory rather than cpu. + +
+ + +
+
+![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png)
+
+```Shell
+./jmh.sh FuzzyQuery -prof async:output=flamegraph\;alloc
+```
+
+ +
+
+![](https://user-images.githubusercontent.com/448788/138661737-333bf265-343a-4002-b8a8-97d72c38ced0.png)
+
+## Perfasm
+
+Perfasm will run perf to collect hardware counter information (cycles by default), and it will also pass an argument to Java
+to cause it to log assembly output (among other things). The performance data from perf is married with the assembly from the Java output log, and Perfasm then does its thing to produce human-parsable output. Java generally cannot output assembly as shipped, however, so now
+we must install **hsdis** to allow for `-XX:+PrintAssembly`.
+
+* * *
+
+### Arch
+
+![](https://user-images.githubusercontent.com/448788/137563725-0195a732-da40-4c8b-a5e8-fd904a43bb79.png)
+
+
+[//]: # ( https://aur.archlinux.org/packages/java11-openjdk-hsdis/)
+
+If you have `yay` or another **AUR** helper available, or if you have the **AUR** enabled in your package manager, simply install `java11-openjdk-hsdis`.
+
+If you do not have simple access to **AUR**, set it up or just grab the package manually:
+
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +wget -c https://aur.archlinux.org/cgit/aur.git/snapshot/java11-openjdk-hsdis.tar.gz -O - | tar -xz +cd java11-openjdk-hsdis/ +makepkg -si +``` + +
+ +
+ +--- + +### Ubuntu + +![](https://user-images.githubusercontent.com/448788/137563908-738a7431-88db-47b0-96a4-baaed7e5024b.png) + +
+ +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +sudo apt update +sudo apt -y upgrade +sudo apt -y install openjdk-11-jdk git wget jq +``` + +
+ +
+ +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +curl -sL "https://api.github.com/repos/openjdk/jdk11/contents/src/utils/hsdis" | jq -r '.[] | .download_url' | xargs -n1 wget + +# Newer versions of binutils don't appear to compile, must use 2.28 for JDK 11 +wget http://ftp.heanet.ie/mirrors/ftp.gnu.org/gnu/binutils/binutils-2.28.tar.gz +tar xzvf binutils-2.28.tar.gz +make BINUTILS=binutils-2.28 ARCH=amd64 +``` + +
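+
+The `make` step should leave an `hsdis-amd64.so` under the build output directory. The output path below is an assumption, as is the JDK location; the JVM just needs `hsdis-<arch>.so` to end up somewhere it looks, such as the JDK's `lib/server` directory (next to `libjvm.so`) or a directory on `java.library.path`:
+
+```Shell
+# copy the freshly built disassembler plugin next to libjvm.so (adjust both paths for your setup)
+sudo cp build/linux-amd64/hsdis-amd64.so /usr/lib/jvm/java-11-openjdk-amd64/lib/server/
+```
+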
+ +
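+
+A quick sanity check, on either distro, is to ask the JVM to print assembly for a trivial run; if hsdis is missing, the JVM prints a "could not load hsdis" style warning instead. This is just a sketch:
+
+```Shell
+# with hsdis in place this emits disassembly rather than a "PrintAssembly is disabled" warning
+java -XX:+UnlockDiagnosticVMOptions -XX:+PrintAssembly -version 2>&1 | head -n 20
+```
+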
+ +Now we should be able to do a little Perfasm: + +
+ +![](https://user-images.githubusercontent.com/448788/137610116-eff6d0b7-e862-40fb-af04-452aaf585387.png) + +```Shell +./jmh.sh FuzzyQuery -prof perfasm +``` + +
diff --git a/solr/benchmark/docs/jmh-profilers.md b/solr/benchmark/docs/jmh-profilers.md new file mode 100644 index 00000000000..e700add93ae --- /dev/null +++ b/solr/benchmark/docs/jmh-profilers.md @@ -0,0 +1,189 @@ + + +# JMH Profilers + +- [JMH Profilers](#jmh-profilers) + - [Introduction](#introduction) + - [Using JMH Profilers](#using-jmh-profilers) + - [Using JMH with the Async-Profiler](#using-jmh-with-the-async-profiler) + - [OS Permissions for Async-Profiler](#os-permissions-for-async-profiler) + - [Using JMH with the GC Profiler](#using-jmh-with-the-gc-profiler) + - [Using JMH with the Java Flight Recorder Profiler](#using-jmh-with-the-java-flight-recorder-profiler) + +## Introduction + +Some may think that the appeal of a micro-benchmark is in the relatively easy +learning curve and the often isolated nature of what is being measured. But +this perspective is actually what can often make them dangerous. Benchmarking +can be easy to approach from a non-rigorous, casual angle that results in the +feeling that they are a relatively straightforward part of the developer's +purview. From this viewpoint, microbenchmarks can appear downright easy. But good +benchmarking is hard. Microbenchmarks are very hard. Java and HotSpot make "hard" +even harder. + +JMH was developed by engineers that understood the dark side of benchmarks very +well. They also work on OpenJDK, so they are abnormally suited to building a +java microbenchmark framework that tackles many common issues that naive +approaches and go-it-alone efforts are likely to trip on. Even still, they will +tell you, JMH is a sharp blade. Best to be cautious and careful when swinging it +around. + +The good folks working on JMH did not just build a better than average java +micro-benchmark framework and then leave us to the still many wolves, though. They +also built-in first-class support for the essential tools that the +ambitious developer absolutely needs for defense when bravely trying to +understand performance. This brings us to the JMH profiler options. + +## Using JMH Profilers + +### Using JMH with the Async-Profiler + +It's good practice to check profiler output for micro-benchmarks in order to +verify that they represent the expected application behavior and measure what +you expect to measure. Some example pitfalls include the use of expensive mocks +or accidental inclusion of test setup code in the benchmarked code. JMH includes +[async-profiler](https://github.com/jvm-profiling-tools/async-profiler) +integration that makes this easy: + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;dir=profile-results +``` + +
+
+Run a specific test with async and GC profilers on Linux and flame graph output:
+
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell + ./jmh.sh -prof gc -prof async:libPath=/path/to/libasyncProfiler.so\;output=flamegraph\;dir=profile-results BenchmarkClass +``` + +
+ +With flame graph output: + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;output=flamegraph\;dir=profile-results +``` + +
+ +Simultaneous CPU, allocation, and lock profiling with async profiler 2.0 and Java Flight Recorder +output: + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof async:libPath=/path/to/libasyncProfiler.so\;output=jfr\;alloc\;lock\;dir=profile-results BenchmarkClass +``` + +
+ +A number of arguments can be passed to configure async profiler, run the +following for a description: + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof async:help +``` + +
+
+You can also skip specifying libPath if you place the async-profiler lib in a
+predefined location, such as one of the locations in the env
+variable `LD_LIBRARY_PATH` if it has been set (many Linux distributions set this
+env variable; Arch by default does not), or in `/usr/lib`, which should also work.
+
+#### OS Permissions for Async-Profiler
+
+Async Profiler uses perf to profile native code in addition to Java code. It
+will need the following for the necessary access.
+
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +echo 0 > /proc/sys/kernel/kptr_restrict +echo 1 > /proc/sys/kernel/perf_event_paranoid +``` + +
+ +or + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +sudo sysctl -w kernel.kptr_restrict=0 +sudo sysctl -w kernel.perf_event_paranoid=1 +``` + +
+ +### Using JMH with the GC Profiler + +You can run a benchmark with `-prof gc` to measure its allocation rate: + +
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof gc:dir=profile-results +``` + +
+
+Of particular importance are the `norm` alloc rates, which measure the
+allocations per operation rather than allocations per second.
+
+### Using JMH with the Java Flight Recorder Profiler
+
+JMH comes with a variety of built-in profilers. Here is an example of using JFR:
+
+ +![](https://user-images.githubusercontent.com/448788/137610566-883825b7-e66c-4d8b-a6a5-61542bc08d23.png) + +```Shell +./jmh.sh -prof jfr:dir=profile-results\;configName=jfr-profile.jfc BenchmarkClass +``` + +
+ +In this example, we point to the included configuration file with config name, but +you could also do something like settings=default or settings=profile. diff --git a/solr/benchmark/jmh.sh b/solr/benchmark/jmh.sh index 30c72b7a2e7..18f9875da19 100755 --- a/solr/benchmark/jmh.sh +++ b/solr/benchmark/jmh.sh @@ -51,9 +51,7 @@ echo "running JMH with args: $@" # -XX:+UnlockExperimentalVMOptions -XX:G1NewSizePercent=20 # and this note: Prevents G1 undermining young gen, which otherwise causes a cascade of issues # MRM: I've also seen 15 claimed as a sweet spot. -# -XX:-UseBiasedLocking - should be unreflective in recent JVMs and removed in the latest. - -jvmArgs="-jvmArgs -Djmh.shutdownTimeout=5 -jvmArgs -Djmh.shutdownTimeout.step=3 -jvmArgs -Djava.security.egd=file:/dev/./urandom -jvmArgs -XX:-UseBiasedLocking -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+DebugNonSafepoints -jvmArgs --add-opens=java.base/java.lang.reflect=ALL-UNNAMED" +jvmArgs="-jvmArgs -Djmh.shutdownTimeout=5 -jvmArgs -Djmh.shutdownTimeout.step=3 -jvmArgs -Djava.security.egd=file:/dev/./urandom -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+DebugNonSafepoints -jvmArgs --add-opens=java.base/java.lang.reflect=ALL-UNNAMED" gcArgs="-jvmArgs -XX:+UseG1GC -jvmArgs -XX:+ParallelRefProcEnabled" # -jvmArgs -Dlog4j2.debug diff --git a/solr/benchmark/src/java/org/apache/solr/bench/Docs.java b/solr/benchmark/src/java/org/apache/solr/bench/Docs.java index 5c3b889c7a6..d256739acae 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/Docs.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/Docs.java @@ -107,10 +107,7 @@ public Iterator preGenerate(int numDocs) throws InterruptedEx new SolrNamedThreadFactory("SolrJMH DocMaker")); for (int i = 0; i < numDocs; i++) { - executorService.submit( - () -> { - docs.add(Docs.this.inputDocument()); - }); + executorService.execute(() -> docs.add(Docs.this.inputDocument())); } executorService.shutdown(); diff --git a/solr/benchmark/src/java/org/apache/solr/bench/MiniClusterState.java b/solr/benchmark/src/java/org/apache/solr/bench/MiniClusterState.java index 5a793448c5c..39c1745d387 100755 --- a/solr/benchmark/src/java/org/apache/solr/bench/MiniClusterState.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/MiniClusterState.java @@ -37,7 +37,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.Http2SolrClient; @@ -84,7 +83,7 @@ public static class MiniClusterBenchState { MiniSolrCloudCluster cluster; /** The Client. */ - public SolrClient client; + public Http2SolrClient client; /** The Run cnt. 
*/ int runCnt = 0; @@ -268,7 +267,7 @@ public void startMiniCluster(int nodeCount) { cluster = new MiniSolrCloudCluster.Builder(nodeCount, miniClusterBaseDir) .formatZkServer(false) - .addConfig("conf", getFile("src/resources/configs/cloud-minimal/conf").toPath()) + .addConfig("conf", getFile("src/resources/configs/cloud-minimal/conf")) .configure(); } catch (Exception e) { if (Files.exists(miniClusterBaseDir)) { @@ -288,7 +287,7 @@ public void startMiniCluster(int nodeCount) { } zkHost = cluster.getZkServer().getZkAddress(); - client = new Http2SolrClient.Builder().useHttp1_1(useHttp1).build(); + client = new Http2SolrClient.Builder(nodes.get(0)).useHttp1_1(useHttp1).build(); log("done starting mini cluster"); log(""); @@ -318,9 +317,8 @@ public void createCollection(String collection, int numShards, int numReplicas) CollectionAdminRequest.Create request = CollectionAdminRequest.createCollection(collection, "conf", numShards, numReplicas); - request.setBasePath(nodes.get(random.nextInt(cluster.getJettySolrRunners().size()))); - - client.request(request); + client.requestWithBaseUrl( + nodes.get(random.nextInt(cluster.getJettySolrRunners().size())), null, request); cluster.waitForActiveCollection( collection, 15, TimeUnit.SECONDS, numShards, numShards * numReplicas); @@ -368,18 +366,19 @@ public void index(String collection, Docs docs, int docCount, boolean parallel) log("committing data ..."); UpdateRequest commitRequest = new UpdateRequest(); - commitRequest.setBasePath(nodes.get(random.nextInt(cluster.getJettySolrRunners().size()))); + final var url = nodes.get(random.nextInt(cluster.getJettySolrRunners().size())); commitRequest.setAction(UpdateRequest.ACTION.COMMIT, false, true); - commitRequest.process(client, collection); + client.requestWithBaseUrl(url, collection, commitRequest); log("done committing data"); } else { cluster.waitForActiveCollection(collection, 15, TimeUnit.SECONDS); } QueryRequest queryRequest = new QueryRequest(new SolrQuery("q", "*:*", "rows", "1")); - queryRequest.setBasePath(nodes.get(random.nextInt(cluster.getJettySolrRunners().size()))); + final var url = nodes.get(random.nextInt(cluster.getJettySolrRunners().size())); + NamedList result = + client.requestWithBaseUrl(url, collection, queryRequest).getResponse(); - NamedList result = client.request(queryRequest, collection); log("sanity check of single row query result: " + result); log(""); @@ -410,22 +409,22 @@ private void indexParallel(String collection, Docs docs, int docCount) 10, TimeUnit.SECONDS); for (int i = 0; i < docCount; i++) { - executorService.submit( + executorService.execute( new Runnable() { final SplittableRandom threadRandom = random.split(); @Override public void run() { UpdateRequest updateRequest = new UpdateRequest(); - updateRequest.setBasePath( - nodes.get(threadRandom.nextInt(cluster.getJettySolrRunners().size()))); + final var url = + nodes.get(threadRandom.nextInt(cluster.getJettySolrRunners().size())); SolrInputDocument doc = docs.inputDocument(); // log("add doc " + doc); updateRequest.add(doc); meter.mark(); try { - client.request(updateRequest, collection); + client.requestWithBaseUrl(url, collection, updateRequest); } catch (Exception e) { throw new RuntimeException(e); } @@ -452,9 +451,8 @@ private void indexBatch(String collection, Docs docs, int docCount, int batchSiz batch.add(docs.inputDocument()); if (i % batchSize == 0) { UpdateRequest updateRequest = new UpdateRequest(); - updateRequest.setBasePath(nodes.get(0)); updateRequest.add(batch); - 
client.request(updateRequest, collection); + client.requestWithBaseUrl(nodes.get(0), collection, updateRequest); meter.mark(batch.size()); batch.clear(); log(meter.getCount() + " docs at " + (long) meter.getMeanRate() + " doc/s"); @@ -462,9 +460,8 @@ private void indexBatch(String collection, Docs docs, int docCount, int batchSiz } if (!batch.isEmpty()) { UpdateRequest updateRequest = new UpdateRequest(); - updateRequest.setBasePath(nodes.get(0)); updateRequest.add(batch); - client.request(updateRequest, collection); + client.requestWithBaseUrl(nodes.get(0), collection, updateRequest); meter.mark(batch.size()); batch = null; } @@ -500,10 +497,9 @@ public void forceMerge(String collection, int maxMergeSegments) throws Exception } UpdateRequest optimizeRequest = new UpdateRequest(); - optimizeRequest.setBasePath( - nodes.get(random.nextInt(cluster.getJettySolrRunners().size()))); + final var url = nodes.get(random.nextInt(cluster.getJettySolrRunners().size())); optimizeRequest.setAction(UpdateRequest.ACTION.OPTIMIZE, false, true, maxMergeSegments); - optimizeRequest.process(client, collection); + client.requestWithBaseUrl(url, collection, optimizeRequest); } } @@ -555,12 +551,12 @@ public static ModifiableSolrParams params(ModifiableSolrParams params, String... * @param name the name * @return the file */ - public static File getFile(String name) { + public static Path getFile(String name) { final URL url = MiniClusterState.class.getClassLoader().getResource(name.replace(File.separatorChar, '/')); if (url != null) { try { - return new File(url.toURI()); + return Path.of(url.toURI()); } catch (Exception e) { throw new RuntimeException( "Resource was found on classpath, but cannot be resolved to a " @@ -568,12 +564,12 @@ public static File getFile(String name) { + name); } } - File file = new File(name); - if (file.exists()) { + Path file = Path.of(name); + if (Files.exists(file)) { return file; } else { - file = new File("../../../", name); - if (file.exists()) { + file = Path.of("../../../", name); + if (Files.exists(file)) { return file; } } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/generators/RandomDataHistogram.java b/solr/benchmark/src/java/org/apache/solr/bench/generators/RandomDataHistogram.java index 0f9062331ef..8cd55a7964a 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/generators/RandomDataHistogram.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/generators/RandomDataHistogram.java @@ -436,8 +436,7 @@ public String toString() { @Override public boolean equals(Object o) { if (this == o) return true; - if (!(o instanceof Surrogate)) return false; - Surrogate surrogate = (Surrogate) o; + if (!(o instanceof Surrogate surrogate)) return false; return hashCode.equals(surrogate.hashCode) && identityHashcode.equals(surrogate.identityHashcode); } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java b/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java index 820f2130a60..0ee950e7231 100755 --- a/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java @@ -127,9 +127,9 @@ public void doSetup(MiniClusterState.MiniClusterBenchState miniClusterState) thr public Object indexDoc(MiniClusterState.MiniClusterBenchState miniClusterState, BenchState state) throws Exception { UpdateRequest updateRequest = new UpdateRequest(); - updateRequest.setBasePath( - 
miniClusterState.nodes.get(miniClusterState.getRandom().nextInt(state.nodeCount))); updateRequest.add(state.getNextDoc()); - return miniClusterState.client.request(updateRequest, BenchState.COLLECTION); + final var url = + miniClusterState.nodes.get(miniClusterState.getRandom().nextInt(state.nodeCount)); + return miniClusterState.client.requestWithBaseUrl(url, BenchState.COLLECTION, updateRequest); } } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/search/FilterCache.java b/solr/benchmark/src/java/org/apache/solr/bench/search/FilterCache.java index e518ff90865..6b7f6bf4a4f 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/search/FilterCache.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/search/FilterCache.java @@ -68,6 +68,7 @@ public static class BenchState { QueryRequest q1 = new QueryRequest(new SolrQuery("q", "*:*", "fq", "Ea_b:true")); QueryRequest q2 = new QueryRequest(new SolrQuery("q", "*:*", "fq", "FB_b:true")); + String baseUrl; @Setup(Level.Trial) public void setupTrial(MiniClusterState.MiniClusterBenchState miniClusterState) @@ -100,9 +101,7 @@ public Boolean generate(SolrRandomnessSource in) { docs.field("FB_b", booleans); miniClusterState.index(COLLECTION, docs, 30 * 1000); - String base = miniClusterState.nodes.get(0); - q1.setBasePath(base); - q2.setBasePath(base); + baseUrl = miniClusterState.nodes.get(0); } @Setup(Level.Iteration) @@ -110,8 +109,7 @@ public void setupIteration(MiniClusterState.MiniClusterBenchState miniClusterSta throws SolrServerException, IOException { // Reload the collection/core to drop existing caches CollectionAdminRequest.Reload reload = CollectionAdminRequest.reloadCollection(COLLECTION); - reload.setBasePath(miniClusterState.nodes.get(0)); - miniClusterState.client.request(reload); + miniClusterState.client.requestWithBaseUrl(miniClusterState.nodes.get(0), null, reload); } @TearDown(Level.Iteration) @@ -139,14 +137,17 @@ public void dumpMetrics(MiniClusterState.MiniClusterBenchState miniClusterState) public Object filterCacheMultipleQueries( BenchState benchState, MiniClusterState.MiniClusterBenchState miniClusterState) throws SolrServerException, IOException { - return miniClusterState.client.request( - miniClusterState.getRandom().nextBoolean() ? benchState.q1 : benchState.q2, COLLECTION); + return miniClusterState.client.requestWithBaseUrl( + benchState.baseUrl, + COLLECTION, + miniClusterState.getRandom().nextBoolean() ? 
benchState.q1 : benchState.q2); } @Benchmark public Object filterCacheSingleQuery( BenchState benchState, MiniClusterState.MiniClusterBenchState miniClusterState) throws SolrServerException, IOException { - return miniClusterState.client.request(benchState.q1, COLLECTION); + return miniClusterState.client.requestWithBaseUrl( + benchState.baseUrl, COLLECTION, benchState.q1); } } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/search/JsonFaceting.java b/solr/benchmark/src/java/org/apache/solr/bench/search/JsonFaceting.java index 498faed5814..1595de56d3b 100755 --- a/solr/benchmark/src/java/org/apache/solr/bench/search/JsonFaceting.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/search/JsonFaceting.java @@ -180,11 +180,13 @@ public Object jsonFacet( BenchState state, BenchState.ThreadState threadState) throws Exception { + final var url = miniClusterState.nodes.get(threadState.random.nextInt(state.nodeCount)); QueryRequest queryRequest = new QueryRequest(state.params); - queryRequest.setBasePath( - miniClusterState.nodes.get(threadState.random.nextInt(state.nodeCount))); - - NamedList result = miniClusterState.client.request(queryRequest, state.collection); + NamedList result = + miniClusterState + .client + .requestWithBaseUrl(url, state.collection, queryRequest) + .getResponse(); // MiniClusterState.log("result: " + result); diff --git a/solr/benchmark/src/java/org/apache/solr/bench/search/NumericSearch.java b/solr/benchmark/src/java/org/apache/solr/bench/search/NumericSearch.java index 3c7a72385f9..4d662205230 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/search/NumericSearch.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/search/NumericSearch.java @@ -101,8 +101,8 @@ public void setupTrial(MiniClusterState.MiniClusterBenchState miniClusterState) q.setParam("facet.field", "numbers_i_dv", "term_low_s", "term_high_s"); q.setParam("facet.limit", String.valueOf(maxCardinality)); QueryRequest req = new QueryRequest(q); - req.setBasePath(basePath); - QueryResponse response = req.process(miniClusterState.client, COLLECTION); + QueryResponse response = + miniClusterState.client.requestWithBaseUrl(basePath, COLLECTION, req); Set numbers = response.getFacetField("numbers_i_dv").getValues().stream() .map(FacetField.Count::getName) @@ -144,8 +144,7 @@ public void setupIteration(MiniClusterState.MiniClusterBenchState miniClusterSta throws SolrServerException, IOException { // Reload the collection/core to drop existing caches CollectionAdminRequest.Reload reload = CollectionAdminRequest.reloadCollection(COLLECTION); - reload.setBasePath(miniClusterState.nodes.get(0)); - miniClusterState.client.request(reload); + miniClusterState.client.requestWithBaseUrl(miniClusterState.nodes.get(0), null, reload); } public QueryRequest intSetQuery(boolean dvs) { @@ -172,7 +171,6 @@ QueryRequest setQuery(String field) { termQueryField + ":" + lowCardTerms.next(), "fq", "{!terms cache=false f='" + field + "'}" + queries.next())); - q.setBasePath(basePath); return q; } } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/search/QueryResponseWriters.java b/solr/benchmark/src/java/org/apache/solr/bench/search/QueryResponseWriters.java index 15b4526d49e..111a1d6ac50 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/search/QueryResponseWriters.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/search/QueryResponseWriters.java @@ -59,17 +59,7 @@ public class QueryResponseWriters { public static class BenchState { /** See {@link 
SolrCore#DEFAULT_RESPONSE_WRITERS} */ - @Param({ - CommonParams.JAVABIN, - CommonParams.JSON, - "cbor", - "smile", - "xml", - "python", - "phps", - "ruby", - "raw" - }) + @Param({CommonParams.JAVABIN, CommonParams.JSON, "cbor", "smile", "xml", "raw"}) String wt; private int docs = 100; @@ -98,7 +88,6 @@ public void setup(MiniClusterBenchState miniClusterState) throws Exception { q = new QueryRequest(params); q.setResponseParser(new NoOpResponseParser(wt)); String base = miniClusterState.nodes.get(0); - q.setBasePath(base); } } diff --git a/solr/benchmark/src/java/org/apache/solr/bench/search/SimpleSearch.java b/solr/benchmark/src/java/org/apache/solr/bench/search/SimpleSearch.java index 165d68ecb3b..46982b33b89 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/search/SimpleSearch.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/search/SimpleSearch.java @@ -65,8 +65,6 @@ public void setupTrial(MiniClusterState.MiniClusterBenchState miniClusterState) miniClusterState.setUseHttp1(useHttp1); miniClusterState.startMiniCluster(1); miniClusterState.createCollection(COLLECTION, 1, 1); - String base = miniClusterState.nodes.get(0); - q.setBasePath(base); } @Setup(Level.Iteration) @@ -74,7 +72,6 @@ public void setupIteration(MiniClusterState.MiniClusterBenchState miniClusterSta throws SolrServerException, IOException { // Reload the collection/core to drop existing caches CollectionAdminRequest.Reload reload = CollectionAdminRequest.reloadCollection(COLLECTION); - reload.setBasePath(miniClusterState.nodes.get(0)); miniClusterState.client.request(reload); total = new AtomicLong(); diff --git a/solr/benchmark/src/test/org/apache/solr/bench/MiniClusterBenchStateTest.java b/solr/benchmark/src/test/org/apache/solr/bench/MiniClusterBenchStateTest.java index e17ed29dcd8..af568e3bb98 100644 --- a/solr/benchmark/src/test/org/apache/solr/bench/MiniClusterBenchStateTest.java +++ b/solr/benchmark/src/test/org/apache/solr/bench/MiniClusterBenchStateTest.java @@ -118,10 +118,7 @@ public void testMiniClusterState() throws Exception { miniBenchState.forceMerge(collection, 15); ModifiableSolrParams params = MiniClusterState.params("q", "*:*"); - QueryRequest queryRequest = new QueryRequest(params); - queryRequest.setBasePath(miniBenchState.nodes.get(0)); - QueryResponse result = queryRequest.process(miniBenchState.client, collection); BaseBenchState.log("match all query result=" + result); diff --git a/solr/bin/solr b/solr/bin/solr index 036775a766b..0c0dc007e48 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -51,7 +51,7 @@ verbose=false THIS_OS=$(uname -s) # What version of Java is required to run this version of Solr. -JAVA_VER_REQ=11 +JAVA_VER_REQ=21 stop_all=false @@ -318,13 +318,13 @@ fi if [ -z "${SOLR_AUTH_TYPE:-}" ] && [ -n "${SOLR_AUTHENTICATION_OPTS:-}" ]; then echo "WARNING: SOLR_AUTHENTICATION_OPTS environment variable configured without associated SOLR_AUTH_TYPE variable" echo " Please configure SOLR_AUTH_TYPE environment variable with the authentication type to be used." - echo " Currently supported authentication types are [kerberos, basic]" + echo " Currently supported authentication types are [basic]" fi if [ -n "${SOLR_AUTH_TYPE:-}" ] && [ -n "${SOLR_AUTHENTICATION_CLIENT_BUILDER:-}" ]; then echo "WARNING: SOLR_AUTHENTICATION_CLIENT_BUILDER and SOLR_AUTH_TYPE environment variables are configured together." echo " Use SOLR_AUTH_TYPE environment variable to configure authentication type to be used. 
" - echo " Currently supported authentication types are [kerberos, basic]" + echo " Currently supported authentication types are [basic]" echo " The value of SOLR_AUTHENTICATION_CLIENT_BUILDER environment variable will be ignored" fi @@ -333,9 +333,6 @@ if [ -n "${SOLR_AUTH_TYPE:-}" ]; then basic) SOLR_AUTHENTICATION_CLIENT_BUILDER="org.apache.solr.client.solrj.impl.PreemptiveBasicAuthClientBuilderFactory" ;; - kerberos) - SOLR_AUTHENTICATION_CLIENT_BUILDER="org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder" - ;; *) echo "ERROR: Value specified for SOLR_AUTH_TYPE environment variable is invalid." exit 1 @@ -367,14 +364,13 @@ function print_usage() { if [[ "$CMD" == "start" || "$CMD" == "restart" ]]; then echo "" - echo "Usage: solr $CMD [-f] [-c] [--host host] [-p port] [-d directory] [-z zkHost] [-m memory] [-e example] [-s solr.solr.home] [-t solr.data.home] [--jvm-opts \"jvm-opts\"] [-V]" + echo "Usage: solr $CMD [-f] [--user-managed] [--host host] [-p port] [--server-dir directory] [-z zkHost] [-m memory] [-e example] [--solr-home solr.solr.home] [--data-home solr.data.home] [--jvm-opts \"jvm-opts\"] [--verbose]" echo "" - echo " -f Start Solr in foreground; default starts Solr in the background" + echo " -f/--foreground Start Solr in foreground; default starts Solr in the background" echo " and sends stdout / stderr to solr-PORT-console.log" echo "" - echo " -c or --cloud Start Solr in SolrCloud mode; if -z not supplied and ZK_HOST not defined in" - echo " solr.in.sh, an embedded ZooKeeper instance is started on Solr port+1000," - echo " such as 9983 if Solr is bound to 8983" + echo " --user-managed Start Solr in user managed aka standalone mode" + echo " See the Ref Guide for more details: https://solr.apache.org/guide/solr/latest/deployment-guide/cluster-types.html" echo "" echo " --host Specify the hostname for this Solr instance" echo "" @@ -383,9 +379,9 @@ function print_usage() { echo " STOP_PORT=(\$SOLR_PORT-1000) and JMX RMI listen port RMI_PORT=(\$SOLR_PORT+10000). " echo " For instance, if you set -p 8985, then the STOP_PORT=7985 and RMI_PORT=18985" echo "" - echo " -d Specify the Solr server directory; defaults to server" + echo " --server-dir Specify the Solr server directory; defaults to server" echo "" - echo " -z/--zk-host Zookeeper connection string; only used when running in SolrCloud mode using -c" + echo " -z/--zk-host Zookeeper connection string; ignored when running in User Managed (--user-managed) mode." echo " If neither ZK_HOST is defined in solr.in.sh nor the -z parameter is specified," echo " an embedded ZooKeeper instance will be launched." echo " Set the ZK_CREATE_CHROOT environment variable to true if your ZK host has a chroot path, and you want to create it automatically." @@ -393,18 +389,18 @@ function print_usage() { echo " -m/--memory Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g" echo " results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m" echo "" - echo " -s Sets the solr.solr.home system property; Solr will create core directories under" + echo " --solr-home Sets the solr.solr.home system property; Solr will create core directories under" echo " this directory. This allows you to run multiple Solr instances on the same host" - echo " while reusing the same server directory set using the -d parameter. If set, the" + echo " while reusing the same server directory set using the --server-dir parameter. 
If set, the" echo " specified directory should contain a solr.xml file, unless solr.xml exists in Zookeeper." echo " This parameter is ignored when running examples (-e), as the solr.solr.home depends" echo " on which example is run. The default value is server/solr. If passed relative dir," echo " validation with current dir will be done, before trying default server/" echo "" - echo " -t/--data-home Sets the solr.data.home system property, where Solr will store index data in /data subdirectories." + echo " --data-home Sets the solr.data.home system property, where Solr will store index data in /data subdirectories." echo " If not set, Solr uses solr.solr.home for config and data." echo "" - echo " -e Name of the example to run; available examples:" + echo " -e/--example Name of the example to run; available examples:" echo " cloud: SolrCloud example" echo " techproducts: Comprehensive example illustrating many of Solr's core capabilities" echo " schemaless: Schema-less example (schema is inferred from data during indexing)" @@ -420,18 +416,16 @@ function print_usage() { echo " you could pass: -j \"--include-jetty-dir=/etc/jetty/custom/server/\"" echo " In most cases, you should wrap the additional parameters in double quotes." echo "" - echo " --no-prompt Don't prompt for input; accept all defaults when running examples that accept user input" + echo " -y/--no-prompt Don't prompt for input; accept all defaults when running examples that accept user input" echo "" echo " --force If attempting to start Solr as the root user, the script will exit with a warning that running Solr as \"root\" can cause problems." echo " It is possible to override this warning with the '--force' parameter." echo "" - echo " -v and -q Verbose (-v) or quiet (-q) logging. Sets default log level of Solr to DEBUG or WARN instead of INFO" - echo "" - echo " -V/--verbose Verbose messages from this script" + echo " --verbose or --quiet/-q Verbose or quiet logging. Sets default log level of Solr to DEBUG or WARN instead of INFO" echo "" elif [ "$CMD" == "stop" ]; then echo "" - echo "Usage: solr stop [-k key] [-p port] [-V]" + echo "Usage: solr stop [-k key] [-p port] [--all]" echo "" echo " -k Stop key; default is solrrocks" echo "" @@ -439,7 +433,7 @@ function print_usage() { echo "" echo " --all Find and stop all running Solr servers on this host" echo "" - echo " -V/--verbose Verbose messages from this script" + echo " --verbose Verbose messages from this script" echo "" echo " NOTE: To see if any Solr servers are running, do: solr status" echo "" @@ -496,55 +490,13 @@ function run_tool() { # shellcheck disable=SC2086 "$JAVA" $SOLR_SSL_OPTS $AUTHC_OPTS ${SOLR_ZK_CREDS_AND_ACLS:-} ${SOLR_TOOL_OPTS:-} -Dsolr.install.dir="$SOLR_TIP" \ - -Dlog4j.configurationFile="$DEFAULT_SERVER_DIR/resources/log4j2-console.xml" \ + -Dlog4j.configurationFile="$DEFAULT_SERVER_DIR/resources/log4j2-console.xml" -Dsolr.pid.dir="$SOLR_PID_DIR" \ -classpath "$DEFAULT_SERVER_DIR/solr-webapp/webapp/WEB-INF/lib/*:$DEFAULT_SERVER_DIR/lib/ext/*:$DEFAULT_SERVER_DIR/lib/*" \ org.apache.solr.cli.SolrCLI "$@" return $? 
} # end run_tool function -# get status about any Solr nodes running on this host -function get_status() { - # first, see if Solr is running - numSolrs=$(find "$SOLR_PID_DIR" -name "solr-*.pid" -type f | wc -l | tr -d ' ') - if [ "$numSolrs" != "0" ]; then - echo -e "\nFound $numSolrs Solr nodes: " - while read PIDF - do - ID=$(cat "$PIDF") - port=$(jetty_port "$ID") - if [ "$port" != "" ]; then - echo -e "\nSolr process $ID running on port $port" - run_tool status --solr-url "$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:$port" "$@" - echo "" - else - echo -e "\nSolr process $ID from $PIDF not found." - fi - done < <(find "$SOLR_PID_DIR" -name "solr-*.pid" -type f) - else - # no pid files but check using ps just to be sure - numSolrs=$(ps auxww | grep start\.jar | grep solr\.solr\.home | grep -v grep | wc -l | sed -e 's/^[ \t]*//') - if [ "$numSolrs" != "0" ]; then - echo -e "\nFound $numSolrs Solr nodes: " - PROCESSES=$(ps auxww | grep start\.jar | grep solr\.solr\.home | grep -v grep | awk '{print $2}' | sort -r) - for ID in $PROCESSES - do - port=$(jetty_port "$ID") - if [ "$port" != "" ]; then - echo "" - echo "Solr process $ID running on port $port" - run_tool status --solr-url "$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:$port" "$@" - echo "" - fi - done - else - echo -e "\nNo Solr nodes are running.\n" - run_tool status "$@" - fi - fi - -} # end get_status - # tries to gracefully stop Solr using the Jetty # stop command and if that fails, then uses kill -9 # (will attempt to thread dump before killing) @@ -619,7 +571,7 @@ function stop_solr() { if [ $# -eq 1 ]; then case $1 in - --help|-h|-help) + --help|-h) run_tool "" exit ;; @@ -635,12 +587,6 @@ else exit fi -# status tool -if [ "$SCRIPT_CMD" == "status" ]; then - get_status - exit $? -fi - # configure authentication if [[ "$SCRIPT_CMD" == "auth" ]]; then : "${SOLR_SERVER_DIR:=$DEFAULT_SERVER_DIR}" @@ -648,7 +594,7 @@ if [[ "$SCRIPT_CMD" == "auth" ]]; then echo -e "\nSolr server directory $SOLR_SERVER_DIR not found!\n" exit 1 fi - + if [ -z "${SOLR_HOME:-}" ]; then SOLR_HOME="$SOLR_SERVER_DIR/solr" elif [[ $SOLR_HOME != /* ]]; then @@ -659,7 +605,7 @@ if [[ "$SCRIPT_CMD" == "auth" ]]; then SOLR_PID_DIR="$SOLR_HOME" fi fi - + if [ -z "${AUTH_PORT:-}" ]; then for ID in $(ps auxww | grep java | grep start\.jar | awk '{print $2}' | sort -r) do @@ -669,22 +615,18 @@ if [[ "$SCRIPT_CMD" == "auth" ]]; then break fi done - fi - + fi + run_tool auth $@ --solr-url "$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:${AUTH_PORT:-8983}" --auth-conf-dir "$SOLR_HOME" "--solr-include-file" "$SOLR_INCLUDE" exit $? fi -# at this point all tools that have a custom run process, like "status" and "auth" have been run and exited. +# at this point all tools that have a custom run process, like "status" and "auth" have been run and exited. # Unless a command is one of the ones in the if clause below, we will just run it with the default run_tool function and then exit. if [ "$SCRIPT_CMD" != "start" ] && [ "$SCRIPT_CMD" != "stop" ] && [ "$SCRIPT_CMD" != "restart" ]; then # hand off the command to the SolrCLI and let it handle the option parsing and validation run_tool "$SCRIPT_CMD" "$@" - ret=$? - if [ $ret -ne 0 ]; then - exit $ret - fi - exit 0 + exit $? fi # Everything below here is to support start, stop and restart. 
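For orientation, the renamed long-form start/stop options documented in the usage text above combine as in the sketch below. The directory paths and port are placeholders rather than values taken from this patch; SolrCloud remains the default mode and is only switched off by passing --user-managed.

```
# Illustrative invocation using the renamed long-form options (paths/port are placeholders)
bin/solr start --user-managed --server-dir server \
  --solr-home /var/solr/home --data-home /var/solr/data \
  -p 8983 --verbose

# Stop the same node later
bin/solr stop -p 8983
```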
@@ -695,7 +637,7 @@ fi : "${SOLR_RECOMMENDED_OPEN_FILES:=65000}" if [[ "${SOLR_ULIMIT_CHECKS:-}" != "false" ]]; then - if [ "$SCRIPT_CMD" == "start" ] || [ "$SCRIPT_CMD" == "restart" ] || [ "$SCRIPT_CMD" == "status" ]; then + if [ "$SCRIPT_CMD" == "start" ] || [ "$SCRIPT_CMD" == "restart" ]; then if hash ulimit 2>/dev/null; then openFiles=$(ulimit -n) maxProcs=$(ulimit -u) @@ -739,16 +681,17 @@ FORCE=false SOLR_OPTS=(${SOLR_OPTS:-}) SCRIPT_SOLR_OPTS=() PASS_TO_RUN_EXAMPLE=() +SOLR_MODE="solrcloud" if [ $# -gt 0 ]; then while true; do case "${1:-}" in - -c|--cloud|-cloud) - SOLR_MODE="solrcloud" - PASS_TO_RUN_EXAMPLE+=("-c") + --user-managed) + SOLR_MODE="user-managed" + PASS_TO_RUN_EXAMPLE+=("--user-managed") shift ;; - -d|--dir|-dir) + --server-dir) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Server directory is required when using the $1 option!" exit 1 @@ -768,7 +711,7 @@ if [ $# -gt 0 ]; then SOLR_SERVER_DIR="$(cd "$SOLR_SERVER_DIR" || (echo "SOLR_SERVER_DIR not found" && exit 1); pwd)" shift 2 ;; - -s|--solr-home|-solr.home) + --solr-home) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Solr home directory is required when using the $1 option!" exit 1 @@ -777,11 +720,15 @@ if [ $# -gt 0 ]; then SOLR_HOME="$2" shift 2 ;; - -t|--data-home|-data.home) + --data-home) + if [[ -z "$2" || "${2:0:1}" == "-" ]]; then + print_usage "$SCRIPT_CMD" "Data home directory is required when using the $1 option!" + exit 1 + fi SOLR_DATA_HOME="$2" shift 2 ;; - -e|--example|-example) + -e|--example) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Example name is required when using the $1 option!" exit 1 @@ -789,11 +736,11 @@ if [ $# -gt 0 ]; then EXAMPLE="$2" shift 2 ;; - -f|--foreground|-foreground) + -f|--foreground) FG="true" shift ;; - --host|-host) + --host) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Hostname is required when using the $1 option!" exit 1 @@ -802,7 +749,7 @@ if [ $# -gt 0 ]; then PASS_TO_RUN_EXAMPLE+=("--host" "$SOLR_HOST") shift 2 ;; - -m|--memory|-memory) + -m|--memory) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Memory setting is required when using the $1 option!" exit 1 @@ -811,7 +758,7 @@ if [ $# -gt 0 ]; then PASS_TO_RUN_EXAMPLE+=("-m" "$SOLR_HEAP") shift 2 ;; - -p|--port|-port) + -p|--port) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Port number is required when using the $1 option!" exit 1 @@ -821,56 +768,63 @@ if [ $# -gt 0 ]; then PASS_TO_RUN_EXAMPLE+=("-p" "$SOLR_PORT") shift 2 ;; - -z|--zk-host|-zkHost|--zkHost) + -z|--zk-host) if [[ -z "$2" || "${2:0:1}" == "-" ]]; then print_usage "$SCRIPT_CMD" "Zookeeper connection string is required when using the $1 option!" exit 1 fi ZK_HOST="$2" - SOLR_MODE="solrcloud" PASS_TO_RUN_EXAMPLE+=("-z" "$ZK_HOST") shift 2 ;; - -a|--jvm-opts|-addlopts) + --jvm-opts) + if [[ -z "$2" ]]; then + print_usage "$SCRIPT_CMD" "JVM options are required when using the $1 option!" + exit 1 + fi ADDITIONAL_CMD_OPTS="$2" PASS_TO_RUN_EXAMPLE+=("--jvm-opts" "$ADDITIONAL_CMD_OPTS") shift 2 ;; - -j|--jettyconfig|-jettyconfig) + -j|--jettyconfig) + if [[ -z "$2" || "${2:0:1}" == "-" ]]; then + print_usage "$SCRIPT_CMD" "Jetty config is required when using the $1 option!" 
+ exit 1 + fi ADDITIONAL_JETTY_CONFIG="$2" - PASS_TO_RUN_EXAMPLE+=("-j" "$ADDITIONAL_JETTY_CONFIG") shift 2 ;; - -k|--key|-key) + -k|--key) + if [[ -z "$2" || "${2:0:1}" == "-" ]]; then + print_usage "$SCRIPT_CMD" "Stop key is required when using the $1 option!" + exit 1 + fi STOP_KEY="$2" shift 2 ;; - -h|--help|-help) + -h|--help) print_usage "$SCRIPT_CMD" exit 0 ;; - --noprompt|-noprompt|--no-prompt) + -y|--no-prompt) PASS_TO_RUN_EXAMPLE+=("--no-prompt") shift ;; - -V|--verbose|-verbose) + --verbose) verbose=true - PASS_TO_RUN_EXAMPLE+=("--verbose") - shift - ;; - -v) SOLR_LOG_LEVEL=DEBUG + PASS_TO_RUN_EXAMPLE+=("--verbose") shift ;; - -q) + -q|--quiet) SOLR_LOG_LEVEL=WARN shift ;; - --all|-all) + --all) stop_all=true shift ;; - --force|-force) + --force) FORCE=true PASS_TO_RUN_EXAMPLE+=("--force") shift @@ -927,7 +881,7 @@ fi # otherwise let this script proceed to process the user request # if [ -n "${EXAMPLE:-}" ] && [ "$SCRIPT_CMD" == "start" ]; then - run_tool run_example -e "$EXAMPLE" -d "$SOLR_SERVER_DIR" --url-scheme "$SOLR_URL_SCHEME" "${PASS_TO_RUN_EXAMPLE[@]}" + run_tool run_example -e "$EXAMPLE" --server-dir "$SOLR_SERVER_DIR" --url-scheme "$SOLR_URL_SCHEME" "${PASS_TO_RUN_EXAMPLE[@]}" exit $? fi @@ -1076,8 +1030,7 @@ if [ "${SOLR_HOME:0:${#EXAMPLE_DIR}}" = "$EXAMPLE_DIR" ]; then SOLR_LOGS_DIR="$SOLR_HOME/../logs" fi -# Set the logging manager by default, so that Lucene JUL logs are included with Solr logs. -LOG4J_CONFIG=("-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager") +LOG4J_CONFIG=() if [ -n "${LOG4J_PROPS:-}" ]; then LOG4J_CONFIG+=("-Dlog4j.configurationFile=$LOG4J_PROPS") fi @@ -1100,31 +1053,13 @@ fi # Establish default GC logging opts if no env var set (otherwise init to sensible default) if [ -z "${GC_LOG_OPTS}" ]; then - if [[ "$JAVA_VER_NUM" -lt "9" ]] ; then - GC_LOG_OPTS=('-verbose:gc' '-XX:+PrintHeapAtGC' '-XX:+PrintGCDetails' \ - '-XX:+PrintGCDateStamps' '-XX:+PrintGCTimeStamps' '-XX:+PrintTenuringDistribution' \ - '-XX:+PrintGCApplicationStoppedTime') - else - GC_LOG_OPTS=('-Xlog:gc*') - fi -else - # TODO: Should probably not overload GC_LOG_OPTS as both string and array, but leaving it be for now - # shellcheck disable=SC2128 - GC_LOG_OPTS=($GC_LOG_OPTS) + GC_LOG_OPTS=('-Xlog:gc*') fi # if verbose gc logging enabled, setup the location of the log file and rotation if [ "${#GC_LOG_OPTS[@]}" -gt 0 ]; then - if [[ "$JAVA_VER_NUM" -lt "9" ]] || [ "$JAVA_VENDOR" == "OpenJ9" ]; then - gc_log_flag="-Xloggc" - if [ "$JAVA_VENDOR" == "OpenJ9" ]; then - gc_log_flag="-Xverbosegclog" - fi - if [ -z ${JAVA8_GC_LOG_FILE_OPTS+x} ]; then - GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log" '-XX:+UseGCLogFileRotation' '-XX:NumberOfGCLogFiles=9' '-XX:GCLogFileSize=20M') - else - GC_LOG_OPTS+=($JAVA8_GC_LOG_FILE_OPTS) - fi + if [ "$JAVA_VENDOR" == "OpenJ9" ]; then + GC_LOG_OPTS+=("-Xverbosegclog:$SOLR_LOGS_DIR/solr_gc.log" '-XX:+UseGCLogFileRotation' '-XX:NumberOfGCLogFiles=9' '-XX:GCLogFileSize=20M') else # https://openjdk.java.net/jeps/158 for i in "${!GC_LOG_OPTS[@]}"; @@ -1138,11 +1073,6 @@ if [ "${#GC_LOG_OPTS[@]}" -gt 0 ]; then fi fi -# If ZK_HOST is defined, the assume SolrCloud mode -if [[ -n "${ZK_HOST:-}" ]]; then - SOLR_MODE="solrcloud" -fi - if [ "${SOLR_MODE:-}" == 'solrcloud' ]; then : "${ZK_CLIENT_TIMEOUT:=30000}" CLOUD_MODE_OPTS=("-DzkClientTimeout=$ZK_CLIENT_TIMEOUT") @@ -1226,7 +1156,7 @@ else SECURITY_MANAGER_OPTS=() fi -# Enable ADMIN UI by default, and give the option for users to disable it +# Enable Admin UI by default, and give 
the option for users to disable it if [ "${SOLR_ADMIN_UI_DISABLED:-false}" == "true" ]; then SOLR_ADMIN_UI="-DdisableAdminUI=true" echo -e "ADMIN UI Disabled" @@ -1369,22 +1299,12 @@ function start_solr() { cd "$SOLR_SERVER_DIR" || (echo -e "\nCd to SOLR_SERVER_DIR failed" && exit 1) if [ ! -e "$SOLR_SERVER_DIR/start.jar" ]; then - echo -e "\nERROR: start.jar file not found in $SOLR_SERVER_DIR!\nPlease check your -d parameter to set the correct Solr server directory.\n" + echo -e "\nERROR: start.jar file not found in $SOLR_SERVER_DIR!\nPlease check your --server-dir parameter to set the correct Solr server directory.\n" exit 1 fi - # Workaround for JIT crash, see https://issues.apache.org/jira/browse/SOLR-16463 - if [[ "$JAVA_VER_NUM" -ge "17" ]] ; then - SCRIPT_SOLR_OPTS+=("-XX:CompileCommand=exclude,com.github.benmanes.caffeine.cache.BoundedLocalCache::put") - echo "Java $JAVA_VER_NUM detected. Enabled workaround for SOLR-16463" - fi - - # Vector optimizations are only supported for Java 20 and 21 for now. - # This will need to change as Lucene is upgraded and newer Java versions are released - if [[ "$JAVA_VER_NUM" -ge "20" ]] && [[ "$JAVA_VER_NUM" -le "21" ]] ; then - SCRIPT_SOLR_OPTS+=("--add-modules" "jdk.incubator.vector") - echo "Java $JAVA_VER_NUM detected. Incubating Panama Vector APIs have been enabled" - fi + # Add vector optimizations module + SCRIPT_SOLR_OPTS+=("--add-modules" "jdk.incubator.vector") SOLR_START_OPTS=('-server' "${JAVA_MEM_OPTS[@]}" "${GC_TUNE_ARR[@]}" "${GC_LOG_OPTS[@]}" "${IP_ACL_OPTS[@]}" \ "${REMOTE_JMX_OPTS[@]}" "${CLOUD_MODE_OPTS[@]}" -Dsolr.log.dir="$SOLR_LOGS_DIR" \ diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd index 49f08502dae..7541e2183ac 100755 --- a/solr/bin/solr.cmd +++ b/solr/bin/solr.cmd @@ -20,6 +20,8 @@ IF "%OS%"=="Windows_NT" setlocal enabledelayedexpansion enableextensions +@REM What version of Java is required to run this version of Solr. +set REQUIRED_JAVA_VERSION=21 set "PASS_TO_RUN_EXAMPLE=" REM Determine top-level Solr directory @@ -52,7 +54,11 @@ IF NOT DEFINED JAVA_HOME ( set "JAVA_HOME=%%B" ) ) -IF NOT DEFINED JAVA_HOME goto need_java_home +IF NOT DEFINED JAVA_HOME ( + REM Need java home + @echo "Please set the JAVA_HOME environment variable to the path where you installed Java !REQUIRED_JAVA_VERSION!+" + goto done +) set JAVA_HOME=%JAVA_HOME:"=% IF %JAVA_HOME:~-1%==\ SET JAVA_HOME=%JAVA_HOME:~0,-1% IF NOT EXIST "%JAVA_HOME%\bin\java.exe" ( @@ -61,8 +67,8 @@ IF NOT EXIST "%JAVA_HOME%\bin\java.exe" ( ) set "JAVA=%JAVA_HOME%\bin\java" CALL :resolve_java_info -IF !JAVA_MAJOR_VERSION! LSS 8 ( - set "SCRIPT_ERROR=Java 1.8 or later is required to run Solr. Current Java version is: !JAVA_VERSION_INFO! (detected major: !JAVA_MAJOR_VERSION!)" +IF !JAVA_MAJOR_VERSION! LSS !REQUIRED_JAVA_VERSION! ( + set "SCRIPT_ERROR=Java !REQUIRED_JAVA_VERSION! or later is required to run Solr. Current Java version is: !JAVA_VERSION_INFO! (detected major: !JAVA_MAJOR_VERSION!)" goto err ) @@ -197,7 +203,7 @@ IF NOT DEFINED SOLR_AUTH_TYPE ( IF DEFINED SOLR_AUTHENTICATION_OPTS ( echo WARNING: SOLR_AUTHENTICATION_OPTS variable configured without associated SOLR_AUTH_TYPE variable echo Please configure SOLR_AUTH_TYPE variable with the authentication type to be used. 
- echo Currently supported authentication types are [kerberos, basic] + echo Currently supported authentication types are [basic] ) ) @@ -205,7 +211,7 @@ IF DEFINED SOLR_AUTH_TYPE ( IF DEFINED SOLR_AUTHENTICATION_CLIENT_BUILDER ( echo WARNING: SOLR_AUTHENTICATION_CLIENT_BUILDER and SOLR_AUTH_TYPE variables are configured together echo Use SOLR_AUTH_TYPE variable to configure authentication type to be used - echo Currently supported authentication types are [kerberos, basic] + echo Currently supported authentication types are [basic] echo The value of SOLR_AUTHENTICATION_CLIENT_BUILDER configuration variable will be ignored ) ) @@ -214,12 +220,8 @@ IF DEFINED SOLR_AUTH_TYPE ( IF /I "%SOLR_AUTH_TYPE%" == "basic" ( set SOLR_AUTHENTICATION_CLIENT_BUILDER="org.apache.solr.client.solrj.impl.PreemptiveBasicAuthClientBuilderFactory" ) ELSE ( - IF /I "%SOLR_AUTH_TYPE%" == "kerberos" ( - set SOLR_AUTHENTICATION_CLIENT_BUILDER="org.apache.solr.client.solrj.impl.PreemptiveBasicAuthClientBuilderFactory" - ) ELSE ( - echo ERROR: Value specified for SOLR_AUTH_TYPE configuration variable is invalid. - goto err - ) + echo ERROR: Value specified for SOLR_AUTH_TYPE configuration variable is invalid. + goto err ) ) @@ -242,109 +244,76 @@ IF "%SOLR_JETTY_HOST%"=="" ( set "SOLR_JETTY_HOST=127.0.0.1" ) -set FIRST_ARG=%1 - -IF [%1]==[] goto usage - -REM -help is a special case to faciliate folks learning about how to use Solr. -IF "%1"=="-help" goto run_solrcli -IF "%1"=="-usage" goto run_solrcli -IF "%1"=="-h" goto run_solrcli -IF "%1"=="--help" goto run_solrcli -IF "%1"=="-help" goto run_solrcli -IF "%1"=="/?" goto run_solrcli -IF "%1"=="status" goto get_status -IF "%1"=="version" goto run_solrcli -IF "%1"=="-v" goto run_solrcli -IF "%1"=="-version" goto run_solrcli -IF "%1"=="assert" goto run_solrcli -IF "%1"=="zk" goto run_solrcli -IF "%1"=="export" goto run_solrcli -IF "%1"=="package" goto run_solrcli -IF "%1"=="auth" goto run_solrcli -IF "%1"=="api" goto run_solrcli -IF "%1"=="post" goto run_solrcli - -REM Only allow the command to be the first argument, assume start if not supplied + +REM Handle special commands IF "%1"=="start" goto set_script_cmd IF "%1"=="stop" goto set_script_cmd IF "%1"=="restart" goto set_script_cmd -IF "%1"=="healthcheck" goto run_solrcli -IF "%1"=="create" goto run_solrcli -IF "%1"=="delete" goto run_solrcli -IF "%1"=="postlogs" goto run_solrcli +IF "%1"=="auth" goto set_script_cmd -IF "%1"=="auth" ( - set SCRIPT_CMD=auth - SHIFT - goto run_auth +REM Handle all other commands by simply running SolrCLI +"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ + -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ + -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ + org.apache.solr.cli.SolrCLI %* +if errorlevel 1 ( + exit /b 1 ) -IF "%1"=="config" goto run_solrcli +goto done +:set_script_cmd +set SCRIPT_CMD=%1 +SHIFT +IF "%SCRIPT_CMD%"=="auth" goto run_auth goto parse_args :usage IF NOT "%SCRIPT_ERROR%"=="" ECHO %SCRIPT_ERROR% -IF [%FIRST_ARG%]==[] goto run_solrcli -IF "%FIRST_ARG%"=="-help" goto run_solrcli -IF "%FIRST_ARG%"=="-usage" goto run_solrcli -IF "%FIRST_ARG%"=="-h" goto run_solrcli -IF "%FIRST_ARG%"=="--help" goto run_solrcli -IF "%FIRST_ARG%"=="/?" 
goto run_solrcli IF "%SCRIPT_CMD%"=="start" goto start_usage IF "%SCRIPT_CMD%"=="restart" goto start_usage IF "%SCRIPT_CMD%"=="stop" goto stop_usage -IF "%SCRIPT_CMD%"=="healthcheck" goto run_solrcli -IF "%SCRIPT_CMD%"=="create" goto run_solrcli -IF "%SCRIPT_CMD%"=="delete" goto run_solrcli -IF "%SCRIPT_CMD%"=="cluster" goto run_solrcli -IF "%SCRIPT_CMD%"=="zk" goto run_solrcli -IF "%SCRIPT_CMD%"=="auth" goto run_solrcli -IF "%SCRIPT_CMD%"=="package" goto run_solrcli -IF "%SCRIPT_CMD%"=="status" goto run_solrcli -IF "%SCRIPT_CMD%"=="postlogs" goto run_solrcli -goto done +REM Should not be reachable, but just in case +goto err :start_usage @echo. -@echo Usage: solr %SCRIPT_CMD% [-f] [-c] [--host hostname] [-p port] [-d directory] [-z zkHost] [-m memory] [-e example] [-s solr.solr.home] [-t solr.data.home] [--jvm-opts "jvm-opts"] [-V] +@echo Usage: solr %SCRIPT_CMD% [-f] [--user-managed] [--host hostname] [-p port] [--server-dir directory] [-z zkHost] [-m memory] [-e example] [--solr-home solr.solr.home] [--data-home solr.data.home] [--jvm-opts "jvm-opts"] [--verbose] @echo. -@echo -f Start Solr in foreground; default starts Solr in the background +@echo -f/--foreground Start Solr in foreground; default starts Solr in the background @echo and sends stdout / stderr to solr-PORT-console.log @echo. -@echo -c or --cloud Start Solr in SolrCloud mode; if -z not supplied and ZK_HOST not defined in -@echo solr.in.cmd, an embedded ZooKeeper instance is started on Solr port+1000, -@echo such as 9983 if Solr is bound to 8983 +@echo --user-managed Start Solr in user managed aka standalone mode" +@echo See the Ref Guide for more details: https://solr.apache.org/guide/solr/latest/deployment-guide/cluster-types.html @echo. @echo --host host Specify the hostname for this Solr instance @echo. -@echo -p port Specify the port to start the Solr HTTP listener on; default is 8983 +@echo -p/--port port Specify the port to start the Solr HTTP listener on; default is 8983 @echo The specified port (SOLR_PORT) will also be used to determine the stop port @echo STOP_PORT=(%%SOLR_PORT%%-1000) and JMX RMI listen port RMI_PORT=(%%SOLR_PORT%%+10000). @echo For instance, if you set -p 8985, then the STOP_PORT=7985 and RMI_PORT=18985 @echo. -@echo -d dir Specify the Solr server directory; defaults to server +@echo --server-dir dir Specify the Solr server directory; defaults to server @echo. @echo -z zkHost Zookeeper connection string; only used when running in SolrCloud mode using -c @echo If neither ZK_HOST is defined in solr.in.cmd nor the -z parameter is specified, @echo an embedded ZooKeeper instance will be launched. @echo Set the ZK_CREATE_CHROOT environment variable to true if your ZK host has a chroot path, and you want to create it automatically." @echo. -@echo -m memory Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g +@echo -m/--memory memory Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g @echo results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m @echo. -@echo -s dir Sets the solr.solr.home system property; Solr will create core directories under +@echo --solr-home dir Sets the solr.solr.home system property; Solr will create core directories under @echo this directory. This allows you to run multiple Solr instances on the same host -@echo while reusing the same server directory set using the -d parameter. If set, the +@echo while reusing the same server directory set using the --server-dir parameter. 
If set, the @echo specified directory should contain a solr.xml file, unless solr.xml exists in Zookeeper. @echo This parameter is ignored when running examples (-e), as the solr.solr.home depends @echo on which example is run. The default value is server/solr. If passed a relative dir @echo validation with the current dir will be done before trying the default server/^ @echo. -@echo -t dir Sets the solr.data.home system property, where Solr will store index data in ^/data subdirectories. +@echo --data-home dir Sets the solr.data.home system property, where Solr will store index data in ^/data subdirectories. @echo If not set, Solr uses solr.solr.home for both config and data. @echo. -@echo -e example Name of the example to run; available examples: +@echo -e/--example name Name of the example to run; available examples: @echo cloud: SolrCloud example @echo techproducts: Comprehensive example illustrating many of Solr's core capabilities @echo schemaless: Schema-less example (schema is inferred from data during indexing) @@ -360,17 +329,15 @@ goto done @echo you could pass: -j "--include-jetty-dir=/etc/jetty/custom/server/" @echo In most cases, you should wrap the additional parameters in double quotes. @echo. -@echo --no-prompt Don't prompt for input; accept all defaults when running examples that accept user input +@echo -y/--no-prompt Don't prompt for input; accept all defaults when running examples that accept user input @echo. -@echo -v and -q Verbose (-v) or quiet (-q) logging. Sets default log level to DEBUG or WARN instead of INFO -@echo. -@echo -V/--verbose Verbose messages from this script +@echo --verbose and -q/--quiet Verbose or quiet logging. Sets default log level to DEBUG or WARN instead of INFO @echo. goto done :stop_usage @echo. -@echo Usage: solr stop [-k key] [-p port] [-V] +@echo Usage: solr stop [-k key] [-p port] [--verbose] @echo. @echo -k key Stop key; default is solrrocks @echo. @@ -378,72 +345,73 @@ goto done @echo. @echo --all Find and stop all running Solr servers on this host @echo. -@echo -V/--verbose Verbose messages from this script +@echo --verbose Verbose messages from this script @echo. @echo NOTE: To see if any Solr servers are running, do: solr status @echo. goto done - -REM Really basic command-line arg parsing +REM Parse arguments for special commands (start, stop, restart) :parse_args set "arg=%~1" set "firstTwo=%arg:~0,2%" -IF "%SCRIPT_CMD%"=="" set SCRIPT_CMD=start -IF [%1]==[] goto process_script_cmd -IF "%1"=="-help" goto usage -IF "%1"=="-h" goto usage -IF "%1"=="-usage" goto usage -IF "%1"=="/?" 
goto usage + +REM In case no arguments left, run special command +IF [%1]==[] goto run_special_command + +REM Skip start / restart arguments if stop command +IF "%SCRIPT_CMD%"=="stop" goto parse_stop_args + +:parse_start_args IF "%1"=="-f" goto set_foreground_mode IF "%1"=="--foreground" goto set_foreground_mode -IF "%1"=="-V" goto set_verbose IF "%1"=="--verbose" goto set_verbose -IF "%1"=="-v" goto set_debug IF "%1"=="-q" goto set_warn -IF "%1"=="-c" goto set_cloud_mode -IF "%1"=="-cloud" goto set_cloud_mode -IF "%1"=="--cloud" goto set_cloud_mode -IF "%1"=="-d" goto set_server_dir -IF "%1"=="--dir" goto set_server_dir -IF "%1"=="-s" goto set_solr_home_dir +IF "%1"=="--quiet" goto set_warn +IF "%1"=="--user-managed" goto set_user_managed_mode +IF "%1"=="--server-dir" goto set_server_dir IF "%1"=="--solr-home" goto set_solr_home_dir -IF "%1"=="-t" goto set_solr_data_dir -IF "%1"=="--solr-data" goto set_solr_data_dir +IF "%1"=="--data-home" goto set_solr_data_dir IF "%1"=="-e" goto set_example IF "%1"=="--example" goto set_example IF "%1"=="--host" goto set_host IF "%1"=="-m" goto set_memory IF "%1"=="--memory" goto set_memory -IF "%1"=="-p" goto set_port -IF "%1"=="--port" goto set_port IF "%1"=="-z" goto set_zookeeper IF "%1"=="--zk-host" goto set_zookeeper -IF "%1"=="-zkHost" goto set_zookeeper -IF "%1"=="--zkHost" goto set_zookeeper IF "%1"=="-s" goto set_solr_url IF "%1"=="--solr-url" goto set_solr_url -IF "%1"=="-solrUrl" goto set_solr_url -IF "%1"=="-a" goto set_addl_opts -IF "%1"=="--jvm-opts" goto set_addl_opts +IF "%1"=="--jvm-opts" goto set_jvm_opts IF "%1"=="-j" goto set_addl_jetty_config IF "%1"=="--jettyconfig" goto set_addl_jetty_config -IF "%1"=="--noprompt" goto set_noprompt +IF "%1"=="-y" goto set_noprompt IF "%1"=="--no-prompt" goto set_noprompt + +REM Skip stop arg parsing if not stop command +IF NOT "%SCRIPT_CMD%"=="stop" goto parse_general_args + +:parse_stop_args IF "%1"=="-k" goto set_stop_key IF "%1"=="--key" goto set_stop_key IF "%1"=="--all" goto set_stop_all -IF "%1"=="-all" goto set_stop_all + +:parse_general_args + +REM Print usage of command in case help option included +IF "%1"=="--help" goto usage +IF "%1"=="-h" goto usage + +REM other args supported by all special commands +IF "%1"=="-p" goto set_port +IF "%1"=="--port" goto set_port IF "%firstTwo%"=="-D" goto set_passthru + +REM Argument not supported / found IF NOT "%1"=="" goto invalid_cmd_line +REM Not reachable, but just in case goto invalid_cmd_line -:set_script_cmd -set SCRIPT_CMD=%1 -SHIFT -goto parse_args - :set_foreground_mode set FG=1 SHIFT @@ -451,12 +419,8 @@ goto parse_args :set_verbose set verbose=1 -set "PASS_TO_RUN_EXAMPLE=--verbose !PASS_TO_RUN_EXAMPLE!" -SHIFT -goto parse_args - -:set_debug set SOLR_LOG_LEVEL=DEBUG +set "PASS_TO_RUN_EXAMPLE=--verbose !PASS_TO_RUN_EXAMPLE!" 
SHIFT goto parse_args @@ -465,8 +429,8 @@ set SOLR_LOG_LEVEL=WARN SHIFT goto parse_args -:set_cloud_mode -set SOLR_MODE=solrcloud +:set_user_managed_mode +set SOLR_MODE=user-managed SHIFT goto parse_args @@ -661,9 +625,12 @@ SHIFT SHIFT goto parse_args -:set_addl_opts +:set_jvm_opts set "arg=%~2" set "SOLR_ADDL_ARGS=%~2" +IF "%SOLR_ADDL_ARGS%"=="" ( + set "EMPTY_ADDL_JVM_ARGS=true" +) SHIFT SHIFT goto parse_args @@ -676,15 +643,36 @@ SHIFT goto parse_args :set_passthru -set "PASSTHRU=%~1=%~2" +set "PASSTHRU_KEY=%~1" +set "PASSTHRU_VALUES=" + +SHIFT +:repeat_passthru +set "arg=%~1" +if "%arg%"=="" goto end_passthru +set firstChar=%arg:~0,1% +IF "%firstChar%"=="-" ( + goto end_passthru +) + +if defined PASSTHRU_VALUES ( + set "PASSTHRU_VALUES=%PASSTHRU_VALUES%,%arg%" +) else ( + set "PASSTHRU_VALUES=%arg%" +) +SHIFT +goto repeat_passthru + +:end_passthru +set "PASSTHRU=%PASSTHRU_KEY%=%PASSTHRU_VALUES%" + IF NOT "%SOLR_OPTS%"=="" ( set "SOLR_OPTS=%SOLR_OPTS% %PASSTHRU%" ) ELSE ( set "SOLR_OPTS=%PASSTHRU%" ) set "PASS_TO_RUN_EXAMPLE=%PASSTHRU% !PASS_TO_RUN_EXAMPLE!" -SHIFT -SHIFT + goto parse_args :set_noprompt @@ -694,8 +682,27 @@ set "PASS_TO_RUN_EXAMPLE=--no-prompt !PASS_TO_RUN_EXAMPLE!" SHIFT goto parse_args -REM Perform the requested command after processing args -:process_script_cmd +REM Handle invalid arguments passed to special commands (start, stop, restart) +:invalid_cmd_line +@echo. +IF "!SCRIPT_ERROR!"=="" ( + @echo Invalid command-line option: %1 +) ELSE ( + @echo ERROR: !SCRIPT_ERROR! +) +@echo. +IF "%SCRIPT_CMD%"=="start" ( + goto start_usage +) ELSE IF "%SCRIPT_CMD%"=="restart" ( + goto start_usage +) ELSE IF "%SCRIPT_CMD%"=="stop" ( + goto stop_usage +) +REM Not reachable, but just in case +goto err + +REM Process special commands (start, stop, restart) +:run_special_command IF "%verbose%"=="1" ( CALL :safe_echo "Using Solr root directory: %SOLR_TIP%" @@ -736,36 +743,20 @@ IF NOT EXIST "%SOLR_SERVER_DIR%" ( goto err ) -IF NOT "%EXAMPLE%"=="" goto run_example - -:start_solr -IF "%SOLR_HOME%"=="" set "SOLR_HOME=%SOLR_SERVER_DIR%\solr" -IF EXIST "%cd%\%SOLR_HOME%" set "SOLR_HOME=%cd%\%SOLR_HOME%" - -IF NOT EXIST "%SOLR_HOME%\" ( - IF EXIST "%SOLR_SERVER_DIR%\%SOLR_HOME%" ( - set "SOLR_HOME=%SOLR_SERVER_DIR%\%SOLR_HOME%" - ) ELSE ( - set "SCRIPT_ERROR=Solr home directory %SOLR_HOME% not found!" - goto err - ) -) - IF "%STOP_KEY%"=="" set STOP_KEY=solrrocks -@REM This is quite hacky, but examples rely on a different log4j2.xml -@REM so that we can write logs for examples to %SOLR_HOME%\..\logs -IF [%SOLR_LOGS_DIR%] == [] ( - set "SOLR_LOGS_DIR=%SOLR_SERVER_DIR%\logs" -) ELSE ( - set SOLR_LOGS_DIR=%SOLR_LOGS_DIR:"=% -) +IF NOT "%EXAMPLE%"=="" ( + REM Run the requested example -set "EXAMPLE_DIR=%SOLR_TIP%\example" -set TMP_SOLR_HOME=!SOLR_HOME:%EXAMPLE_DIR%=! -IF NOT "%TMP_SOLR_HOME%"=="%SOLR_HOME%" ( - set "SOLR_LOGS_DIR=%SOLR_HOME%\..\logs" - set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml" + "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ + -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ + -Dsolr.install.symDir="%SOLR_TIP%" ^ + -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ + org.apache.solr.cli.SolrCLI run_example --script "%SDIR%\solr.cmd" -e %EXAMPLE% --server-dir "%SOLR_SERVER_DIR%" ^ + --url-scheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE! 
+ + REM End of run_example + goto done ) set IS_RESTART=0 @@ -778,77 +769,111 @@ IF "%SCRIPT_CMD%"=="restart" ( set IS_RESTART=1 ) +REM Skipt to start if not stop or restart (that executes stop first) +IF "%SCRIPT_CMD%"=="start" goto start_solr + @REM stop logic here +:stop_solr IF "%SOLR_STOP_WAIT%"=="" ( set SOLR_STOP_WAIT=180 ) -IF "%SCRIPT_CMD%"=="stop" ( - IF "%SOLR_PORT%"=="" ( - IF "%STOP_ALL%"=="1" ( - set found_it=0 - for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do ( - set SOME_SOLR_PORT= - For /F "delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J - if NOT "!SOME_SOLR_PORT!"=="" ( - for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do ( - @REM j is the ip:port and k is the pid - IF NOT "%%k"=="0" ( - IF "%%j"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" ( - set found_it=1 - @echo Stopping Solr process %%k running on port !SOME_SOLR_PORT! - IF "%STOP_PORT%"=="" ( - set /A LOCAL_STOP_PORT=!SOME_SOLR_PORT! - 1000 - ) else ( - set LOCAL_STOP_PORT=%STOP_PORT% - ) - "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!LOCAL_STOP_PORT! STOP.KEY=%STOP_KEY% --stop - del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port - REM wait for the process to terminate - CALL :wait_for_process_exit %%k !SOLR_STOP_WAIT! - REM Kill it if it is still running after the graceful shutdown - IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( - qprocess "%%k" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%k && taskkill /f /PID %%k - ) else ( - qprocess "%%k" >nul 2>nul && taskkill /f /PID %%k - ) +IF "%SOLR_PORT%"=="" ( + IF "%STOP_ALL%"=="1" ( + REM Stop all running Solr instances + set found_it=0 + for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do ( + set SOME_SOLR_PORT= + For /F "delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J + if NOT "!SOME_SOLR_PORT!"=="" ( + for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do ( + @REM j is the ip:port and k is the pid + IF NOT "%%k"=="0" ( + IF "%%j"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" ( + set found_it=1 + @echo Stopping Solr process %%k running on port !SOME_SOLR_PORT! + IF "%STOP_PORT%"=="" ( + set /A LOCAL_STOP_PORT=!SOME_SOLR_PORT! - 1000 + ) else ( + set LOCAL_STOP_PORT=%STOP_PORT% + ) + "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!LOCAL_STOP_PORT! STOP.KEY=%STOP_KEY% --stop + del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port + REM wait for the process to terminate + CALL :wait_for_process_exit %%k !SOLR_STOP_WAIT! + REM Kill it if it is still running after the graceful shutdown + IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( + qprocess "%%k" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%k && taskkill /f /PID %%k + ) else ( + qprocess "%%k" >nul 2>nul && taskkill /f /PID %%k ) ) ) ) ) - if "!found_it!"=="0" echo No Solr nodes found to stop. - ) ELSE ( - set "SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use --all to stop all running nodes on this host." - goto err ) + if "!found_it!"=="0" echo No Solr nodes found to stop. 
) ELSE ( - set found_it=0 - For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do ( - IF NOT "%%N"=="0" ( - IF "%%M"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" ( - set found_it=1 - @echo Stopping Solr process %%N running on port %SOLR_PORT% - IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000 - "%JAVA%" %SOLR_SSL_OPTS% %SOLR_TOOL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" %SOLR_JETTY_CONFIG% STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop - del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port - REM wait for the process to terminate - CALL :wait_for_process_exit %%N !SOLR_STOP_WAIT! - REM Kill it if it is still running after the graceful shutdown - IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( - qprocess "%%N" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%N && taskkill /f /PID %%N - ) else ( - qprocess "%%N" >nul 2>nul && taskkill /f /PID %%N - ) + set "SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use --all to stop all running nodes on this host." + goto err + ) +) ELSE ( + REM Stop Solr running on specific port + set found_it=0 + For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do ( + IF NOT "%%N"=="0" ( + IF "%%M"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" ( + set found_it=1 + @echo Stopping Solr process %%N running on port %SOLR_PORT% + IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000 + "%JAVA%" %SOLR_SSL_OPTS% %SOLR_TOOL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" %SOLR_JETTY_CONFIG% STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop + del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port + REM wait for the process to terminate + CALL :wait_for_process_exit %%N !SOLR_STOP_WAIT! + REM Kill it if it is still running after the graceful shutdown + IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( + qprocess "%%N" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%N && taskkill /f /PID %%N + ) else ( + qprocess "%%N" >nul 2>nul && taskkill /f /PID %%N ) ) ) - if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT% ) + if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT% +) + +IF "!IS_RESTART!"=="0" goto done + +REM Clean state here, continue with starting (start or restart) +set SCRIPT_CMD=start + +:start_solr +REM Prepare for starting Solr +IF "%SOLR_HOME%"=="" set "SOLR_HOME=%SOLR_SERVER_DIR%\solr" +IF EXIST "%cd%\%SOLR_HOME%" set "SOLR_HOME=%cd%\%SOLR_HOME%" + +IF NOT EXIST "%SOLR_HOME%\" ( + IF EXIST "%SOLR_SERVER_DIR%\%SOLR_HOME%" ( + set "SOLR_HOME=%SOLR_SERVER_DIR%\%SOLR_HOME%" + ) ELSE ( + set "SCRIPT_ERROR=Solr home directory %SOLR_HOME% not found!" + goto err + ) +) - IF "!IS_RESTART!"=="0" goto done +@REM Handle overriding where logs are written to +IF [%SOLR_LOGS_DIR%] == [] ( + set "SOLR_LOGS_DIR=%SOLR_SERVER_DIR%\logs" +) ELSE ( + set SOLR_LOGS_DIR=%SOLR_LOGS_DIR:"=% +) + +set "EXAMPLE_DIR=%SOLR_TIP%\example" +set TMP_SOLR_HOME=!SOLR_HOME:%EXAMPLE_DIR%=! 
+IF NOT "%TMP_SOLR_HOME%"=="%SOLR_HOME%" ( + set "SOLR_LOGS_DIR=%SOLR_HOME%\..\logs" + set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml" ) -IF "!IS_RESTART!"=="1" set SCRIPT_CMD=start IF "%SOLR_PORT%"=="" set SOLR_PORT=8983 IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000 @@ -865,18 +890,21 @@ IF DEFINED SOLR_ZK_EMBEDDED_HOST ( set "SCRIPT_SOLR_OPTS=%SCRIPT_SOLR_OPTS% -Dsolr.zk.embedded.host=%SOLR_ZK_EMBEDDED_HOST%" ) -IF "%SCRIPT_CMD%"=="start" ( - REM see if Solr is already running using netstat - For /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do ( - IF NOT "%%k"=="0" ( - IF "%%j"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" ( - set "SCRIPT_ERROR=Process %%k is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT" - goto err - ) +REM Make sure Solr is not running using netstat +For /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do ( + IF NOT "%%k"=="0" ( + IF "%%j"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" ( + set "SCRIPT_ERROR=Process %%k is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT" + goto err ) ) ) +IF "%EMPTY_ADDL_JVM_ARGS%"=="true" ( + set "SCRIPT_ERROR=JVM options are required when using the -a or --jvm-opts option!" + goto err +) + @REM determine if -server flag is supported by current JVM "%JAVA%" -server -version > nul 2>&1 IF ERRORLEVEL 1 ( @@ -888,20 +916,9 @@ IF ERRORLEVEL 1 ( set IS_JDK=true set "SERVEROPT=-server" ) -if !JAVA_MAJOR_VERSION! LSS 9 ( - "%JAVA%" -d64 -version > nul 2>&1 - IF ERRORLEVEL 1 ( - set "IS_64BIT=false" - @echo WARNING: 32-bit Java detected. Not recommended for production. Point your JAVA_HOME to a 64-bit JDK - @echo. - ) ELSE ( - set IS_64bit=true - ) -) ELSE ( - set IS_64bit=true -) IF NOT "%ZK_HOST%"=="" set SOLR_MODE=solrcloud +IF "%SOLR_MODE%"=="" set SOLR_MODE=solrcloud IF "%SOLR_MODE%"=="solrcloud" ( IF "%ZK_CLIENT_TIMEOUT%"=="" set "ZK_CLIENT_TIMEOUT=30000" @@ -937,7 +954,8 @@ IF "%SOLR_MODE%"=="solrcloud" ( IF EXIST "%SOLR_HOME%\collection1\core.properties" set "CLOUD_MODE_OPTS=!CLOUD_MODE_OPTS! -Dbootstrap_confdir=./solr/collection1/conf -Dcollection.configName=myconf -DnumShards=1" ) ELSE ( - set CLOUD_MODE_OPTS= + REM change Cloud mode to User Managed mode with flag + set "CLOUD_MODE_OPTS=" IF NOT EXIST "%SOLR_HOME%\solr.xml" ( IF "%SOLR_SOLRXML_REQUIRED%"=="true" ( set "SCRIPT_ERROR=Solr home directory %SOLR_HOME% must contain solr.xml!" @@ -1011,41 +1029,11 @@ IF "%GC_TUNE%"=="" ( -XX:+ExplicitGCInvokesConcurrent ) -REM Workaround for JIT crash, see https://issues.apache.org/jira/browse/SOLR-16463 -if !JAVA_MAJOR_VERSION! GEQ 17 ( - set SCRIPT_SOLR_OPTS=%SCRIPT_SOLR_OPTS% -XX:CompileCommand=exclude,com.github.benmanes.caffeine.cache.BoundedLocalCache::put - echo Java %JAVA_MAJOR_VERSION% detected. Enabled workaround for SOLR-16463 -) - -REM Vector optimizations are only supported for Java 20 and 21 for now. -REM This will need to change as Lucene is upgraded and newer Java versions are released -if !JAVA_MAJOR_VERSION! GEQ 20 if !JAVA_MAJOR_VERSION! LEQ 21 ( - set SCRIPT_SOLR_OPTS=%SCRIPT_SOLR_OPTS% --add-modules jdk.incubator.vector - echo Java %JAVA_MAJOR_VERSION% detected. 
Incubating Panama Vector APIs have been enabled -) +REM Add vector optimizations module +set SCRIPT_SOLR_OPTS=%SCRIPT_SOLR_OPTS% --add-modules jdk.incubator.vector -if !JAVA_MAJOR_VERSION! GEQ 9 if NOT "%JAVA_VENDOR%" == "OpenJ9" ( - IF NOT "%GC_LOG_OPTS%"=="" ( - echo ERROR: On Java 9 you cannot set GC_LOG_OPTS, only default GC logging is available. Exiting - GOTO :eof - ) - set GC_LOG_OPTS="-Xlog:gc*:file=\"!SOLR_LOGS_DIR!\solr_gc.log\":time,uptime:filecount=9,filesize=20M" -) else ( - IF "%GC_LOG_OPTS%"=="" ( - rem Set defaults for Java 8 - set GC_LOG_OPTS=-verbose:gc ^ - -XX:+PrintHeapAtGC ^ - -XX:+PrintGCDetails ^ - -XX:+PrintGCDateStamps ^ - -XX:+PrintGCTimeStamps ^ - -XX:+PrintTenuringDistribution ^ - -XX:+PrintGCApplicationStoppedTime - ) - if "%JAVA_VENDOR%" == "OpenJ9" ( - set GC_LOG_OPTS=!GC_LOG_OPTS! "-Xverbosegclog:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M - ) else ( - set GC_LOG_OPTS=!GC_LOG_OPTS! "-Xloggc:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M - ) +IF "%GC_LOG_OPTS%"=="" ( + set GC_LOG_OPTS="-Xlog:gc*" ) IF "%verbose%"=="1" ( @@ -1123,7 +1111,7 @@ IF "%SOLR_SSL_ENABLED%"=="true" ( set SOLR_LOGS_DIR_QUOTED="%SOLR_LOGS_DIR%" set SOLR_DATA_HOME_QUOTED="%SOLR_DATA_HOME%" -set "START_OPTS=%START_OPTS% -Dsolr.log.dir=%SOLR_LOGS_DIR_QUOTED% -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager" +set "START_OPTS=%START_OPTS% -Dsolr.log.dir=%SOLR_LOGS_DIR_QUOTED%" IF NOT "%SOLR_DATA_HOME%"=="" set "START_OPTS=%START_OPTS% -Dsolr.data.home=%SOLR_DATA_HOME_QUOTED%" IF NOT DEFINED LOG4J_CONFIG set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml" @@ -1174,9 +1162,8 @@ IF "%FG%"=="1" ( -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar %SOLR_JETTY_CONFIG% "%SOLR_JETTY_ADDL_CONFIG%" > "!SOLR_LOGS_DIR!\solr-%SOLR_PORT%-console.log" echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port - REM default to 30 seconds for backwards compatibility. IF "!SOLR_START_WAIT!"=="" ( - set SOLR_START_WAIT=30 + set SOLR_START_WAIT=180 ) REM now wait to see Solr come online ... "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dsolr.default.confdir="%DEFAULT_CONFDIR%"^ @@ -1191,319 +1178,7 @@ IF "%FG%"=="1" ( goto done -:run_example -REM Run the requested example - -"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -Dsolr.install.symDir="%SOLR_TIP%" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI run_example --script "%SDIR%\solr.cmd" -e %EXAMPLE% -d "%SOLR_SERVER_DIR%" ^ - --url-scheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE! - -REM End of run_example -goto done - -:get_status -REM Find all Java processes, correlate with those listening on a port -REM and then try to contact via that port using the status tool -for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do ( - set SOME_SOLR_PORT= - For /F "Delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J - if NOT "!SOME_SOLR_PORT!"=="" ( - for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do ( - IF NOT "%%k"=="0" ( - if "%%j"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" ( - @echo. 
- set has_info=1 - echo Found Solr process %%k running on port !SOME_SOLR_PORT! - REM Passing in %2 (-h or --help) directly is captured by a custom help path for usage output - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI status --solr-url !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!SOME_SOLR_PORT! %2 - @echo. - ) - ) - ) - ) -) -if NOT "!has_info!"=="1" echo No running Solr nodes found. -set has_info= -goto done - -:run_solrcli -"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI %* -if errorlevel 1 ( - exit /b 1 -) -goto done - -:parse_config_args -IF [%1]==[] goto run_config -IF "%1"=="-z" goto set_config_zk -IF "%1"=="--zk-host" goto set_config_zk -IF "%1"=="-zkHost" goto set_config_zk -IF "%1"=="--zkHost" goto set_config_zk -IF "%1"=="-s" goto set_config_url_scheme -IF "%1"=="-scheme" goto set_config_url_scheme -set "CONFIG_ARGS=!CONFIG_ARGS! %1" -SHIFT -goto parse_config_args - -:set_config_zk -set ZK_HOST=%~2 -SHIFT -SHIFT -goto parse_config_args - -:set_config_url_scheme -set SOLR_URL_SCHEME=%~2 -SHIFT -SHIFT -goto parse_config_args - -REM Clumsy to do the state machine thing for -d and -n, but that's required for back-compat -:parse_zk_args -IF "%1"=="-V" ( - goto set_zk_verbose -) ELSE IF "%1"=="upconfig" ( - goto set_zk_op -) ELSE IF "%1"=="downconfig" ( - goto set_zk_op -) ELSE IF "%1"=="cp" ( - goto set_zk_op -) ELSE IF "%1"=="mv" ( - goto set_zk_op -) ELSE IF "%1"=="rm" ( - goto set_zk_op -) ELSE IF "%1"=="ls" ( - goto set_zk_op -) ELSE IF "%1"=="mkroot" ( - goto set_zk_op -) ELSE IF "%1"=="linkconfig" ( - goto set_zk_op -) ELSE IF "%1"=="updateacls" ( - goto set_zk_op -) ELSE IF "%1"=="-n" ( - goto set_config_name -) ELSE IF "%1"=="-r" ( - goto set_zk_recurse -) ELSE IF "%1"=="-configname" ( - goto set_config_name -) ELSE IF "%1"=="-d" ( - goto set_configdir -) ELSE IF "%1"=="-confdir" ( - goto set_configdir -) ELSE IF "%1"=="--conf-dir" ( - goto set_configdir -) ELSE IF "%1"=="-c" ( - goto set_collection_zk -) ELSE IF "%1"=="-z" ( - goto set_config_zk -) ELSE IF "!ZK_SRC!"=="" ( - if not "%~1"=="" ( - goto set_zk_src - ) -) ELSE IF "!ZK_DST!"=="" ( - IF "%ZK_OP%"=="cp" ( - goto set_zk_dst - ) - IF "%ZK_OP%"=="mv" ( - goto set_zk_dst - ) - set ZK_DST="_" -) ELSE IF NOT "%1"=="" ( - set ERROR_MSG="Unrecognized or misplaced zk argument %1%" - goto zk_short_usage -) -goto run_zk - -:set_zk_op -set ZK_OP=%~1 -SHIFT -goto parse_zk_args - -:set_zk_verbose -set ZK_VERBOSE="--verbose" -SHIFT -goto parse_zk_args - -:set_config_name -set CONFIGSET_NAME=%~2 -SHIFT -SHIFT -goto parse_zk_args - -:set_configdir -set CONFIGSET_DIR=%~2 -SHIFT -SHIFT -goto parse_zk_args - -:set_collection_zk -set ZK_COLLECTION=%~2 -SHIFT -SHIFT -goto parse_zk_args - -:set_config_zk -set ZK_HOST=%~2 -SHIFT -SHIFT -goto parse_zk_args - -:set_zk_src -set ZK_SRC=%~1 -SHIFT -goto parse_zk_args - -:set_zk_dst -set ZK_DST=%~1 -SHIFT -goto parse_zk_args - -:set_zk_recurse -set ZK_RECURSE="true" -SHIFT -goto parse_zk_args - -:run_zk -IF "!ZK_OP!"=="" ( - set 
"ERROR_MSG=Invalid command specified for zk sub-command" - goto zk_short_usage -) - -set CONNECTION_PARAMS="" - -IF "!ZK_OP!"=="" ( - set CONNECTION_PARAMS="--solr-url !ZK_SOLR_URL!" -) -ELSE ( - set CONNECTION_PARAMS="--zk-host ZK_HOST!" -) - -IF "!ZK_OP!"=="upconfig" ( - IF "!CONFIGSET_NAME!"=="" ( - set ERROR_MSG="-n option must be set for upconfig" - goto zk_short_usage - ) - IF "!CONFIGSET_DIR!"=="" ( - set ERROR_MSG="The -d option must be set for upconfig." - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! --conf-name !CONFIGSET_NAME! --conf-dir !CONFIGSET_DIR! %CONNECTION_PARAMS% %ZK_VERBOSE%^ -) ELSE IF "!ZK_OP!"=="downconfig" ( - IF "!CONFIGSET_NAME!"=="" ( - set ERROR_MSG="-n option must be set for downconfig" - goto zk_short_usage - ) - IF "!CONFIGSET_DIR!"=="" ( - set ERROR_MSG="The -d option must be set for downconfig." - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! --conf-name !CONFIGSET_NAME! --conf-dir !CONFIGSET_DIR! -z !ZK_HOST! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="linkconfig" ( - IF "!CONFIGSET_NAME!"=="" ( - set ERROR_MSG="-n option must be set for linkconfig" - goto zk_short_usage - ) - IF "!ZK_COLLECTION!"=="" ( - set ERROR_MSG="The -c option must be set for linkconfig." - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! --conf-name !CONFIGSET_NAME! -c !ZK_COLLECTION! -z !ZK_HOST! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="updateacls" ( - IF "%ZK_SRC"=="" ( - set ERROR_MSG="Zookeeper path to remove must be specified when using the 'ls' command" - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! --path !ZK_SRC! -z !ZK_HOST! 
%ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="cp" ( - IF "%ZK_SRC%"=="" ( - set ERROR_MSG=" must be specified for 'cp' command" - goto zk_short_usage - ) - IF "%ZK_DST%"=="" ( - set ERROR_MSG= must be specified for 'cp' command" - goto zk_short_usage - ) - IF NOT "!ZK_SRC:~0,3!"=="zk:" ( - IF NOT "!%ZK_DST:~0,3!"=="zk:" ( - set ERROR_MSG="At least one of src or dst must be prefixed by 'zk:'" - goto zk_short_usage - ) - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! -z !ZK_HOST! --source !ZK_SRC! --destination !ZK_DST! --recurse !ZK_RECURSE! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="mv" ( - IF "%ZK_SRC%"=="" ( - set ERROR_MSG=" must be specified for 'mv' command" - goto zk_short_usage - ) - IF "%ZK_DST%"=="" ( - set ERROR_MSG=" must be specified for 'mv' command" - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! -z !ZK_HOST! --source !ZK_SRC! --destination !ZK_DST! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="rm" ( - IF "%ZK_SRC"=="" ( - set ERROR_MSG="Zookeeper path to remove must be specified when using the 'rm' command" - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! -z !ZK_HOST! --path !ZK_SRC! --recurse !ZK_RECURSE! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="ls" ( - IF "%ZK_SRC"=="" ( - set ERROR_MSG="Zookeeper path to remove must be specified when using the 'ls' command" - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! -z !ZK_HOST! --path !ZK_SRC! --recurse !ZK_RECURSE! %ZK_VERBOSE% -) ELSE IF "!ZK_OP!"=="mkroot" ( - IF "%ZK_SRC"=="" ( - set ERROR_MSG="Zookeeper path to create must be specified when using the 'mkroot' command" - goto zk_short_usage - ) - "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^ - -Dlog4j.configurationFile="file:///%SOLR_SERVER_DIR%\resources\log4j2-console.xml" ^ - -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^ - org.apache.solr.cli.SolrCLI !ZK_OP! -z !ZK_HOST! --path !ZK_SRC! %ZK_VERBOSE% -) ELSE ( - set ERROR_MSG="Unknown zk option !ZK_OP!" - goto zk_short_usage -) -goto done - - :run_auth -IF "%1"=="-help" goto usage -IF "%1"=="-usage" goto usage - REM Options parsing. REM Note: With the following technique of parsing, it is not possible REM to have an option without a value. 
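With the dedicated :get_status, :run_solrcli and zk argument-parsing blocks removed, both bin/solr and solr.cmd now hand every subcommand other than start, stop, restart and auth straight to org.apache.solr.cli.SolrCLI, which does its own option parsing. A rough sketch of what that delegation looks like from the command line follows; the ZooKeeper address is a placeholder and the exact option spellings are whatever SolrCLI itself accepts.

```
# These subcommands run end-to-end inside SolrCLI; the launch scripts only assemble the JVM call
bin/solr status
bin/solr zk ls / -z localhost:9983
```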
@@ -1515,9 +1190,9 @@ for %%a in (%*) do ( if "!arg:~0,1!" equ "-" set "option=!arg!" ) else ( set "option!option!=%%a" - if "!option!" equ "-d" set "SOLR_SERVER_DIR=%%a" - if "!option!" equ "-s" set "SOLR_HOME=%%a" - if not "!option!" equ "-s" if not "!option!" equ "-d" ( + if "!option!" equ "--solr-home" set "SOLR_HOME=%%a" + if "!option!" equ "--server-dir" set "SOLR_SERVER_DIR=%%a" + if not "!option!" equ "--solr-home" if not "!option!" equ "--server-dir" ( set "AUTH_PARAMS=!AUTH_PARAMS! !option! %%a" ) set "option=" @@ -1557,53 +1232,12 @@ if "!AUTH_PORT!"=="" ( --solr-url !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!AUTH_PORT! goto done - -:invalid_cmd_line -@echo. -IF "!SCRIPT_ERROR!"=="" ( - @echo Invalid command-line option: %1 -) ELSE ( - @echo ERROR: !SCRIPT_ERROR! -) -@echo. -IF "%FIRST_ARG%"=="start" ( - goto start_usage -) ELSE IF "%FIRST_ARG:~0,1%" == "-" ( - goto start_usage -) ELSE IF "%FIRST_ARG%"=="restart" ( - goto start_usage -) ELSE IF "%FIRST_ARG%"=="stop" ( - goto stop_usage -) ELSE IF "%FIRST_ARG%"=="healthcheck" ( - goto run_solrcli -) ELSE IF "%FIRST_ARG%"=="create" ( - goto run_solrcli -) ELSE IF "%FIRST_ARG%"=="zk" ( - goto zk_short_usage -) ELSE IF "%FIRST_ARG%"=="auth" ( - goto auth_usage -) ELSE IF "%FIRST_ARG%"=="status" ( - goto run_solrcli -) - -:need_java_home -@echo Please set the JAVA_HOME environment variable to the path where you installed Java 1.8+ -goto done - -:need_java_vers -@echo Java 1.8 or later is required to run Solr. -goto done - :err @echo. @echo ERROR: !SCRIPT_ERROR! @echo. exit /b 1 -:done -ENDLOCAL -exit /b 0 - REM Tests what Java we have and sets some global variables :resolve_java_info @@ -1677,3 +1311,7 @@ GOTO :eof ) ) GOTO :eof + +:done +ENDLOCAL +exit /b 0 diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd index 77b1117208a..ac058eae131 100755 --- a/solr/bin/solr.in.cmd +++ b/solr/bin/solr.in.cmd @@ -49,15 +49,8 @@ REM set GC_TUNE=-XX:+ExplicitGCInvokesConcurrent REM set GC_TUNE=-XX:SurvivorRatio=4 REM set GC_TUNE=%GC_TUNE% -XX:TargetSurvivorRatio=90 REM set GC_TUNE=%GC_TUNE% -XX:MaxTenuringThreshold=8 -REM set GC_TUNE=%GC_TUNE% -XX:+UseConcMarkSweepGC -REM set GC_TUNE=%GC_TUNE% -XX:ConcGCThreads=4 REM set GC_TUNE=%GC_TUNE% -XX:ParallelGCThreads=4 -REM set GC_TUNE=%GC_TUNE% -XX:+CMSScavengeBeforeRemark REM set GC_TUNE=%GC_TUNE% -XX:PretenureSizeThreshold=64m -REM set GC_TUNE=%GC_TUNE% -XX:+UseCMSInitiatingOccupancyOnly -REM set GC_TUNE=%GC_TUNE% -XX:CMSInitiatingOccupancyFraction=50 -REM set GC_TUNE=%GC_TUNE% -XX:CMSMaxAbortablePrecleanTime=6000 -REM set GC_TUNE=%GC_TUNE% -XX:+CMSParallelRemarkEnabled REM set GC_TUNE=%GC_TUNE% -XX:+ParallelRefProcEnabled etc. REM Set the ZooKeeper connection string if using an external ZooKeeper ensemble @@ -97,7 +90,7 @@ REM set RMI_PORT=18983 REM Anything you add to the SOLR_OPTS variable will be included in the java REM start command line as-is, in ADDITION to other options. If you specify the -REM -a option on start script, those options will be appended as well. Examples: +REM --jvm-opts option on start script, those options will be appended as well. 
 REM set SOLR_OPTS=%SOLR_OPTS% -Dsolr.autoSoftCommit.maxTime=3000
 REM set SOLR_OPTS=%SOLR_OPTS% -Dsolr.autoCommit.maxTime=60000
@@ -262,4 +255,4 @@ REM set SOLR_MODULES=extraction,ltr

 REM Configure the default replica placement plugin to use if one is not configured in cluster properties
 REM See https://solr.apache.org/guide/solr/latest/configuration-guide/replica-placement-plugins.html for details
-REM set SOLR_PLACEMENTPLUGIN_DEFAULT=simple
\ No newline at end of file
+REM set SOLR_PLACEMENTPLUGIN_DEFAULT=simple
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 4202b98cd68..9d83a48c1c2 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -55,14 +55,8 @@
 #-XX:SurvivorRatio=4 \
 #-XX:TargetSurvivorRatio=90 \
 #-XX:MaxTenuringThreshold=8 \
-#-XX:+UseConcMarkSweepGC \
-#-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \
-#-XX:+CMSScavengeBeforeRemark \
+#-XX:ParallelGCThreads=4 \
 #-XX:PretenureSizeThreshold=64m \
-#-XX:+UseCMSInitiatingOccupancyOnly \
-#-XX:CMSInitiatingOccupancyFraction=50 \
-#-XX:CMSMaxAbortablePrecleanTime=6000 \
-#-XX:+CMSParallelRemarkEnabled \
 #-XX:+ParallelRefProcEnabled etc.

 # Set the ZooKeeper connection string if using an external ZooKeeper ensemble
@@ -279,7 +273,7 @@
 # other directory, which will implicitly enable heap dumping. Dump name pattern will be solr-[timestamp]-pid[###].hprof
 # When using this feature, it is recommended to have an external service monitoring the given dir.
 # If more fine grained control is required, you can manually add the appropriate flags to SOLR_OPTS
-# See https://docs.oracle.com/en/java/javase/11/troubleshoot/command-line-options1.html
+# See https://docs.oracle.com/en/java/javase/21/troubleshoot/command-line-options1.html
 # You can test this behavior by setting SOLR_HEAP=25m
 #SOLR_HEAP_DUMP=true
 #SOLR_HEAP_DUMP_DIR=/var/log/dumps
diff --git a/solr/core/build.gradle b/solr/core/build.gradle
index b36843fa568..f406f842201 100644
--- a/solr/core/build.gradle
+++ b/solr/core/build.gradle
@@ -23,178 +23,187 @@ dependencies {
   // Spotbugs Annotations are only needed for old findbugs
   // annotation usage like in Zookeeper during compilation time.
   // It is not included in the release so exclude from checks.
-  compileOnly 'com.github.spotbugs:spotbugs-annotations'
-  testCompileOnly 'com.github.spotbugs:spotbugs-annotations'
-  permitUnusedDeclared 'com.github.spotbugs:spotbugs-annotations'
+  compileOnly libs.spotbugs.annotations
+  testCompileOnly libs.spotbugs.annotations
+  permitUnusedDeclared libs.spotbugs.annotations

   // Exclude these from jar validation and license checks.
   configurations.jarValidation {
     exclude group: "com.github.spotbugs", module: "spotbugs-annotations"
   }

-  implementation 'io.swagger.core.v3:swagger-annotations-jakarta'
+  implementation libs.swagger3.annotations.jakarta

   // Export these dependencies so that they're imported transitively by
   // other modules.
   // These Lucene modules are the most significant to Solr
-  api "org.apache.lucene:lucene-core"
-  api "org.apache.lucene:lucene-analysis-common"
-  api "org.apache.lucene:lucene-queries"
+  api libs.apache.lucene.core
+  api libs.apache.lucene.analysis.common
+  api libs.apache.lucene.queries

   // We export logging api with dependencies, which is useful for all modules
-  api 'org.slf4j:slf4j-api'
+  api libs.slf4j.api

   api project(':solr:api')
   api project(':solr:solrj')
   api project(':solr:solrj-zookeeper')
   api project(':solr:solrj-streaming')
-
-  api 'io.dropwizard.metrics:metrics-core'
-  implementation ('io.dropwizard.metrics:metrics-graphite', {
+  api libs.dropwizard.metrics.core
+  implementation (libs.dropwizard.metrics.graphite, {
     exclude group: "com.rabbitmq", module: "amqp-client"
   })
-  implementation 'io.dropwizard.metrics:metrics-jmx'
-  implementation 'io.dropwizard.metrics:metrics-jvm'
+  implementation libs.dropwizard.metrics.jmx
+  implementation libs.dropwizard.metrics.jvm

-  implementation('org.glassfish.jersey.containers:jersey-container-jetty-http', {
+  implementation(libs.jersey.containers.jettyhttp, {
     exclude group: "org.eclipse.jetty", module: "jetty-continuation"
     exclude group: "org.glassfish.hk2.external", module: "jakarta.inject"
   })
-  permitUnusedDeclared 'org.glassfish.jersey.containers:jersey-container-jetty-http'
-  implementation 'org.glassfish.jersey.inject:jersey-hk2'
-  permitUnusedDeclared 'org.glassfish.jersey.inject:jersey-hk2'
-  implementation ('org.glassfish.jersey.media:jersey-media-json-jackson', {
+  permitUnusedDeclared libs.jersey.containers.jettyhttp
+  implementation libs.jersey.inject.hk2
+  permitUnusedDeclared libs.jersey.inject.hk2
+  implementation (libs.jersey.media.jsonjackson, {
     exclude group: "jakarta.xml.bind", module: "jakarta.xml.bind-api"
   })
-  permitUnusedDeclared 'org.glassfish.jersey.media:jersey-media-json-jackson'
-  implementation 'org.glassfish.jersey.core:jersey-common'
-  implementation 'org.glassfish.jersey.core:jersey-server'
-  implementation 'org.glassfish.hk2:hk2-api'
-  implementation 'jakarta.inject:jakarta.inject-api'
-  implementation 'jakarta.ws.rs:jakarta.ws.rs-api'
-  implementation 'jakarta.annotation:jakarta.annotation-api'
+  permitUnusedDeclared libs.jersey.media.jsonjackson
+  implementation libs.jersey.core.common
+  implementation libs.jersey.core.server
+  implementation libs.hk2.api
+  implementation libs.jakarta.inject.api
+  implementation libs.jakarta.ws.rsapi
+  implementation libs.jakarta.annotation.api

   // Non-API below; although there are exceptions
-  runtimeOnly "org.apache.lucene:lucene-analysis-kuromoji"
-  runtimeOnly "org.apache.lucene:lucene-analysis-nori"
-  runtimeOnly "org.apache.lucene:lucene-analysis-phonetic"
-  runtimeOnly "org.apache.lucene:lucene-backward-codecs"
-  implementation "org.apache.lucene:lucene-codecs"
-  implementation "org.apache.lucene:lucene-backward-codecs"
-  permitUnusedDeclared "org.apache.lucene:lucene-backward-codecs"
-  implementation "org.apache.lucene:lucene-classification"
-  implementation "org.apache.lucene:lucene-expressions"
-  implementation "org.apache.lucene:lucene-grouping"
-  implementation "org.apache.lucene:lucene-highlighter"
-  implementation "org.apache.lucene:lucene-join"
-  implementation "org.apache.lucene:lucene-misc"
-  implementation "org.apache.lucene:lucene-queryparser"
-  implementation "org.apache.lucene:lucene-spatial-extras"
-  implementation "org.apache.lucene:lucene-suggest"
+  runtimeOnly libs.apache.lucene.analysis.kuromoji
+  runtimeOnly libs.apache.lucene.analysis.nori
+  runtimeOnly libs.apache.lucene.analysis.phonetic
+  runtimeOnly libs.apache.lucene.backward.codecs
+  implementation libs.apache.lucene.codecs
+  implementation libs.apache.lucene.backward.codecs
+  permitUnusedDeclared libs.apache.lucene.backward.codecs
+  implementation libs.apache.lucene.classification
+  implementation libs.apache.lucene.expressions
+  implementation libs.apache.lucene.grouping
+  implementation libs.apache.lucene.highlighter
+  implementation libs.apache.lucene.join
+  implementation libs.apache.lucene.misc
+  implementation libs.apache.lucene.queryparser
+  implementation libs.apache.lucene.spatialextras
+  implementation libs.apache.lucene.suggest

   // Collections & lang utilities
-  implementation 'com.google.guava:guava'
-  implementation 'org.apache.commons:commons-lang3'
-  implementation 'org.apache.commons:commons-math3'
-  implementation 'commons-io:commons-io'
-  implementation 'com.carrotsearch:hppc'
+  implementation libs.google.guava
+  implementation libs.apache.commons.lang3
+  implementation libs.apache.commons.math3
+  implementation libs.commonsio.commonsio
+  implementation libs.carrotsearch.hppc

-  implementation('com.github.ben-manes.caffeine:caffeine') { transitive = false }
+  implementation(libs.benmanes.caffeine) { transitive = false }

-  implementation 'commons-codec:commons-codec'
+  implementation libs.commonscodec.commonscodec

-  implementation 'commons-cli:commons-cli'
+  implementation libs.commonscli.commonscli

-  implementation 'org.locationtech.spatial4j:spatial4j'
+  implementation libs.locationtech.spatial4j

-  implementation 'com.fasterxml.jackson.core:jackson-annotations'
-  implementation 'com.fasterxml.jackson.core:jackson-core'
-  implementation 'com.fasterxml.jackson.core:jackson-databind'
-  implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile'
-  implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-cbor'
+  implementation libs.fasterxml.jackson.core.annotations
+  implementation libs.fasterxml.jackson.core.core
+  implementation libs.fasterxml.jackson.core.databind
+  implementation libs.fasterxml.jackson.dataformat.smile
+  implementation libs.fasterxml.jackson.dataformat.cbor

-  implementation 'org.apache.httpcomponents:httpclient'
-  implementation 'org.apache.httpcomponents:httpcore'
+  implementation libs.apache.httpcomponents.httpclient
+  implementation libs.apache.httpcomponents.httpcore

-  implementation 'org.eclipse.jetty:jetty-client'
-  implementation 'org.eclipse.jetty:jetty-http'
-  implementation 'org.eclipse.jetty:jetty-io'
-  implementation 'org.eclipse.jetty.toolchain:jetty-servlet-api'
+  implementation libs.eclipse.jetty.client
+  implementation libs.eclipse.jetty.http
+  implementation libs.eclipse.jetty.io
+  implementation libs.eclipse.jetty.toolchain.servletapi

   // ZooKeeper
-  implementation('org.apache.zookeeper:zookeeper', {
+
+  implementation(libs.apache.curator.framework, {
+    exclude group: 'org.apache.zookeeper', module: 'zookeeper'
+  })
+  implementation(libs.apache.curator.client, {
+    exclude group: 'org.apache.zookeeper', module: 'zookeeper'
+  })
+  testImplementation(libs.apache.curator.test, {
+    exclude group: 'org.apache.zookeeper', module: 'zookeeper'
+  })
+  implementation(libs.apache.zookeeper.zookeeper, {
     exclude group: "org.apache.yetus", module: "audience-annotations"
   })
-  implementation('org.apache.zookeeper:zookeeper-jute') {
+  implementation(libs.apache.zookeeper.jute) {
     exclude group: 'org.apache.yetus', module: 'audience-annotations'
   }
-  testImplementation 'org.apache.zookeeper:zookeeper::tests'
+  testImplementation variantOf(libs.apache.zookeeper.zookeeper) { classifier 'tests' }

   // required for instantiating a Zookeeper server (for embedding ZK or running tests)
-  runtimeOnly ('org.xerial.snappy:snappy-java')
+  runtimeOnly libs.xerial.snappy.java

-  implementation('com.jayway.jsonpath:json-path', {
+  implementation(libs.jayway.jsonpath, {
     exclude group: "net.minidev", module: "json-smart"
   })

   // StatsComponents percentiles
-  implementation 'com.tdunning:t-digest'
+  implementation libs.tdunning.tdigest

   // Distributed Tracing
-  api 'io.opentelemetry:opentelemetry-api' // Tracer is exposed on some methods
-  implementation 'io.opentelemetry:opentelemetry-context'
+  api libs.opentelemetry.api // Tracer is exposed on some methods
+  implementation libs.opentelemetry.context

-  implementation 'org.apache.commons:commons-exec'
+  implementation libs.apache.commons.exec

-  implementation 'org.apache.logging.log4j:log4j-api'
-  implementation 'org.apache.logging.log4j:log4j-core'
-  runtimeOnly 'org.apache.logging.log4j:log4j-slf4j2-impl'
+  implementation libs.apache.log4j.api
+  implementation libs.apache.log4j.core
+  runtimeOnly libs.apache.log4j.slf4j2impl

   // For the PrometheusResponseWriter
-  implementation 'io.prometheus:prometheus-metrics-model:1.1.0'
-  implementation('io.prometheus:prometheus-metrics-exposition-formats:1.1.0', {
+  implementation libs.prometheus.metrics.model
+  implementation(libs.prometheus.metrics.expositionformats, {
     exclude group: "io.prometheus", module: "prometheus-metrics-shaded-protobuf"
     exclude group: "io.prometheus", module: "prometheus-metrics-config"
   })

   // For faster XML processing than the JDK
-  implementation 'org.codehaus.woodstox:stax2-api'
-  implementation 'com.fasterxml.woodstox:woodstox-core'
+  implementation libs.codehaus.woodstox.stax2api
+  implementation libs.fasterxml.woodstox.core

   // See https://issues.apache.org/jira/browse/LOG4J2-3609 due to needing these annotations
-  compileOnly 'biz.aQute.bnd:biz.aQute.bnd.annotation'
-  compileOnly 'org.osgi:osgi.annotation'
+  compileOnly libs.aqute.bnd.annotation
+  compileOnly libs.osgi.annotation

-  compileOnly 'com.github.stephenc.jcip:jcip-annotations'
+  compileOnly libs.stephenc.jcip.annotations

-  implementation 'com.j256.simplemagic:simplemagic'
+  implementation libs.j256.simplemagic

   // -- Test Dependencies
-  testRuntimeOnly 'org.slf4j:jcl-over-slf4j'
+  testRuntimeOnly libs.slf4j.jcloverslf4j

-  testRuntimeOnly "org.apache.lucene:lucene-analysis-icu"
+  testRuntimeOnly libs.apache.lucene.analysis.icu

   testRuntimeOnly project(':solr:modules:analysis-extras')

   testImplementation project(':solr:core')
   testImplementation project(':solr:test-framework')
-  testImplementation 'org.apache.lucene:lucene-test-framework'
+  testImplementation libs.apache.lucene.testframework

-  testImplementation 'org.eclipse.jetty:jetty-server'
-  testImplementation 'org.eclipse.jetty:jetty-servlet'
+  testImplementation libs.eclipse.jetty.server
+  testImplementation libs.eclipse.jetty.servlet

-  testImplementation 'com.carrotsearch.randomizedtesting:randomizedtesting-runner'
-  testImplementation 'junit:junit'
-  testImplementation 'org.hamcrest:hamcrest'
+  testImplementation libs.carrotsearch.randomizedtesting.runner
+  testImplementation libs.junit.junit
+  testImplementation libs.hamcrest.hamcrest

-  testImplementation('org.mockito:mockito-core', {
+  testImplementation(libs.mockito.core, {
     exclude group: "net.bytebuddy", module: "byte-buddy-agent"
   })
-  testRuntimeOnly('org.mockito:mockito-subclass', {
+  testRuntimeOnly(libs.mockito.subclass, {
     exclude group: "net.bytebuddy", module: "byte-buddy-agent"
   })
 }
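Editorial note on the dependency change above: solr/core/build.gradle now declares Apache Curator (curator-framework, curator-client, curator-test) alongside the direct ZooKeeper dependency. As a rough, hedged sketch only — this is not how Solr wires its ZooKeeper layer, and the connect string and class name below are placeholders — a Curator-based client bootstrap looks roughly like this:

```java
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;

// Illustration only (hypothetical class, placeholder connect string).
public class CuratorSketch {
  public static void main(String[] args) throws Exception {
    // Retry with exponential backoff: 1s base sleep, at most 3 retries.
    try (CuratorFramework client =
        CuratorFrameworkFactory.newClient("localhost:2181", new ExponentialBackoffRetry(1000, 3))) {
      client.start();
      client.blockUntilConnected();
      // List the children of the root znode to confirm the session works.
      System.out.println(client.getChildren().forPath("/"));
    } // CuratorFramework is Closeable, so the session is closed here.
  }
}
```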
exclude group: "net.bytebuddy", module: "byte-buddy-agent" }) } diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java index bfe35c47108..d4c6e454520 100644 --- a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java +++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java @@ -261,8 +261,7 @@ static class Cmd { } private void readPayloadType(Type t) { - if (t instanceof ParameterizedType) { - ParameterizedType typ = (ParameterizedType) t; + if (t instanceof ParameterizedType typ) { if (typ.getRawType() == PayloadObj.class) { isWrappedInPayloadObj = true; if (typ.getActualTypeArguments().length == 0) { @@ -271,8 +270,7 @@ private void readPayloadType(Type t) { return; } Type t1 = typ.getActualTypeArguments()[0]; - if (t1 instanceof ParameterizedType) { - ParameterizedType parameterizedType = (ParameterizedType) t1; + if (t1 instanceof ParameterizedType parameterizedType) { parameterClass = (Class) parameterizedType.getRawType(); } else { parameterClass = (Class) typ.getActualTypeArguments()[0]; @@ -345,9 +343,8 @@ public int hashCode() { public boolean equals(Object rhs) { if (null == rhs) return false; if (this == rhs) return true; - if (!(rhs instanceof Cmd)) return false; + if (!(rhs instanceof Cmd rhsCast)) return false; - final Cmd rhsCast = (Cmd) rhs; return Objects.equals(command, rhsCast.command) && Objects.equals(method, rhsCast.method) && Objects.equals(obj, rhsCast.obj) @@ -373,8 +370,7 @@ public static Map createSchema(Method m) { t = types[2]; // (SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj) if (types.length == 1) t = types[0]; // (PayloadObj) if (t != null) { - if (t instanceof ParameterizedType) { - ParameterizedType typ = (ParameterizedType) t; + if (t instanceof ParameterizedType typ) { if (typ.getRawType() == PayloadObj.class) { t = typ.getActualTypeArguments()[0]; } diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java index 48359f783b1..1d9a6d2a80d 100644 --- a/solr/core/src/java/org/apache/solr/api/ApiBag.java +++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java @@ -122,16 +122,14 @@ protected void attachValueToNode(PathTrie.Node node, Api o) { // If 'o' and 'node.obj' aren't both AnnotatedApi's then we can't aggregate the commands, so // fallback to the default behavior - if ((!(o instanceof AnnotatedApi)) || (!(node.getObject() instanceof AnnotatedApi))) { + if ((!(o instanceof AnnotatedApi beingRegistered)) + || (!(node.getObject() instanceof AnnotatedApi alreadyRegistered))) { super.attachValueToNode(node, o); return; } - final AnnotatedApi beingRegistered = (AnnotatedApi) o; - final AnnotatedApi alreadyRegistered = (AnnotatedApi) node.getObject(); - if (alreadyRegistered instanceof CommandAggregatingAnnotatedApi) { - final CommandAggregatingAnnotatedApi alreadyRegisteredAsCollapsing = - (CommandAggregatingAnnotatedApi) alreadyRegistered; + if (alreadyRegistered + instanceof CommandAggregatingAnnotatedApi alreadyRegisteredAsCollapsing) { alreadyRegisteredAsCollapsing.combineWith(beingRegistered); } else { final CommandAggregatingAnnotatedApi wrapperApi = @@ -404,11 +402,10 @@ public void registerLazy(PluginBag.PluginHolder holder, Plug public static SpecProvider constructSpec(PluginInfo info) { Object specObj = info == null ? 
diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java
index 48359f783b1..1d9a6d2a80d 100644
--- a/solr/core/src/java/org/apache/solr/api/ApiBag.java
+++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java
@@ -122,16 +122,14 @@ protected void attachValueToNode(PathTrie.Node node, Api o) {

       // If 'o' and 'node.obj' aren't both AnnotatedApi's then we can't aggregate the commands, so
       // fallback to the default behavior
-      if ((!(o instanceof AnnotatedApi)) || (!(node.getObject() instanceof AnnotatedApi))) {
+      if ((!(o instanceof AnnotatedApi beingRegistered))
+          || (!(node.getObject() instanceof AnnotatedApi alreadyRegistered))) {
         super.attachValueToNode(node, o);
         return;
       }

-      final AnnotatedApi beingRegistered = (AnnotatedApi) o;
-      final AnnotatedApi alreadyRegistered = (AnnotatedApi) node.getObject();
-      if (alreadyRegistered instanceof CommandAggregatingAnnotatedApi) {
-        final CommandAggregatingAnnotatedApi alreadyRegisteredAsCollapsing =
-            (CommandAggregatingAnnotatedApi) alreadyRegistered;
+      if (alreadyRegistered
+          instanceof CommandAggregatingAnnotatedApi alreadyRegisteredAsCollapsing) {
         alreadyRegisteredAsCollapsing.combineWith(beingRegistered);
       } else {
         final CommandAggregatingAnnotatedApi wrapperApi =
@@ -404,11 +402,10 @@ public void registerLazy(PluginBag.PluginHolder holder, Plug

   public static SpecProvider constructSpec(PluginInfo info) {
     Object specObj = info == null ? null : info.attributes.get("spec");
-    if (specObj != null && specObj instanceof Map) {
+    if (specObj != null && specObj instanceof Map map) {
       // Value from Map can be a Map because in PluginInfo(String, Map) we assign a
       // Map
       // assert false : "got a map when this should only be Strings";
-      Map map = (Map) specObj;
       return () -> ValidatingJsonMap.getDeepCopy(map, 4, false);
     } else {
       return HANDLER_NAME_SPEC_PROVIDER;
diff --git a/solr/core/src/java/org/apache/solr/api/ContainerPluginsRegistry.java b/solr/core/src/java/org/apache/solr/api/ContainerPluginsRegistry.java
index 4573a6e550e..20cd2440bbe 100644
--- a/solr/core/src/java/org/apache/solr/api/ContainerPluginsRegistry.java
+++ b/solr/core/src/java/org/apache/solr/api/ContainerPluginsRegistry.java
@@ -158,8 +158,7 @@ static class PluginMetaHolder {

     @Override
     public boolean equals(Object obj) {
-      if (obj instanceof PluginMetaHolder) {
-        PluginMetaHolder that = (PluginMetaHolder) obj;
+      if (obj instanceof PluginMetaHolder that) {
         return Objects.equals(this.original, that.original);
       }
       return false;
@@ -466,8 +465,7 @@ public static Class getConfigClass(ConfigurablePlugin
Used to send an arbitrary HTTP request to a Solr API endpoint. */
 public class ApiTool extends ToolBase {
+
+  private static final Option SOLR_URL_OPTION =
+      Option.builder("s")
+          .longOpt("solr-url")
+          .hasArg()
+          .argName("URL")
+          .required()
+          .desc("Send a GET request to a Solr API endpoint.")
+          .build();
+
   public ApiTool() {
     this(CLIO.getOutStream());
   }
@@ -51,50 +60,26 @@ public String getName() {
   }

   @Override
-  public List