diff --git a/build.gradle b/build.gradle index b612fe2bf58..f10d8a1c23e 100644 --- a/build.gradle +++ b/build.gradle @@ -23,7 +23,7 @@ plugins { id "com.palantir.consistent-versions" version "2.11.0" id "org.owasp.dependencycheck" version "8.0.1" id 'ca.cutterslade.analyze' version "1.9.0" - id 'de.thetaphi.forbiddenapis' version '3.5' apply false + id 'de.thetaphi.forbiddenapis' version '3.6' apply false id "de.undercouch.download" version "5.2.0" apply false id "net.ltgt.errorprone" version "3.0.1" apply false id 'com.diffplug.spotless' version "6.5.2" apply false diff --git a/gradle/validation/error-prone.gradle b/gradle/validation/error-prone.gradle index 9c80fea0c47..d2800242425 100644 --- a/gradle/validation/error-prone.gradle +++ b/gradle/validation/error-prone.gradle @@ -85,6 +85,7 @@ allprojects { prj -> // '-Xep:AutoValueConstructorOrderChecker:OFF', // we don't use autovalue '-Xep:BadAnnotationImplementation:ERROR', '-Xep:BadShiftAmount:ERROR', + '-Xep:BanClassLoader:ERROR', '-Xep:BanJNDI:ERROR', // todo - implement with forbidden APIs instead? 
'-Xep:BoxedPrimitiveEquality:ERROR', // '-Xep:BundleDeserializationCast:OFF', // we don't use android @@ -104,6 +105,7 @@ allprojects { prj -> '-Xep:DangerousLiteralNull:ERROR', '-Xep:DeadException:ERROR', '-Xep:DeadThread:ERROR', + '-Xep:DereferenceWithNullBranch:ERROR', '-Xep:DiscardedPostfixExpression:ERROR', // '-Xep:DoNotCall:OFF', // we don't use this annotation '-Xep:DoNotMock:ERROR', @@ -262,6 +264,7 @@ allprojects { prj -> '-Xep:AssertThrowsMultipleStatements:WARN', '-Xep:AssertionFailureIgnored:WARN', '-Xep:AssistedInjectAndInjectOnSameConstructor:WARN', + '-Xep:AttemptedNegativeZero:WARN', // '-Xep:AutoValueFinalMethods:OFF', // we don't use autovalue // '-Xep:AutoValueImmutableFields:OFF', // we don't use autovalue // '-Xep:AutoValueSubclassLeaked:OFF', // we don't use autovalue @@ -284,6 +287,7 @@ allprojects { prj -> '-Xep:ClassCanBeStatic:WARN', '-Xep:ClassNewInstance:WARN', // '-Xep:CloseableProvides:OFF', // we don't use this annotation + '-Xep:ClosingStandardOutputStreams:WARN', '-Xep:CollectionUndefinedEquality:WARN', '-Xep:CollectorShouldNotUseState:WARN', '-Xep:ComparableAndComparator:WARN', @@ -299,6 +303,7 @@ allprojects { prj -> // '-Xep:DoNotCallSuggester:OFF', // we don't use this annotation // '-Xep:DoNotClaimAnnotations:OFF', // we don't use this annotation // '-Xep:DoNotMockAutoValue:OFF', // we don't use autovalue + // '-Xep:DoNotUseRuleChain:OFF', // todo could be fixed but not easy // '-Xep:DoubleCheckedLocking:OFF', // todo check if useful or comment why not '-Xep:EmptyBlockTag:WARN', // '-Xep:EmptyCatch:OFF', // todo check if useful or comment why not - might be handled by ECJ? 
@@ -325,6 +330,7 @@ allprojects { prj -> // '-Xep:FutureReturnValueIgnored:OFF', // todo there are problems that should be fixed '-Xep:GetClassOnEnum:WARN', '-Xep:HidingField:WARN', + '-Xep:ICCProfileGetInstance:WARN', '-Xep:IdentityHashMapUsage:WARN', // '-Xep:ImmutableAnnotationChecker:OFF', // we don't use this annotation '-Xep:ImmutableEnumChecker:WARN', @@ -343,6 +349,7 @@ allprojects { prj -> // '-Xep:InlineMeInliner:OFF', // we don't use this annotation // '-Xep:InlineMeSuggester:OFF', // We don't use this annotation // '-Xep:InputStreamSlowMultibyteRead:OFF', // todo check if useful or comment why not + '-Xep:InlineTrivialConstant:WARN', '-Xep:InstanceOfAndCastMatchWrongType:WARN', '-Xep:IntLongMath:WARN', // '-Xep:InvalidBlockTag:OFF', // this is needed for tags like lucene.internal @@ -379,6 +386,7 @@ allprojects { prj -> '-Xep:LiteEnumValueOf:WARN', '-Xep:LiteProtoToString:WARN', '-Xep:LockNotBeforeTry:WARN', + '-Xep:LockOnNonEnclosingClassLiteral:WARN', '-Xep:LogicalAssignment:WARN', '-Xep:LongDoubleConversion:WARN', '-Xep:LongFloatConversion:WARN', @@ -390,25 +398,32 @@ allprojects { prj -> '-Xep:MissingFail:WARN', '-Xep:MissingImplementsComparable:WARN', '-Xep:MissingOverride:WARN', + // '-Xep:MissingRefasterAnnotation:OFF', // don't use Refaster // '-Xep:MissingSummary:OFF', // style preference that we don't want to enforce // '-Xep:MixedMutabilityReturnType:OFF', // todo check if useful or comment why not '-Xep:MockNotUsedInProduction:WARN', '-Xep:ModifiedButNotUsed:WARN', '-Xep:ModifyCollectionInEnhancedForLoop:WARN', '-Xep:ModifySourceCollectionInStream:WARN', + '-Xep:MultimapKeys:WARN', '-Xep:MultipleParallelOrSequentialCalls:WARN', '-Xep:MultipleUnaryOperatorsInMethodCall:WARN', + // '-Xep:MutableGuiceModule:OFF', // we don't use guice '-Xep:MutablePublicArray:WARN', + '-Xep:NamedLikeContextualKeyword:WARN', '-Xep:NarrowCalculation:WARN', '-Xep:NarrowingCompoundAssignment:WARN', // '-Xep:NegativeCharLiteral:OFF', // todo check if useful or 
comment why not '-Xep:NestedInstanceOfConditions:WARN', '-Xep:NewFileSystem:WARN', + //'-Xep:NonApiType:OFF', // todo could be fixed but lots of changes // '-Xep:NonAtomicVolatileUpdate:OFF', // todo check if useful or comment why not '-Xep:NonCanonicalType:WARN', '-Xep:NonOverridingEquals:WARN', + '-Xep:NotJavadoc:WARN', '-Xep:NullOptional:WARN', // '-Xep:NullableConstructor:OFF', // we don't use this annotation + '-Xep:NullableOptional:WARN', // '-Xep:NullablePrimitive:OFF', // we don't use this annotation // '-Xep:NullablePrimitiveArray:OFF', // we don't use this annotation // '-Xep:NullableVoid:OFF', // we don't use this annotation @@ -420,9 +435,10 @@ allprojects { prj -> '-Xep:OptionalNotPresent:WARN', '-Xep:OrphanedFormatString:WARN', // '-Xep:OutlineNone:OFF', // we don't use gwt + '-Xep:OverridingMethodInconsistentArgumentNamesChecker:WARN', '-Xep:OverrideThrowableToString:WARN', '-Xep:Overrides:WARN', - '-Xep:OverridesGuiceInjectableMethod:WARN', + // '-Xep:OverridesGuiceInjectableMethod:OFF', // we don't use guice '-Xep:ParameterName:WARN', '-Xep:PreconditionsCheckNotNullRepeated:WARN', '-Xep:PrimitiveAtomicReference:WARN', @@ -434,6 +450,7 @@ allprojects { prj -> '-Xep:ReachabilityFenceUsage:WARN', '-Xep:ReferenceEquality:WARN', '-Xep:RethrowReflectiveOperationExceptionAsLinkageError:WARN', + '-Xep:ReturnAtTheEndOfVoidFunction:WARN', '-Xep:ReturnFromVoid:WARN', '-Xep:RobolectricShadowDirectlyOn:WARN', '-Xep:RxReturnValueIgnored:WARN', @@ -446,6 +463,7 @@ allprojects { prj -> '-Xep:StaticMockMember:WARN', // '-Xep:StreamResourceLeak:OFF', // todo check if useful or comment why not '-Xep:StreamToIterable:WARN', + // '-Xep:StringCaseLocaleUsage:OFF', // we have forbiddenapis for that // '-Xep:StringSplitter:OFF', // todo check if useful or comment why not - might be able to use forbidden-apis for this? 
// '-Xep:SwigMemoryLeak:OFF', // we don't use swig // '-Xep:SynchronizeOnNonFinalField:OFF', // todo check if useful or comment why not @@ -457,6 +475,7 @@ allprojects { prj -> '-Xep:ToStringReturnsNull:WARN', // '-Xep:TruthAssertExpected:OFF', // we don't use truth // '-Xep:TruthConstantAsserts:OFF', // we don't use truth + // '-Xep:TruthContainsExactlyElementsInUsage:OFF', // we don't use truth // '-Xep:TruthGetOrDefault:OFF', // we don't use truth // '-Xep:TruthIncompatibleType:OFF', // we don't use truth '-Xep:TypeEquals:WARN', @@ -468,11 +487,14 @@ allprojects { prj -> '-Xep:UnescapedEntity:WARN', // '-Xep:UnicodeEscape:OFF', // can't enable since Lucene/Solr tests use unicode a bunch // '-Xep:UnnecessaryAssignment:OFF', // we don't use these annotations + '-Xep:UnnecessaryAsync:WARN', '-Xep:UnnecessaryLambda:WARN', '-Xep:UnnecessaryLongToIntConversion:WARN', '-Xep:UnnecessaryMethodInvocationMatcher:WARN', '-Xep:UnnecessaryMethodReference:WARN', // '-Xep:UnnecessaryParentheses:OFF', // style preference that we don't want to enforce + '-Xep:UnnecessaryStringBuilder:WARN', + // '-Xep:UnnecessaryTestMethodPrefix:OFF', // style preference that we don't want to enforce // '-Xep:UnqualifiedYield:OFF', // javac takes care '-Xep:UnrecognisedJavadocTag:WARN', '-Xep:UnsafeFinalization:WARN', diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index c8a3a1dd63c..d9e18452386 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -72,6 +72,10 @@ New Features * SOLR-16954: Make Circuit Breakers available for Update Requests (janhoy, Christine Poerschke, Pierre Salagnac) +* SOLR-15056: A new Circuit breaker for percentage of CPU utilization is added. The former "CPU" circuit breaker + is now more correctly named LoadAverageCircuitBreaker as it trips on system load average which is not a percentage. + Users of legacy CircuitBreakerManager are not affected by this change. 
(Walter Underwood, janhoy, Christine Poerschke, Atri Sharma) + * SOLR-15771: bin/auth creates reasonable roles and permissions for security: 'search', 'index', 'admin', and 'superadmin' and assigns user superadmin role. (Eric Pugh, janhoy) * SOLR-15367: Convert "rid" functionality into a default Tracer (Alex Deparvu, David Smiley) @@ -109,6 +113,8 @@ Improvements * SOLR-15474: Make Circuit breakers individually pluggable (Atri Sharma, Christine Poerschke, janhoy) +* SOLR-16982: Trip Circuit Breakers only for external requests (janhoy, Christine Poerschke) + * SOLR-16927: Allow SolrClientCache clients to use Jetty HTTP2 clients (Alex Deparvu, David Smiley) * SOLR-16896, SOLR-16897: Add support of OAuth 2.0/OIDC 'code with PKCE' flow (Lamine Idjeraoui, janhoy, Kevin Risden, Anshum Gupta) @@ -123,6 +129,8 @@ Improvements * SOLR-16970: SOLR_OPTS is now able to override options set by the Solr control scripts, "bin/solr" and "bin/solr.cmd". (Houston Putman) +* SOLR-16968: The MemoryCircuitBreaker now uses average heap usage over the last 30 seconds (janhoy, Christine Poerschke) + * SOLR-14886: Suppress stack traces in query response (Isabelle Giguere via Alex Deparvu) * SOLR-16461: `/solr/coreName/replication?command=backup` now has a v2 equivalent, available at @@ -134,6 +142,11 @@ Improvements * SOLR-15440: The Learning To Rank FieldValueFeature now uses DocValues when docValues=true and stored=true are combined. (Christine Poerschke, Tom Gilke) +* SOLR-16959: Make the internal CoresLocator implementation configurable in solr.xml (Vincent Primault via David Smiley) + +* SOLR-16967: Some ConfigSet operations formerly required that solrconfig.xml exist but should not have because + the name of the file is configurable when creating cores / collections. 
(David Smiley) + Optimizations --------------------- @@ -141,6 +154,9 @@ Optimizations * SOLR-16265: reduce memory usage of ContentWriter based requests in Http2SolrClient (Alex Deparvu, Kevin Risden, David Smiley) +* SOLR-16989: Optimize and consolidate reuse of DocValues iterators for value retrieval (Michael Gibney) + +* SOLR-17004: ZkStateReader waitForState should check clusterState before using watchers (Kevin Risden) Bug Fixes --------------------- @@ -192,11 +208,36 @@ Bug Fixes * SOLR-16971: RealTimeGet with Composite router throws NPE (Alex Deparvu) +* SOLR-16931: ReRankScaler explain breaks with debug=true and in distributed mode (Joel Bernstein) + +* SOLR-16983: Fixed a bug that could cause some usages of SolrStream to fail to close InputStreams from the server. + Also fixed the usage of ObjectReleaseTracker in SolrTestCaseJ4 to catch these kinds of bugs (hossman) + +* SOLR-16925: Fix indentation for JacksonJsonWriter (Houston Putman) + +* SOLR-16701: Fix race condition on PRS enabled collection deletion (Patson Luk) + +* SOLR-16991: Concurrent requests failing JWT authentication in Admin UI intermittently (Lamine Idjeraoui, janhoy) + +* SOLR-16997: OTEL configurator NPE when SOLR_HOST not set (janhoy) + +* PR#1963: Fix the admin UI green core-size graph on nodes screen (janhoy) + +* SOLR-16980: Connect to SOLR standalone with basic authentication (Alex Deparvu) + +* SOLR-16992: Non-reproducible StreamingTest failures -- suggests CloudSolrStream concurrency race condition (Alex Deparvu, hossman) + +* SOLR-16644: Fixing the entropy warning threshold using scaling based on poolsize (Raghavan Muthuregunathan) + Dependency Upgrades --------------------- * PR#1846: Update io.opentelemetry to 1.29.0 (Alex Deparvu) +* SOLR-16985: Upgrade Lucene to 9.8.0 (Alex Deparvu, Christine Poerschke) + +* PR#1971: Update forbiddenapis to 3.6 to support Java 21/22 and commons-io up to 2.14.0 (Uwe Schindler) + Other Changes --------------------- @@ -214,6 +255,10 @@ Other 
Changes * SOLR-16979: BATS integration tests now start solr instances on a randomly selected port (janhoy) +* SOLR-16978: Be case insensitive when parsing booleans from text (Tomás Fernández Löbbe) + +* SOLR-16960: Tests should sometimes run with a Tracer (not no-op) (Alex Deparvu) + ================== 9.3.0 ================== Upgrade Notes @@ -376,6 +421,9 @@ Improvements * SOLR-16878: Use Log4J JUL manager when starting Java. This is necessary for Lucene logs to be included with Solr logs. (Houston Putman, Uwe Schindler) +* SOLR-16397: Reload core v2 endpoints have been updated to be more REST-ful. + RELOAD is now available at `POST /api/cores/coreName/reload` (Sanjay Dutt via Jason Gerlowski) + Optimizations --------------------- diff --git a/solr/bin/solr b/solr/bin/solr index cc72f621094..101fde35a51 100644 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -1915,12 +1915,26 @@ function start_solr() { -jar start.jar "${SOLR_JETTY_CONFIG[@]}" $SOLR_JETTY_ADDL_CONFIG \ 1>"$SOLR_LOGS_DIR/solr-$SOLR_PORT-console.log" 2>&1 & echo $! > "$SOLR_PID_DIR/solr-$SOLR_PORT.pid" - # check if /proc/sys/kernel/random/entropy_avail exists then check output of cat /proc/sys/kernel/random/entropy_avail to see if less than 300 - if [[ -f /proc/sys/kernel/random/entropy_avail ]] && (( $(cat /proc/sys/kernel/random/entropy_avail) < 300)); then - echo "Warning: Available entropy is low. As a result, use of the UUIDField, SSL, or any other features that require" - echo "RNG might not work properly. To check for the amount of available entropy, use 'cat /proc/sys/kernel/random/entropy_avail'." 
- echo "" + # Get the current entropy available + entropy_avail=$(cat /proc/sys/kernel/random/entropy_avail) + + # Get the pool size + pool_size=$(cat /proc/sys/kernel/random/poolsize) + + # Check if entropy is available and pool size is non-zero + if [[ $entropy_avail -gt 0 && $pool_size -ne 0 ]]; then + # Compute the ratio of entropy available to pool size + ratio=$(awk -v ea="$entropy_avail" -v ps="$pool_size" 'BEGIN {print (ea/ps)*100}') + + # Check if the ratio is less than 25% + if (( $(echo "$ratio < 25" | bc -l) )); then + echo "Warning: Available entropy is low. As a result, use of the UUIDField, SSL, or any other features that require" + echo "RNG might not work properly. To check for the amount of available entropy, use 'cat /proc/sys/kernel/random/entropy_avail'." + fi + else + echo "Error: Either no entropy is available or the pool size is zero." fi + # no lsof on cygwin though if lsof -v 2>&1 | grep -q revision; then echo -n "Waiting up to $SOLR_START_WAIT seconds to see Solr running on port $SOLR_PORT" diff --git a/solr/core/src/java/org/apache/solr/cli/SimplePostTool.java b/solr/core/src/java/org/apache/solr/cli/SimplePostTool.java index 40153c27be0..35842eb1c38 100644 --- a/solr/core/src/java/org/apache/solr/cli/SimplePostTool.java +++ b/solr/core/src/java/org/apache/solr/cli/SimplePostTool.java @@ -426,7 +426,6 @@ private void doWebMode() { } catch (MalformedURLException e) { fatal("Wrong URL trying to append /extract to " + solrUrl); } - return; } private void doStdinMode() { diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkConfigSetService.java b/solr/core/src/java/org/apache/solr/cloud/ZkConfigSetService.java index 5f83f88c8eb..1718edbf56a 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkConfigSetService.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkConfigSetService.java @@ -139,10 +139,7 @@ public String configSetName(CoreDescriptor cd) { @Override public boolean checkConfigExists(String configName) throws IOException { 
try { - Boolean existsSolrConfigXml = - zkClient.exists(CONFIGS_ZKNODE + "/" + configName + "/solrconfig.xml", true); - if (existsSolrConfigXml == null) return false; - return existsSolrConfigXml; + return zkClient.exists(CONFIGS_ZKNODE + "/" + configName, true); } catch (KeeperException | InterruptedException e) { throw new IOException( "Error checking whether config exists", SolrZkClient.checkInterrupted(e)); diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index b35cbbef058..cdf90bdc7a4 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -26,7 +26,6 @@ import static org.apache.solr.common.params.CommonParams.METRICS_PATH; import static org.apache.solr.common.params.CommonParams.ZK_PATH; import static org.apache.solr.common.params.CommonParams.ZK_STATUS_PATH; -import static org.apache.solr.core.CorePropertiesLocator.PROPERTIES_FILENAME; import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP; import com.github.benmanes.caffeine.cache.Interner; @@ -377,21 +376,24 @@ public CoreContainer(Path solrHome, Properties properties) { * @see #load() */ public CoreContainer(NodeConfig config) { - this(config, new CorePropertiesLocator(config.getCoreRootDirectory())); + this(config, CoresLocator.instantiate(config)); } public CoreContainer(NodeConfig config, boolean asyncSolrCoreLoad) { - this(config, new CorePropertiesLocator(config.getCoreRootDirectory()), asyncSolrCoreLoad); + this(config, CoresLocator.instantiate(config), asyncSolrCoreLoad); } /** - * Create a new CoreContainer using the given configuration and locator. The container's cores are - * not loaded. + * Create a new CoreContainer using the given configuration and locator. + * + *

The container's cores are not loaded. This constructor should be used only in tests, as it + * overrides {@link CoresLocator}'s instantiation process. * * @param config a ConfigSolr representation of this container's configuration * @param locator a CoresLocator * @see #load() */ + @VisibleForTesting public CoreContainer(NodeConfig config, CoresLocator locator) { this(config, locator, false); } @@ -1945,9 +1947,7 @@ private CoreDescriptor reloadCoreDescriptor(CoreDescriptor oldDesc) { return null; } - CorePropertiesLocator cpl = new CorePropertiesLocator(null); - CoreDescriptor ret = - cpl.buildCoreDescriptor(oldDesc.getInstanceDir().resolve(PROPERTIES_FILENAME), this); + CoreDescriptor ret = getCoresLocator().reload(oldDesc, this); // Ok, this little jewel is all because we still create core descriptors on the fly from lists // of properties in tests particularly. Theoretically, there should be _no_ way to create a diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java index be3da5c3447..c060fc5d3cd 100644 --- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java +++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java @@ -16,6 +16,7 @@ */ package org.apache.solr.core; +import com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -51,6 +52,11 @@ public class CorePropertiesLocator implements CoresLocator { private final Path rootDirectory; + public CorePropertiesLocator(NodeConfig nodeConfig) { + this(nodeConfig.getCoreRootDirectory()); + } + + @VisibleForTesting public CorePropertiesLocator(Path coreDiscoveryRoot) { this.rootDirectory = coreDiscoveryRoot; log.debug("Config-defined core root directory: {}", this.rootDirectory); @@ -193,6 +199,11 @@ public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOExce return cds; } + @Override + 
public CoreDescriptor reload(CoreDescriptor cd, CoreContainer cc) { + return buildCoreDescriptor(cd.getInstanceDir().resolve(PROPERTIES_FILENAME), cc); + } + protected CoreDescriptor buildCoreDescriptor(Path propertiesFile, CoreContainer cc) { if (Files.notExists(propertiesFile)) { // This can happen in tests, see CoreContainer#reloadCoreDescriptor diff --git a/solr/core/src/java/org/apache/solr/core/CoresLocator.java b/solr/core/src/java/org/apache/solr/core/CoresLocator.java index 10eac005230..3321e057860 100644 --- a/solr/core/src/java/org/apache/solr/core/CoresLocator.java +++ b/solr/core/src/java/org/apache/solr/core/CoresLocator.java @@ -27,7 +27,7 @@ public interface CoresLocator { * @param cc the CoreContainer * @param coreDescriptors CoreDescriptors to persist */ - public void create(CoreContainer cc, CoreDescriptor... coreDescriptors); + void create(CoreContainer cc, CoreDescriptor... coreDescriptors); /** * Ensure that the core definitions from the passed in CoreDescriptors will persist across @@ -36,7 +36,7 @@ public interface CoresLocator { * @param cc the CoreContainer * @param coreDescriptors CoreDescriptors to persist */ - public void persist(CoreContainer cc, CoreDescriptor... coreDescriptors); + void persist(CoreContainer cc, CoreDescriptor... coreDescriptors); /** * Ensure that the core definitions from the passed in CoreDescriptors are not available for @@ -45,7 +45,7 @@ public interface CoresLocator { * @param cc the CoreContainer * @param coreDescriptors CoreDescriptors of the cores to remove */ - public void delete(CoreContainer cc, CoreDescriptor... coreDescriptors); + void delete(CoreContainer cc, CoreDescriptor... 
coreDescriptors); /** * Persist the new name of a renamed core @@ -54,7 +54,7 @@ public interface CoresLocator { * @param oldCD the CoreDescriptor of the core before renaming * @param newCD the CoreDescriptor of the core after renaming */ - public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD); + void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD); /** * Swap two core definitions @@ -63,7 +63,7 @@ public interface CoresLocator { * @param cd1 the core descriptor of the first core, after swapping * @param cd2 the core descriptor of the second core, after swapping */ - public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2); + void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2); /** * Load all the CoreDescriptors from persistence store @@ -71,5 +71,31 @@ public interface CoresLocator { * @param cc the CoreContainer * @return a list of all CoreDescriptors found */ - public List discover(CoreContainer cc); + List discover(CoreContainer cc); + + /** + * Reload a core descriptor. + * + * @param cd the old core descriptor + * @param cc the CoreContainer + * @return a new core descriptor + */ + CoreDescriptor reload(CoreDescriptor cd, CoreContainer cc); + + /** + * Returns a new instance of {@link CoresLocator}, depending on provided config. + * + * @param nodeConfig Solr configuration. 
+ */ + static CoresLocator instantiate(NodeConfig nodeConfig) { + final String coresLocatorClass = nodeConfig.getCoresLocatorClass(); + return nodeConfig + .getSolrResourceLoader() + .newInstance( + coresLocatorClass, + CoresLocator.class, + null, + new Class[] {NodeConfig.class}, + new Object[] {nodeConfig}); + } } diff --git a/solr/core/src/java/org/apache/solr/core/FileSystemConfigSetService.java b/solr/core/src/java/org/apache/solr/core/FileSystemConfigSetService.java index 5ada2f99cfb..7631edc9809 100644 --- a/solr/core/src/java/org/apache/solr/core/FileSystemConfigSetService.java +++ b/solr/core/src/java/org/apache/solr/core/FileSystemConfigSetService.java @@ -80,8 +80,7 @@ public String configSetName(CoreDescriptor cd) { @Override public boolean checkConfigExists(String configName) throws IOException { - Path solrConfigXmlFile = getConfigDir(configName).resolve("solrconfig.xml"); - return Files.exists(solrConfigXmlFile); + return Files.exists(getConfigDir(configName)); } @Override diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java index 9177889f447..387fe3beafb 100644 --- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java +++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java @@ -56,6 +56,7 @@ public class NodeConfig { private final String nodeName; private final Path coreRootDirectory; + private final String coresLocatorClass; private final Path solrDataHome; @@ -125,6 +126,7 @@ public class NodeConfig { private NodeConfig( String nodeName, Path coreRootDirectory, + String coresLocatorClass, Path solrDataHome, Integer booleanQueryMaxClauseCount, Path configSetBaseDirectory, @@ -162,6 +164,7 @@ private NodeConfig( // all Path params here are absolute and normalized. 
this.nodeName = nodeName; this.coreRootDirectory = coreRootDirectory; + this.coresLocatorClass = coresLocatorClass; this.solrDataHome = solrDataHome; this.booleanQueryMaxClauseCount = booleanQueryMaxClauseCount; this.configSetBaseDirectory = configSetBaseDirectory; @@ -271,6 +274,10 @@ public Path getCoreRootDirectory() { return coreRootDirectory; } + public String getCoresLocatorClass() { + return this.coresLocatorClass; + } + /** Absolute. */ public Path getSolrDataHome() { return solrDataHome; @@ -592,6 +599,7 @@ public static class NodeConfigBuilder { // all Path fields here are absolute and normalized. private SolrResourceLoader loader; private Path coreRootDirectory; + private String coresLocatorClass = DEFAULT_CORESLOCATORCLASS; private Path solrDataHome; private Integer booleanQueryMaxClauseCount; private Path configSetBaseDirectory; @@ -632,6 +640,8 @@ public static class NodeConfigBuilder { // No:of core load threads in cloud mode is set to a default of 8 public static final int DEFAULT_CORE_LOAD_THREADS_IN_CLOUD = 8; + private static final String DEFAULT_CORESLOCATORCLASS = + "org.apache.solr.core.CorePropertiesLocator"; private static final String DEFAULT_ADMINHANDLERCLASS = "org.apache.solr.handler.admin.CoreAdminHandler"; private static final String DEFAULT_INFOHANDLERCLASS = @@ -671,6 +681,11 @@ public NodeConfigBuilder setCoreRootDirectory(String coreRootDirectory) { return this; } + public NodeConfigBuilder setCoresLocatorClass(String coresLocatorClass) { + this.coresLocatorClass = coresLocatorClass; + return this; + } + public NodeConfigBuilder setSolrDataHome(String solrDataHomeString) { // keep it null unless explicitly set to non-empty value if (solrDataHomeString != null && !solrDataHomeString.isEmpty()) { @@ -755,8 +770,8 @@ public NodeConfigBuilder setReplayUpdatesThreads(int replayUpdatesThreads) { this.replayUpdatesThreads = replayUpdatesThreads; return this; } - // Remove in Solr 10.0 + @Deprecated public NodeConfigBuilder 
setTransientCacheSize(int transientCacheSize) { this.transientCacheSize = transientCacheSize; @@ -886,6 +901,7 @@ public NodeConfig build() { return new NodeConfig( nodeName, coreRootDirectory, + coresLocatorClass, solrDataHome, booleanQueryMaxClauseCount, configSetBaseDirectory, diff --git a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java index 8615e0a32ef..2b2933b0f86 100644 --- a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java +++ b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java @@ -16,16 +16,21 @@ */ package org.apache.solr.core; +import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.Locale; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.KnnVectorsWriter; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene95.Lucene95Codec; import org.apache.lucene.codecs.lucene95.Lucene95Codec.Mode; import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.util.NamedList; @@ -127,7 +132,8 @@ public KnnVectorsFormat getKnnVectorsFormatForField(String field) { if (DenseVectorField.HNSW_ALGORITHM.equals(knnAlgorithm)) { int maxConn = vectorType.getHnswMaxConn(); int beamWidth = vectorType.getHnswBeamWidth(); - return new Lucene95HnswVectorsFormat(maxConn, beamWidth); + var delegate = new Lucene95HnswVectorsFormat(maxConn, beamWidth); + return new SolrDelegatingKnnVectorsFormat(delegate, vectorType.getDimension()); } else { throw new SolrException( ErrorCode.SERVER_ERROR, 
knnAlgorithm + " KNN algorithm is not supported"); @@ -145,4 +151,34 @@ public Codec getCodec() { assert core != null : "inform must be called first"; return codec; } + + /** + * This class exists because Lucene95HnswVectorsFormat's getMaxDimensions method is final and we + * need to workaround that constraint to allow more than the default number of dimensions + */ + private static final class SolrDelegatingKnnVectorsFormat extends KnnVectorsFormat { + private final KnnVectorsFormat delegate; + private final int maxDimensions; + + public SolrDelegatingKnnVectorsFormat(KnnVectorsFormat delegate, int maxDimensions) { + super(delegate.getName()); + this.delegate = delegate; + this.maxDimensions = maxDimensions; + } + + @Override + public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return delegate.fieldsWriter(state); + } + + @Override + public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException { + return delegate.fieldsReader(state); + } + + @Override + public int getMaxDimensions(String fieldName) { + return maxDimensions; + } + } } diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index 944d4684016..cb741cc97b6 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -1088,9 +1088,6 @@ private SolrCore( solrMetricsContext = coreMetricManager.getSolrMetricsContext(); this.coreMetricManager.loadReporters(); - // init pluggable circuit breakers - initPlugins(null, CircuitBreaker.class); - if (updateHandler == null) { directoryFactory = initDirectoryFactory(); recoveryStrategyBuilder = initRecoveryStrategyBuilder(); @@ -1115,6 +1112,9 @@ private SolrCore( // initialize core metrics initializeMetrics(solrMetricsContext, null); + // init pluggable circuit breakers, after metrics because some circuit breakers use metrics + initPlugins(null, CircuitBreaker.class); + 
SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean(); // this is registered at the CONTAINER level because it's not core-specific - for now we // also register it here for back-compat @@ -1764,6 +1764,17 @@ private void doClose() { ExecutorUtil.shutdownAndAwaitTermination(coreAsyncTaskExecutor); + // Close circuit breakers that may have background threads, before metrics because some circuit + // breakers use metrics + try { + getCircuitBreakerRegistry().close(); + } catch (Throwable e) { + log.error("Exception closing circuit breakers", e); + if (e instanceof Error) { + throw (Error) e; + } + } + // stop reporting metrics try { coreMetricManager.close(); @@ -3041,10 +3052,10 @@ public PluginBag getResponseWriters() { try { m.put( "xlsx", - (QueryResponseWriter) - Class.forName("org.apache.solr.handler.extraction.XLSXResponseWriter") - .getConstructor() - .newInstance()); + Class.forName("org.apache.solr.handler.extraction.XLSXResponseWriter") + .asSubclass(QueryResponseWriter.class) + .getDeclaredConstructor() + .newInstance()); } catch (Exception e) { // don't worry; extraction module not in class path } diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java index df0645c8f69..cba8a277504 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java @@ -348,6 +348,9 @@ private static NodeConfig fillSolrSection(NodeConfig.NodeConfigBuilder builder, case "configSetService": builder.setConfigSetServiceClass(it.txt()); break; + case "coresLocator": + builder.setCoresLocatorClass(it.txt()); + break; case "coreRootDirectory": builder.setCoreRootDirectory(it.txt()); break; diff --git a/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java index fb40af3c081..f28a7a6a6de 100644 --- 
a/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java @@ -19,6 +19,7 @@ import static org.apache.solr.common.params.CommonParams.FAILURE; import static org.apache.solr.common.params.CommonParams.STATUS; +import java.lang.invoke.MethodHandles; import java.util.List; import org.apache.solr.client.solrj.SolrRequest.SolrRequestType; import org.apache.solr.common.SolrException; @@ -33,6 +34,8 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain; import org.apache.solr.util.circuitbreaker.CircuitBreaker; import org.apache.solr.util.circuitbreaker.CircuitBreakerRegistry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Shares common code between various handlers that manipulate {@link @@ -40,6 +43,8 @@ */ public abstract class ContentStreamHandlerBase extends RequestHandlerBase { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @Override public void init(NamedList args) { super.init(args); @@ -119,6 +124,12 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw * @return true if circuit breakers are tripped, false otherwise. 
*/ protected boolean checkCircuitBreakers(SolrQueryRequest req, SolrQueryResponse rsp) { + if (isInternalShardRequest(req)) { + if (log.isTraceEnabled()) { + log.trace("Internal request, skipping circuit breaker check"); + } + return false; + } CircuitBreakerRegistry circuitBreakerRegistry = req.getCore().getCircuitBreakerRegistry(); if (circuitBreakerRegistry.isEnabled(SolrRequestType.UPDATE)) { List trippedCircuitBreakers = diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java index ab40ff02543..096f81ee492 100644 --- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java @@ -1804,11 +1804,10 @@ private static Long readIntervalNs(String interval) { public static final String LEADER_URL = "leaderUrl"; - @Deprecated /** - * @deprecated: Only used for backwards compatibility. Use {@link #LEADER_URL} + * @deprecated Only used for backwards compatibility. Use {@link #LEADER_URL} */ - public static final String LEGACY_LEADER_URL = "masterUrl"; + @Deprecated public static final String LEGACY_LEADER_URL = "masterUrl"; public static final String FETCH_FROM_LEADER = "fetchFromLeader"; @@ -1817,11 +1816,11 @@ private static Long readIntervalNs(String interval) { // loss public static final String SKIP_COMMIT_ON_LEADER_VERSION_ZERO = "skipCommitOnLeaderVersionZero"; - @Deprecated /** - * @deprecated: Only used for backwards compatibility. Use {@link + * @deprecated Only used for backwards compatibility. 
Use {@link * #SKIP_COMMIT_ON_LEADER_VERSION_ZERO} */ + @Deprecated public static final String LEGACY_SKIP_COMMIT_ON_LEADER_VERSION_ZERO = "skipCommitOnMasterVersionZero"; diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java index 3a3c89f2b8a..2c53eb26625 100644 --- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java @@ -28,6 +28,7 @@ import org.apache.solr.api.ApiBag; import org.apache.solr.api.ApiSupport; import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SuppressForbidden; @@ -44,6 +45,7 @@ import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.search.SyntaxError; import org.apache.solr.security.PermissionNameProvider; +import org.apache.solr.update.processor.DistributedUpdateProcessor; import org.apache.solr.util.SolrPluginUtils; import org.apache.solr.util.TestInjection; import org.slf4j.Logger; @@ -343,4 +345,16 @@ public Collection getApis() { return Collections.singleton( new ApiBag.ReqHandlerToApi(this, ApiBag.constructSpec(pluginInfo))); } + + /** + * Checks whether the given request is an internal request to a shard. We rely on the fact that an + * internal search request to a shard contains the param "isShard", and an internal update request + * to a shard contains the param "distrib.from". 
+ * + * @return true if request is internal + */ + public static boolean isInternalShardRequest(SolrQueryRequest req) { + return req.getParams().get(DistributedUpdateProcessor.DISTRIB_FROM) != null + || "true".equals(req.getParams().get(ShardParams.IS_SHARD)); + } } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java index 8ee1bf3254a..d928282df5c 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java @@ -383,7 +383,6 @@ public Collection getApis() { apis.addAll(AnnotatedApi.getApis(new CreateCoreAPI(this))); apis.addAll(AnnotatedApi.getApis(new RejoinLeaderElectionAPI(this))); apis.addAll(AnnotatedApi.getApis(new OverseerOperationAPI(this))); - apis.addAll(AnnotatedApi.getApis(new ReloadCoreAPI(this))); apis.addAll(AnnotatedApi.getApis(new SwapCoresAPI(this))); apis.addAll(AnnotatedApi.getApis(new RenameCoreAPI(this))); apis.addAll(AnnotatedApi.getApis(new UnloadCoreAPI(this))); @@ -403,7 +402,11 @@ public Collection getApis() { @Override public Collection> getJerseyResources() { return List.of( - CoreSnapshotAPI.class, InstallCoreDataAPI.class, BackupCoreAPI.class, RestoreCoreAPI.class); + CoreSnapshotAPI.class, + InstallCoreDataAPI.class, + BackupCoreAPI.class, + RestoreCoreAPI.class, + ReloadCoreAPI.class); } public interface CoreAdminOp { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java index 31251759c76..987c79d376b 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java @@ -58,6 +58,7 @@ import java.nio.file.Path; import java.util.Locale; import java.util.Map; +import org.apache.solr.client.api.model.SolrJerseyResponse; import 
org.apache.solr.cloud.ZkController; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; @@ -72,6 +73,7 @@ import org.apache.solr.core.SolrCore; import org.apache.solr.handler.admin.CoreAdminHandler.CoreAdminOp; import org.apache.solr.handler.admin.api.CoreSnapshotAPI; +import org.apache.solr.handler.admin.api.ReloadCoreAPI; import org.apache.solr.handler.api.V2ApiUtils; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.update.UpdateLog; @@ -133,7 +135,13 @@ public enum CoreAdminOperation implements CoreAdminOp { SolrParams params = it.req.getParams(); String cname = params.required().get(CoreAdminParams.CORE); - it.handler.coreContainer.reload(cname); + ReloadCoreAPI reloadCoreAPI = + new ReloadCoreAPI( + it.req, it.rsp, it.handler.coreContainer, it.handler.getCoreAdminAsyncTracker()); + ReloadCoreAPI.ReloadCoreRequestBody reloadCoreRequestBody = + new ReloadCoreAPI.ReloadCoreRequestBody(); + SolrJerseyResponse response = reloadCoreAPI.reloadCore(cname, reloadCoreRequestBody); + V2ApiUtils.squashIntoSolrResponseWithoutHeader(it.rsp, response); }), STATUS_OP(STATUS, new StatusOp()), SWAP_OP( diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java index df9453d3da6..d56dbb6506f 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java @@ -20,7 +20,6 @@ import java.lang.invoke.MethodHandles; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicReference; import org.apache.solr.cloud.CloudDescriptor; import org.apache.solr.cloud.ZkController.NotInClusterStateException; import org.apache.solr.cloud.ZkShardTerms; @@ -75,7 +74,6 @@ public void execute(CallInfo it) throws Exception { collectionName = 
core.getCoreDescriptor().getCloudDescriptor().getCollectionName(); cloudDescriptor = core.getCoreDescriptor().getCloudDescriptor(); } - AtomicReference errorMessage = new AtomicReference<>(); try { coreContainer .getZkController() @@ -201,9 +199,8 @@ public void execute(CallInfo it) throws Exception { return false; }); } catch (TimeoutException | InterruptedException e) { - String error = errorMessage.get(); - if (error == null) error = "Timeout waiting for collection state."; - throw new NotInClusterStateException(ErrorCode.SERVER_ERROR, error); + throw new NotInClusterStateException( + ErrorCode.SERVER_ERROR, "Timeout waiting for collection state."); } } } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/ReloadCoreAPI.java b/solr/core/src/java/org/apache/solr/handler/admin/api/ReloadCoreAPI.java index e330d713340..b0252b83fa9 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/api/ReloadCoreAPI.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/api/ReloadCoreAPI.java @@ -17,52 +17,69 @@ package org.apache.solr.handler.admin.api; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; -import static org.apache.solr.common.params.CommonParams.ACTION; -import static org.apache.solr.handler.ClusterAPI.wrapParams; +import static org.apache.solr.client.solrj.impl.BinaryResponseParser.BINARY_CONTENT_TYPE_V2; import static org.apache.solr.security.PermissionNameProvider.Name.CORE_EDIT_PERM; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import org.apache.solr.api.Command; -import org.apache.solr.api.EndPoint; -import org.apache.solr.api.PayloadObj; -import org.apache.solr.common.params.CoreAdminParams; -import org.apache.solr.common.util.ReflectMapWriter; +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.parameters.RequestBody; +import 
javax.inject.Inject; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import org.apache.solr.client.api.model.SolrJerseyResponse; +import org.apache.solr.core.CoreContainer; import org.apache.solr.handler.admin.CoreAdminHandler; +import org.apache.solr.jersey.JacksonReflectMapWriter; +import org.apache.solr.jersey.PermissionName; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; /** * V2 API for reloading an individual core. * - *

The new API (POST /v2/cores/coreName {'reload': {...}}) is equivalent to the v1 - * /admin/cores?action=reload command. - * - * @see ReloadCorePayload + *

The new API (POST /v2/cores/coreName/reload is analogous to the v1 /admin/cores?action=RELOAD + * command. */ -@EndPoint( - path = {"/cores/{core}"}, - method = POST, - permission = CORE_EDIT_PERM) -public class ReloadCoreAPI { - private static final String V2_RELOAD_CORE_CMD = "reload"; - - private final CoreAdminHandler coreHandler; +@Path("/cores/{coreName}/reload") +public class ReloadCoreAPI extends CoreAdminAPIBase { - public ReloadCoreAPI(CoreAdminHandler coreHandler) { - this.coreHandler = coreHandler; + @Inject + public ReloadCoreAPI( + SolrQueryRequest solrQueryRequest, + SolrQueryResponse solrQueryResponse, + CoreContainer coreContainer, + CoreAdminHandler.CoreAdminAsyncTracker coreAdminAsyncTracker) { + super(coreContainer, coreAdminAsyncTracker, solrQueryRequest, solrQueryResponse); } - @Command(name = V2_RELOAD_CORE_CMD) - public void reloadCore(PayloadObj obj) throws Exception { - final String coreName = obj.getRequest().getPathTemplateValues().get(CoreAdminParams.CORE); - - final Map v1Params = new HashMap<>(); - v1Params.put(ACTION, CoreAdminParams.CoreAdminAction.RELOAD.name().toLowerCase(Locale.ROOT)); - v1Params.put(CoreAdminParams.CORE, coreName); - - coreHandler.handleRequestBody(wrapParams(obj.getRequest(), v1Params), obj.getResponse()); + @POST + @Produces({"application/json", "application/xml", BINARY_CONTENT_TYPE_V2}) + @PermissionName(CORE_EDIT_PERM) + public SolrJerseyResponse reloadCore( + @Parameter(description = "The name of the core to reload.", required = true) + @PathParam("coreName") + String coreName, + @Schema(description = "Additional parameters for reloading the core") @RequestBody + ReloadCoreAPI.ReloadCoreRequestBody reloadCoreRequestBody) + throws Exception { + SolrJerseyResponse solrJerseyResponse = instantiateJerseyResponse(SolrJerseyResponse.class); + return handlePotentiallyAsynchronousTask( + solrJerseyResponse, + coreName, + (reloadCoreRequestBody == null) ? 
null : reloadCoreRequestBody.async, + "reload", + () -> { + coreContainer.reload(coreName); + return solrJerseyResponse; + }); } - public static class ReloadCorePayload implements ReflectMapWriter {} + public static class ReloadCoreRequestBody implements JacksonReflectMapWriter { + @Schema(description = "Request ID to track this action which will be processed asynchronously.") + @JsonProperty("async") + public String async; + } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java b/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java index 8c0201be2d0..2b0926ba2ee 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java +++ b/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java @@ -190,6 +190,5 @@ public void accumulateMissing() throws IOException { } } } - return; } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java index 3d5b7eb28af..61816a7c5f1 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java @@ -329,8 +329,7 @@ protected ElevationProvider handleConfigLoadingException(E * * @return The cached or loaded {@link ElevationProvider}. */ - @VisibleForTesting - ElevationProvider getElevationProvider(IndexReader reader, SolrCore core) { + protected ElevationProvider getElevationProvider(IndexReader reader, SolrCore core) { synchronized (LOCK) { if (cacheElevationProvider != null && Objects.equals(cacheIndexReader.get(), reader)) { return cacheElevationProvider; // cache hit ! @@ -377,7 +376,8 @@ protected long getConfigVersion(SolrCore core) { * * @return The loaded {@link ElevationProvider}; not null. 
*/ - private ElevationProvider loadElevationProvider(SolrCore core) throws IOException, SAXException { + protected ElevationProvider loadElevationProvider(SolrCore core) + throws IOException, SAXException { Document xmlDocument; try { xmlDocument = SafeXMLParsing.parseConfigXML(log, core.getResourceLoader(), configFileName); diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java index 7944f4840ce..a34cd99d8be 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java @@ -81,6 +81,7 @@ import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.DocList; +import org.apache.solr.search.DocValuesIteratorCache; import org.apache.solr.search.QueryUtils; import org.apache.solr.search.ReturnFields; import org.apache.solr.search.SolrDocumentFetcher; @@ -238,6 +239,7 @@ public void process(ResponseBuilder rb) throws IOException { boolean opennedRealtimeSearcher = false; BytesRefBuilder idBytes = new BytesRefBuilder(); + DocValuesIteratorCache reuseDvIters = null; for (String idStr : reqIds.allIds) { fieldType.readableToIndexed(idStr, idBytes); // if _route_ is passed, id is a child doc. 
TODO remove in SOLR-15064 @@ -348,7 +350,11 @@ public void process(ResponseBuilder rb) throws IOException { searcherInfo.getSearcher().doc(docid, rsp.getReturnFields().getLuceneFieldNames()); SolrDocument doc = toSolrDoc(luceneDocument, core.getLatestSchema()); SolrDocumentFetcher docFetcher = searcherInfo.getSearcher().getDocFetcher(); - docFetcher.decorateDocValueFields(doc, docid, docFetcher.getNonStoredDVs(true)); + if (reuseDvIters == null) { + reuseDvIters = new DocValuesIteratorCache(searcherInfo.getSearcher()); + } + docFetcher.decorateDocValueFields( + doc, docid, docFetcher.getNonStoredDVs(true), reuseDvIters); if (null != transformer) { if (null == resultContext) { // either first pass, or we've re-opened searcher - either way now we setContext @@ -575,7 +581,11 @@ private static SolrDocument mergePartialDocWithFullDocFromIndex( if (!doc.containsKey(VERSION_FIELD)) { searcher .getDocFetcher() - .decorateDocValueFields(doc, docid, Collections.singleton(VERSION_FIELD)); + .decorateDocValueFields( + doc, + docid, + Collections.singleton(VERSION_FIELD), + new DocValuesIteratorCache(searcher, false)); } long docVersion = (long) doc.getFirstValue(VERSION_FIELD); diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java index 995e6a3c684..744d6093222 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java @@ -357,6 +357,12 @@ protected ResponseBuilder newResponseBuilder( */ protected boolean checkCircuitBreakers( SolrQueryRequest req, SolrQueryResponse rsp, ResponseBuilder rb) { + if (isInternalShardRequest(req)) { + if (log.isTraceEnabled()) { + log.trace("Internal request, skipping circuit breaker check"); + } + return false; + } final RTimerTree timer = rb.isDebug() ? 
req.getRequestTimer() : null; final CircuitBreakerRegistry circuitBreakerRegistry = req.getCore().getCircuitBreakerRegistry(); diff --git a/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java index fbeccdc4c0d..cf32497b5d7 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java @@ -21,10 +21,14 @@ import org.apache.lucene.util.BytesRef; import org.apache.solr.common.MapWriter; import org.apache.solr.schema.FieldType; +import org.apache.solr.search.DocValuesIteratorCache; class BoolFieldWriter extends StringFieldWriter { - public BoolFieldWriter(String field, FieldType fieldType) { - super(field, fieldType); + public BoolFieldWriter( + String field, + FieldType fieldType, + DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { + super(field, fieldType, docValuesCache); } @Override diff --git a/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java index c86f0eacac1..7e32e98d5b8 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java @@ -17,58 +17,19 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; import java.util.Date; -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; import org.apache.solr.common.MapWriter; +import org.apache.solr.search.DocValuesIteratorCache; -class DateFieldWriter extends FieldWriter { - private String field; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); - - public DateFieldWriter(String field) { - this.field = field; +class DateFieldWriter extends LongFieldWriter { 
+ public DateFieldWriter( + String field, DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { + super(field, docValuesCache); } @Override - public boolean write( - SortDoc sortDoc, LeafReaderContext readerContext, MapWriter.EntryWriter ew, int fieldIndex) - throws IOException { - Long val; - SortValue sortValue = sortDoc.getSortValue(this.field); - if (sortValue != null) { - if (sortValue.isPresent()) { - val = (long) sortValue.getCurrentValue(); - } else { // empty-value - return false; - } - } else { - // field is not part of 'sort' param, but part of 'fl' param - int readerOrd = readerContext.ord; - NumericDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - NumericDocValues numericDocValues = docValuesCache.get(readerOrd); - if (numericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. - vals = numericDocValues; - } - } - - if (vals == null) { - vals = DocValues.getNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - - if (vals.advance(sortDoc.docId) == sortDoc.docId) { - val = vals.longValue(); - } else { - return false; - } - } - ew.put(this.field, new Date(val)); - return true; + protected void doWrite(MapWriter.EntryWriter ew, long val) throws IOException { + ew.put(field, new Date(val)); } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java index 50cbdddb385..e439560894b 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java @@ -17,57 +17,46 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import 
org.apache.solr.common.MapWriter; +import org.apache.solr.search.DocValuesIteratorCache; class DoubleFieldWriter extends FieldWriter { - private String field; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); + private final String field; + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; - public DoubleFieldWriter(String field) { + public DoubleFieldWriter( + String field, DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; + this.docValuesCache = docValuesCache; } @Override public boolean write( SortDoc sortDoc, LeafReaderContext readerContext, MapWriter.EntryWriter ew, int fieldIndex) throws IOException { + double val; SortValue sortValue = sortDoc.getSortValue(this.field); if (sortValue != null) { if (sortValue.isPresent()) { - double val = (double) sortValue.getCurrentValue(); - ew.put(this.field, val); - return true; + val = (double) sortValue.getCurrentValue(); } else { // empty-value return false; } } else { // field is not part of 'sort' param, but part of 'fl' param - int readerOrd = readerContext.ord; - NumericDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - NumericDocValues numericDocValues = docValuesCache.get(readerOrd); - if (numericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. 
- vals = numericDocValues; - } - } - - if (vals == null) { - vals = DocValues.getNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - if (vals.advance(sortDoc.docId) == sortDoc.docId) { - long val = vals.longValue(); - ew.put(this.field, Double.longBitsToDouble(val)); - return true; + NumericDocValues vals = + docValuesCache.getNumericDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); + if (vals != null) { + val = Double.longBitsToDouble(vals.longValue()); } else { return false; } } + ew.put(this.field, val); + return true; } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java index 8f38c94942e..51ba5551b69 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java @@ -74,6 +74,7 @@ import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.SortableTextField; import org.apache.solr.schema.StrField; +import org.apache.solr.search.DocValuesIteratorCache; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SortSpec; import org.apache.solr.search.SyntaxError; @@ -99,6 +100,14 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable { public static final int DEFAULT_BATCH_SIZE = 30000; public static final int DEFAULT_QUEUE_SIZE = 150000; + private static final FieldWriter EMPTY_FIELD_WRITER = + new FieldWriter() { + @Override + public boolean write( + SortDoc sortDoc, LeafReaderContext readerContext, EntryWriter out, int fieldIndex) { + return false; + } + }; private OutputStreamWriter respWriter; final SolrQueryRequest req; @@ -480,6 +489,7 @@ public FieldWriter[] getFieldWriters(String[] fields, SolrIndexSearcher searcher throws IOException { IndexSchema schema = searcher.getSchema(); FieldWriter[] writers = new FieldWriter[fields.length]; + DocValuesIteratorCache 
dvIterCache = new DocValuesIteratorCache(searcher, false); for (int i = 0; i < fields.length; i++) { String field = fields[i]; SchemaField schemaField = null; @@ -501,47 +511,51 @@ public FieldWriter[] getFieldWriters(String[] fields, SolrIndexSearcher searcher schemaField + " Must have useDocValuesAsStored='true' to be used with export writer"); } - if (fieldType instanceof IntValueFieldType) { + DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache = dvIterCache.getSupplier(field); + + if (docValuesCache == null) { + writers[i] = EMPTY_FIELD_WRITER; + } else if (fieldType instanceof IntValueFieldType) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache); } else { - writers[i] = new IntFieldWriter(field); + writers[i] = new IntFieldWriter(field, docValuesCache); } } else if (fieldType instanceof LongValueFieldType) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache); } else { - writers[i] = new LongFieldWriter(field); + writers[i] = new LongFieldWriter(field, docValuesCache); } } else if (fieldType instanceof FloatValueFieldType) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache); } else { - writers[i] = new FloatFieldWriter(field); + writers[i] = new FloatFieldWriter(field, docValuesCache); } } else if (fieldType instanceof DoubleValueFieldType) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache); } else { - writers[i] = new DoubleFieldWriter(field); + writers[i] = new DoubleFieldWriter(field, docValuesCache); } } else if (fieldType 
instanceof StrField || fieldType instanceof SortableTextField) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache); } else { - writers[i] = new StringFieldWriter(field, fieldType); + writers[i] = new StringFieldWriter(field, fieldType, docValuesCache); } } else if (fieldType instanceof DateValueFieldType) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache); } else { - writers[i] = new DateFieldWriter(field); + writers[i] = new DateFieldWriter(field, docValuesCache); } } else if (fieldType instanceof BoolField) { if (multiValued) { - writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true); + writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache); } else { - writers[i] = new BoolFieldWriter(field, fieldType); + writers[i] = new BoolFieldWriter(field, fieldType, docValuesCache); } } else { throw new IOException( diff --git a/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java index 48bd9d632ed..a60c14e6b0a 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java @@ -17,58 +17,46 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.solr.common.MapWriter; +import org.apache.solr.search.DocValuesIteratorCache; class FloatFieldWriter extends FieldWriter { - private String field; - private IntObjectHashMap docValuesCache = new 
IntObjectHashMap<>(); + private final String field; + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; - public FloatFieldWriter(String field) { + public FloatFieldWriter( + String field, DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; + this.docValuesCache = docValuesCache; } @Override public boolean write( SortDoc sortDoc, LeafReaderContext readerContext, MapWriter.EntryWriter ew, int fieldIndex) throws IOException { + float val; SortValue sortValue = sortDoc.getSortValue(this.field); if (sortValue != null) { if (sortValue.isPresent()) { - float val = (float) sortValue.getCurrentValue(); - ew.put(this.field, val); - return true; + val = (float) sortValue.getCurrentValue(); } else { // empty-value return false; } } else { // field is not part of 'sort' param, but part of 'fl' param - int readerOrd = readerContext.ord; - NumericDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - NumericDocValues numericDocValues = docValuesCache.get(readerOrd); - if (numericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. 
- vals = numericDocValues; - } - } - - if (vals == null) { - vals = DocValues.getNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - - if (vals.advance(sortDoc.docId) == sortDoc.docId) { - int val = (int) vals.longValue(); - ew.put(this.field, Float.intBitsToFloat(val)); - return true; + NumericDocValues vals = + docValuesCache.getNumericDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); + if (vals != null) { + val = Float.intBitsToFloat((int) vals.longValue()); } else { return false; } } + ew.put(this.field, val); + return true; } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java index 1ecef85f21b..bf0396d4ab8 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java @@ -17,19 +17,20 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.solr.common.MapWriter; +import org.apache.solr.search.DocValuesIteratorCache; class IntFieldWriter extends FieldWriter { - private String field; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); + private final String field; + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; - public IntFieldWriter(String field) { + public IntFieldWriter( + String field, DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; + this.docValuesCache = docValuesCache; } @Override @@ -46,22 +47,10 @@ public boolean write( } } else { // field is not part of 'sort' param, but part of 'fl' param - int readerOrd = readerContext.ord; - NumericDocValues vals = null; - if 
(docValuesCache.containsKey(readerOrd)) { - NumericDocValues numericDocValues = docValuesCache.get(readerOrd); - if (numericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. - vals = numericDocValues; - } - } - - if (vals == null) { - vals = DocValues.getNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - - if (vals.advance(sortDoc.docId) == sortDoc.docId) { + NumericDocValues vals = + docValuesCache.getNumericDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); + if (vals != null) { val = (int) vals.longValue(); } else { return false; diff --git a/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java index 9c18a72bd6d..7961549477c 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java @@ -17,20 +17,21 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.solr.common.MapWriter; +import org.apache.solr.search.DocValuesIteratorCache; class LongFieldWriter extends FieldWriter { - private String field; + protected final String field; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; - public LongFieldWriter(String field) { + public LongFieldWriter( + String field, DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; + this.docValuesCache = docValuesCache; } @Override @@ -47,28 +48,20 @@ public boolean write( } } else { // field is not part of 'sort' param, but part of 'fl' param - int readerOrd = 
readerContext.ord; - NumericDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - NumericDocValues numericDocValues = docValuesCache.get(readerOrd); - if (numericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. - vals = numericDocValues; - } - } - - if (vals == null) { - vals = DocValues.getNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - - if (vals.advance(sortDoc.docId) == sortDoc.docId) { + NumericDocValues vals = + docValuesCache.getNumericDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); + if (vals != null) { val = vals.longValue(); } else { return false; } } - ew.put(field, val); + doWrite(ew, val); return true; } + + protected void doWrite(MapWriter.EntryWriter ew, long val) throws IOException { + ew.put(field, val); + } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java index 86dd8ba82e3..7f5bdee4899 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java @@ -17,11 +17,9 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; import java.util.Date; import java.util.function.LongFunction; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; @@ -33,18 +31,23 @@ import org.apache.solr.common.MapWriter; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.SchemaField; +import org.apache.solr.search.DocValuesIteratorCache; class MultiFieldWriter extends FieldWriter { - private String field; - private FieldType fieldType; - private SchemaField schemaField; - private boolean numeric; - 
private CharsRefBuilder cref = new CharsRefBuilder(); + private final String field; + private final FieldType fieldType; + private final SchemaField schemaField; + private final boolean numeric; + private final CharsRefBuilder cref = new CharsRefBuilder(); private final LongFunction bitsToValue; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; public MultiFieldWriter( - String field, FieldType fieldType, SchemaField schemaField, boolean numeric) { + String field, + FieldType fieldType, + SchemaField schemaField, + boolean numeric, + DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; this.fieldType = fieldType; this.schemaField = schemaField; @@ -54,6 +57,7 @@ public MultiFieldWriter( } else { bitsToValue = null; } + this.docValuesCache = docValuesCache; } @Override @@ -61,24 +65,13 @@ public boolean write( SortDoc sortDoc, LeafReaderContext readerContext, MapWriter.EntryWriter out, int fieldIndex) throws IOException { if (this.fieldType.isPointField()) { - int readerOrd = readerContext.ord; - SortedNumericDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - SortedNumericDocValues sortedNumericDocValues = - (SortedNumericDocValues) docValuesCache.get(readerOrd); - if (sortedNumericDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. 
- vals = sortedNumericDocValues; - } - } - + SortedNumericDocValues vals = + docValuesCache.getSortedNumericDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); if (vals == null) { - vals = DocValues.getSortedNumeric(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); + return false; } - if (!vals.advanceExact(sortDoc.docId)) return false; - final SortedNumericDocValues docVals = vals; out.put( @@ -91,32 +84,21 @@ public boolean write( }); return true; } else { - int readerOrd = readerContext.ord; - SortedSetDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - SortedSetDocValues sortedSetDocValues = (SortedSetDocValues) docValuesCache.get(readerOrd); - if (sortedSetDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. - vals = sortedSetDocValues; - } - } - + SortedSetDocValues vals = + docValuesCache.getSortedSetDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); if (vals == null) { - vals = DocValues.getSortedSet(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); + return false; } - if (vals.advance(sortDoc.docId) != sortDoc.docId) return false; - final SortedSetDocValues docVals = vals; out.put( this.field, (IteratorWriter) w -> { - long o; - while ((o = docVals.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef ref = docVals.lookupOrd(o); + for (int i = 0, count = docVals.docValueCount(); i < count; i++) { + BytesRef ref = docVals.lookupOrd(docVals.nextOrd()); fieldType.indexedToReadable(ref, cref); IndexableField f = fieldType.createField(schemaField, cref.toString()); if (f == null) w.add(cref.toString()); diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java index 7e15704f32d..2f8d0963e3a 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java +++ 
b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java @@ -17,9 +17,7 @@ package org.apache.solr.handler.export; -import com.carrotsearch.hppc.IntObjectHashMap; import java.io.IOException; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.BytesRef; @@ -28,13 +26,14 @@ import org.apache.solr.common.util.ByteArrayUtf8CharSequence; import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.schema.FieldType; +import org.apache.solr.search.DocValuesIteratorCache; class StringFieldWriter extends FieldWriter { - protected String field; - private FieldType fieldType; + protected final String field; + private final FieldType fieldType; private BytesRef lastRef; private int lastOrd = -1; - private IntObjectHashMap docValuesCache = new IntObjectHashMap<>(); + private final DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache; protected CharsRefBuilder cref = new CharsRefBuilder(); final ByteArrayUtf8CharSequence utf8 = @@ -50,9 +49,13 @@ public String toString() { } }; - public StringFieldWriter(String field, FieldType fieldType) { + public StringFieldWriter( + String field, + FieldType fieldType, + DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache) { this.field = field; this.fieldType = fieldType; + this.docValuesCache = docValuesCache; } @Override @@ -82,23 +85,10 @@ public boolean write( } if (ref == null) { - // Reuse the last DocValues object if possible - int readerOrd = readerContext.ord; - SortedDocValues vals = null; - if (docValuesCache.containsKey(readerOrd)) { - SortedDocValues sortedDocValues = docValuesCache.get(readerOrd); - if (sortedDocValues.docID() < sortDoc.docId) { - // We have not advanced beyond the current docId so we can use this docValues. 
- vals = sortedDocValues; - } - } - + SortedDocValues vals = + docValuesCache.getSortedDocValues( + sortDoc.docId, readerContext.reader(), readerContext.ord); if (vals == null) { - vals = DocValues.getSorted(readerContext.reader(), this.field); - docValuesCache.put(readerOrd, vals); - } - - if (vals.advance(sortDoc.docId) != sortDoc.docId) { return false; } diff --git a/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java b/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java index b066ba373fd..85084f5887b 100644 --- a/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java +++ b/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java @@ -50,7 +50,7 @@ import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.util.Bits; import org.apache.lucene.util.Version; import org.apache.lucene.util.packed.PackedInts; @@ -368,14 +368,14 @@ public ByteVectorValues getByteVectorValues(String field) { } @Override - public TopDocs searchNearestVectors( - String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException { + public void searchNearestVectors( + String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { throw new UnsupportedOperationException(); } @Override - public TopDocs searchNearestVectors( - String field, byte[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException { + public void searchNearestVectors( + String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { throw new UnsupportedOperationException(); } diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java index bd63e9e8a22..fa1034797c8 100644 --- 
a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java @@ -897,7 +897,7 @@ public static String enforcePrefix(String name) { if (name.startsWith(REGISTRY_NAME_PREFIX)) { return name; } else { - return new StringBuilder(REGISTRY_NAME_PREFIX).append(name).toString(); + return REGISTRY_NAME_PREFIX + name; } } @@ -912,8 +912,7 @@ public static String enforcePrefix(String name) { */ public static String getRegistryName(SolrInfoBean.Group group, String... names) { String fullName; - String prefix = - new StringBuilder(REGISTRY_NAME_PREFIX).append(group.name()).append('.').toString(); + String prefix = REGISTRY_NAME_PREFIX + group.name() + '.'; // check for existing prefix and group if (names != null && names.length > 0 && names[0] != null && names[0].startsWith(prefix)) { // assume the first segment already was expanded diff --git a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java index 337ce85974b..9711081f950 100644 --- a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java +++ b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java @@ -60,7 +60,6 @@ public void handleConflict( // if we aren't handling as a list, and we aren't handling as a map, then just overwrite (i.e. // nothing else to do) - return; } // merges srcMap onto targetMap (i.e. 
changes targetMap but not srcMap) diff --git a/solr/core/src/java/org/apache/solr/response/JacksonJsonWriter.java b/solr/core/src/java/org/apache/solr/response/JacksonJsonWriter.java index 7a1ff5276a8..4de5039ba1e 100644 --- a/solr/core/src/java/org/apache/solr/response/JacksonJsonWriter.java +++ b/solr/core/src/java/org/apache/solr/response/JacksonJsonWriter.java @@ -19,7 +19,6 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.PrettyPrinter; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import java.io.IOException; import java.io.OutputStream; @@ -32,7 +31,7 @@ public class JacksonJsonWriter extends BinaryResponseWriter { protected final JsonFactory jsonfactory; - protected static final PrettyPrinter pretty = + protected static final DefaultPrettyPrinter pretty = new DefaultPrettyPrinter() .withoutSpacesInObjectEntries() .withArrayIndenter(DefaultPrettyPrinter.NopIndenter.instance); @@ -71,7 +70,7 @@ public WriterImpl( try { gen = j.createGenerator(out, JsonEncoding.UTF8); if (doIndent) { - gen.setPrettyPrinter(pretty); + gen.setPrettyPrinter(pretty.createInstance()); } } catch (IOException e) { throw new RuntimeException(e); diff --git a/solr/core/src/java/org/apache/solr/schema/DenseVectorField.java b/solr/core/src/java/org/apache/solr/schema/DenseVectorField.java index 5d2013cf204..feda6169ea1 100644 --- a/solr/core/src/java/org/apache/solr/schema/DenseVectorField.java +++ b/solr/core/src/java/org/apache/solr/schema/DenseVectorField.java @@ -25,12 +25,11 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.KnnByteVectorField; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.document.StoredField; -import 
org.apache.lucene.index.ByteVectorValues; -import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; @@ -194,24 +193,24 @@ public void checkSchemaField(final SchemaField field) throws SolrException { switch (vectorEncoding) { case FLOAT32: - if (dimension > FloatVectorValues.MAX_DIMENSIONS) { + if (dimension > KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS) { if (log.isWarnEnabled()) { log.warn( "The vector dimension {} specified for field {} exceeds the current Lucene default max dimension of {}. It's un-tested territory, extra caution and benchmarks are recommended for production systems.", dimension, field.getName(), - FloatVectorValues.MAX_DIMENSIONS); + KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS); } } break; case BYTE: - if (dimension > ByteVectorValues.MAX_DIMENSIONS) { + if (dimension > KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS) { if (log.isWarnEnabled()) { log.warn( "The vector dimension {} specified for field {} exceeds the current Lucene default max dimension of {}. It's un-tested territory, extra caution and benchmarks are recommended for production systems.", dimension, field.getName(), - ByteVectorValues.MAX_DIMENSIONS); + KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS); } } break; diff --git a/solr/core/src/java/org/apache/solr/schema/PointType.java b/solr/core/src/java/org/apache/solr/schema/PointType.java index 6a6b29b698b..3d68af9449c 100644 --- a/solr/core/src/java/org/apache/solr/schema/PointType.java +++ b/solr/core/src/java/org/apache/solr/schema/PointType.java @@ -123,8 +123,8 @@ public Type getUninversionType(SchemaField sf) { return null; } - @Override /** Care should be taken in calling this with higher order dimensions for performance reasons. 
*/ + @Override protected Query getSpecializedRangeQuery( QParser parser, SchemaField field, diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java index 82316adf85f..d5dde182b84 100644 --- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java @@ -712,7 +712,7 @@ public void collect(int contextDoc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { if (contexts.length == 0) { return; } @@ -803,7 +803,7 @@ public void finish() throws IOException { } if (delegate instanceof DelegatingCollector) { - ((DelegatingCollector) delegate).finish(); + ((DelegatingCollector) delegate).complete(); } } } @@ -919,7 +919,7 @@ public void collect(int contextDoc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { if (contexts.length == 0) { return; } @@ -993,7 +993,7 @@ public void finish() throws IOException { } if (delegate instanceof DelegatingCollector) { - ((DelegatingCollector) delegate).finish(); + ((DelegatingCollector) delegate).complete(); } } } @@ -1196,7 +1196,7 @@ public void collect(int contextDoc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { if (contexts.length == 0) { return; } @@ -1278,7 +1278,7 @@ public void finish() throws IOException { } if (delegate instanceof DelegatingCollector) { - ((DelegatingCollector) delegate).finish(); + ((DelegatingCollector) delegate).complete(); } } } @@ -1443,7 +1443,7 @@ public void collect(int contextDoc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { if (contexts.length == 0) { return; } @@ -1507,7 +1507,7 @@ public void finish() throws 
IOException { } if (delegate instanceof DelegatingCollector) { - ((DelegatingCollector) delegate).finish(); + ((DelegatingCollector) delegate).complete(); } } } @@ -1587,11 +1587,11 @@ protected boolean isBoostedAdvanceExact(final int contextDoc) { } @Override - public void finish() throws IOException { + public void complete() throws IOException { // Deal with last group (if any)... maybeDelegateCollect(); - super.finish(); + super.complete(); } /** @@ -3387,9 +3387,7 @@ public boolean collectInNullGroupIfBoosted(int globalDoc) { public void purgeGroupsThatHaveBoostedDocs( final FixedBitSet collapsedSet, final IntProcedure removeGroupKey, - final Runnable resetNullGroupHead) { - return; - } + final Runnable resetNullGroupHead) {} }; } diff --git a/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java b/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java index 685a5512b3c..af9b669f01f 100644 --- a/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java +++ b/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java @@ -80,9 +80,23 @@ protected void doSetNextReader(LeafReaderContext context) throws IOException { leafDelegate = delegate.getLeafCollector(context); } - public void finish() throws IOException { + /** + * From Solr 9.4 using Lucene 9.8 onwards DelegatingCollector.finish clashes with the + * super class's LeafCollector.finish method. Please relocate any finishing logic + * into the DelegatingCollector.complete replacement completion method. 
+ */ + @Override + public final void finish() throws IOException { + if (leafDelegate != null) { + leafDelegate.finish(); + } + super.finish(); + } + + /** since 9.4 */ + public void complete() throws IOException { if (delegate instanceof DelegatingCollector) { - ((DelegatingCollector) delegate).finish(); + ((DelegatingCollector) delegate).complete(); } } } diff --git a/solr/core/src/java/org/apache/solr/search/DocValuesIteratorCache.java b/solr/core/src/java/org/apache/solr/search/DocValuesIteratorCache.java new file mode 100644 index 00000000000..eba8a731881 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/DocValuesIteratorCache.java @@ -0,0 +1,319 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.search; + +import java.io.IOException; +import java.util.Arrays; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.function.Function; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.solr.schema.SchemaField; + +/** + * A helper class for random-order value access over docValues (such as in the case of + * useDocValuesAsStored). This class optimizes access by reusing DocValues iterators where possible, + * and by narrowing the scope of DocValues per-field/per-segment (shortcircuiting attempts to + * `advance()` to docs that are known to have no value for a given field). + */ +public class DocValuesIteratorCache { + + private static final EnumMap> + funcMap = new EnumMap<>(DocValuesType.class); + + static { + funcMap.put(DocValuesType.NUMERIC, LeafReader::getNumericDocValues); + funcMap.put(DocValuesType.BINARY, LeafReader::getBinaryDocValues); + funcMap.put( + DocValuesType.SORTED, + (r, f) -> { + SortedDocValues dvs = r.getSortedDocValues(f); + return dvs == null || dvs.getValueCount() < 1 ? null : dvs; + }); + funcMap.put(DocValuesType.SORTED_NUMERIC, LeafReader::getSortedNumericDocValues); + funcMap.put( + DocValuesType.SORTED_SET, + (r, f) -> { + SortedSetDocValues dvs = r.getSortedSetDocValues(f); + return dvs == null || dvs.getValueCount() < 1 ? 
null : dvs; + }); + } + + private static final FieldDocValuesSupplier NONE = new FieldDocValuesSupplier(null, null, 0); + + private final SolrIndexSearcher searcher; + private final int nLeaves; + private final Function getSupplier; + + /** + * Construct an instance used to optimize random-order DocValues iterator access for the specified + * searcher. + */ + public DocValuesIteratorCache(SolrIndexSearcher searcher) { + this(searcher, true); + } + + /** + * Construct an instance used to optimize random-order DocValues iterator access for the specified + * searcher. + * + * @param searcher the associated searcher + * @param cache if false, caching is disabled (useful mainly for single-field, single-doc access). + */ + public DocValuesIteratorCache(SolrIndexSearcher searcher, boolean cache) { + this.searcher = searcher; + this.nLeaves = searcher.getTopReaderContext().leaves().size(); + if (cache) { + HashMap map = new HashMap<>(); + getSupplier = (f) -> map.computeIfAbsent(f, this::newEntry); + } else { + getSupplier = this::newEntry; + } + } + + public FieldDocValuesSupplier getSupplier(String fieldName) { + FieldDocValuesSupplier ret = getSupplier.apply(fieldName); + return ret == NONE ? null : ret; + } + + private FieldDocValuesSupplier newEntry(String fieldName) { + final SchemaField schemaField = searcher.getSchema().getFieldOrNull(fieldName); + FieldInfo fi = searcher.getFieldInfos().fieldInfo(fieldName); + if (schemaField == null || !schemaField.hasDocValues() || fi == null) { + return NONE; // Searcher doesn't have info about this field, hence ignore it. 
+ } + final DocValuesType dvType = fi.getDocValuesType(); + switch (dvType) { + case NUMERIC: + case BINARY: + case SORTED: + case SORTED_NUMERIC: + case SORTED_SET: + return new FieldDocValuesSupplier(schemaField, dvType, nLeaves); + default: + return NONE; + } + } + + private interface IOBiFunction { + R apply(T t, U u) throws IOException; + } + + /** + * Supplies (and coordinates arbitrary-order value retrieval over) docValues iterators for a + * particular field, encapsulating the logic of iterator creation, reuse/caching, and advancing. + * Returned iterators are already positioned, and should not be advanced (though + * multi-valued iterators may consume/iterate over values/ords). + * + *

Instances of this class are specifically designed to support arbitrary-order value + * retrieval, (e.g., useDocValuesAsStored, ExportWriter) and should generally not be used for + * ordered retrieval (although ordered retrieval would work perfectly fine, and would add only + * minimal overhead). + */ + public static class FieldDocValuesSupplier { + public final SchemaField schemaField; + public final DocValuesType type; + private final int[] minLocalIds; + private final int[] ceilingIds; + private final int[] noMatchSince; + private final DocIdSetIterator[] perLeaf; + + private FieldDocValuesSupplier(SchemaField schemaField, DocValuesType type, int nLeaves) { + this.schemaField = schemaField; + this.type = type; + this.minLocalIds = new int[nLeaves]; + Arrays.fill(minLocalIds, -1); + this.ceilingIds = new int[nLeaves]; + Arrays.fill(ceilingIds, DocIdSetIterator.NO_MORE_DOCS); + this.noMatchSince = new int[nLeaves]; + this.perLeaf = new DocIdSetIterator[nLeaves]; + } + + /** + * This method does the actual work caching iterators, determining eligibility for re-use, + * pulling new iterators if necessary, and determining if we have a hit for a particular doc id. + */ + private DocIdSetIterator getDocValues( + int localId, + LeafReader leafReader, + int leafOrd, + boolean singleValued, + IOBiFunction dvFunction) + throws IOException { + int min = minLocalIds[leafOrd]; + DocIdSetIterator dv; + if (min == -1) { + // we are not yet initialized for this field/leaf. + dv = dvFunction.apply(leafReader, schemaField.getName()); + if (dv == null) { + minLocalIds[leafOrd] = DocIdSetIterator.NO_MORE_DOCS; // cache absence of this field + return null; + } + // on field/leaf init, determine the min doc, so that we don't expend effort pulling + // new iterators for docs that fall below this floor. 
+ min = dv.nextDoc(); + minLocalIds[leafOrd] = min; + perLeaf[leafOrd] = dv; + if (localId < min) { + noMatchSince[leafOrd] = 0; // implicit in initial `nextDoc()` call + return null; + } else if (localId == min) { + noMatchSince[leafOrd] = DocIdSetIterator.NO_MORE_DOCS; + return dv; + } + } else if (localId < min || localId >= ceilingIds[leafOrd]) { + // out of range: either too low or too high + return null; + } else { + dv = perLeaf[leafOrd]; + int currentDoc = dv.docID(); + if (localId == currentDoc) { + if (singleValued) { + return dv; + } else if (noMatchSince[leafOrd] != DocIdSetIterator.NO_MORE_DOCS) { + // `noMatchSince[leafOrd] != DocIdSetIterator.NO_MORE_DOCS` means that `dv` has not + // been returned at its current position, and has therefore not been consumed and + // is thus eligible to be returned directly. (singleValued dv iterators are always + // eligible to be returned directly, as they have no concept of being "consumed") + + // NOTE: we must reset `noMatchSince[leafOrd]` here in order to prevent returning + // consumed docValues; even though this actually loses us possible skipping information, + // it's an edge case, and allows us to use `noMatchSince[leafOrd]` as a signal of + // whether we have consumed multivalued docValues. + noMatchSince[leafOrd] = DocIdSetIterator.NO_MORE_DOCS; + return dv; + } + } + if (localId <= currentDoc) { + if (localId >= noMatchSince[leafOrd]) { + // if the requested doc falls between the last requested doc and the current + // position, then we know there's no match. + return null; + } + // we must re-init the iterator + dv = dvFunction.apply(leafReader, schemaField.getName()); + perLeaf[leafOrd] = dv; + } + } + // NOTE: use `advance()`, not `advanceExact()`. There's no cost (in terms of re-use) to + // doing so, because we track `noMatchSince` in the event of a miss. 
+ int found = dv.advance(localId); + if (found == localId) { + noMatchSince[leafOrd] = DocIdSetIterator.NO_MORE_DOCS; + return dv; + } else { + if (found == DocIdSetIterator.NO_MORE_DOCS) { + ceilingIds[leafOrd] = Math.min(localId, ceilingIds[leafOrd]); + } + noMatchSince[leafOrd] = localId; + return null; + } + } + + /** + * Returns docValues for the specified doc id in the specified reader, if the specified doc + * holds docValues for this {@link FieldDocValuesSupplier} instance, otherwise returns null. + * + *

If a non-null value is returned, it will already be positioned at the specified docId. + * + * @param localId leaf-scoped docId + * @param leafReader reader containing docId + * @param leafOrd top-level ord of the specified reader + */ + public NumericDocValues getNumericDocValues(int localId, LeafReader leafReader, int leafOrd) + throws IOException { + return (NumericDocValues) + getDocValues(localId, leafReader, leafOrd, true, funcMap.get(DocValuesType.NUMERIC)); + } + + /** + * Returns docValues for the specified doc id in the specified reader, if the specified doc + * holds docValues for this {@link FieldDocValuesSupplier} instance, otherwise returns null. + * + *

If a non-null value is returned, it will already be positioned at the specified docId. + * + * @param localId leaf-scoped docId + * @param leafReader reader containing docId + * @param leafOrd top-level ord of the specified reader + */ + public BinaryDocValues getBinaryDocValues(int localId, LeafReader leafReader, int leafOrd) + throws IOException { + return (BinaryDocValues) + getDocValues(localId, leafReader, leafOrd, true, funcMap.get(DocValuesType.BINARY)); + } + + /** + * Returns docValues for the specified doc id in the specified reader, if the specified doc + * holds docValues for this {@link FieldDocValuesSupplier} instance, otherwise returns null. + * + *

If a non-null value is returned, it will already be positioned at the specified docId. + * + * @param localId leaf-scoped docId + * @param leafReader reader containing docId + * @param leafOrd top-level ord of the specified reader + */ + public SortedDocValues getSortedDocValues(int localId, LeafReader leafReader, int leafOrd) + throws IOException { + return (SortedDocValues) + getDocValues(localId, leafReader, leafOrd, true, funcMap.get(DocValuesType.SORTED)); + } + + /** + * Returns docValues for the specified doc id in the specified reader, if the specified doc + * holds docValues for this {@link FieldDocValuesSupplier} instance, otherwise returns null. + * + *

If a non-null value is returned, it will already be positioned at the specified docId, and + * with values ({@link SortedNumericDocValues#nextValue()}) not yet consumed. + * + * @param localId leaf-scoped docId + * @param leafReader reader containing docId + * @param leafOrd top-level ord of the specified reader + */ + public SortedNumericDocValues getSortedNumericDocValues( + int localId, LeafReader leafReader, int leafOrd) throws IOException { + return (SortedNumericDocValues) + getDocValues( + localId, leafReader, leafOrd, false, funcMap.get(DocValuesType.SORTED_NUMERIC)); + } + + /** + * Returns docValues for the specified doc id in the specified reader, if the specified doc + * holds docValues for this {@link FieldDocValuesSupplier} instance, otherwise returns null. + * + *

If a non-null value is returned, it will already be positioned at the specified docId, and + * with ords ({@link SortedSetDocValues#nextOrd()}) not yet consumed. + * + * @param localId leaf-scoped docId + * @param leafReader reader containing docId + * @param leafOrd top-level ord of the specified reader + */ + public SortedSetDocValues getSortedSetDocValues(int localId, LeafReader leafReader, int leafOrd) + throws IOException { + return (SortedSetDocValues) + getDocValues(localId, leafReader, leafOrd, false, funcMap.get(DocValuesType.SORTED_SET)); + } + } +} diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java index dcc88463696..f838361d407 100644 --- a/solr/core/src/java/org/apache/solr/search/Grouping.java +++ b/solr/core/src/java/org/apache/solr/search/Grouping.java @@ -368,7 +368,7 @@ public void execute() throws IOException { searchWithTimeLimiter(filterQuery, allCollectors); if (allCollectors instanceof DelegatingCollector) { - ((DelegatingCollector) allCollectors).finish(); + ((DelegatingCollector) allCollectors).complete(); } } @@ -409,7 +409,7 @@ public void execute() throws IOException { searchWithTimeLimiter(filterQuery, secondPhaseCollectors); } if (secondPhaseCollectors instanceof DelegatingCollector) { - ((DelegatingCollector) secondPhaseCollectors).finish(); + ((DelegatingCollector) secondPhaseCollectors).complete(); } } } diff --git a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java index 74b31fe04cd..011e22c0f6c 100644 --- a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java @@ -146,7 +146,7 @@ public void collect(int doc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { NamedList analytics = new NamedList<>();
NamedList topFreq = new NamedList<>(); NamedList allFreq = new NamedList<>(); @@ -204,7 +204,7 @@ public void finish() throws IOException { } if (this.delegate instanceof DelegatingCollector) { - ((DelegatingCollector) this.delegate).finish(); + ((DelegatingCollector) this.delegate).complete(); } } diff --git a/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java index 2488df85e5b..4e6c09f38f6 100644 --- a/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java @@ -109,7 +109,7 @@ private NoOpTermsCollector(ResponseBuilder rb) { public void collect(int doc) throws IOException {} @Override - public void finish() throws IOException { + public void complete() throws IOException { List outTerms = new ArrayList<>(); List outFreq = new ArrayList<>(); List outQueryFreq = new ArrayList<>(); @@ -174,7 +174,7 @@ public void collect(int doc) throws IOException { } @Override - public void finish() throws IOException { + public void complete() throws IOException { List outTerms = new ArrayList<>(); List outFreq = new ArrayList<>(); List outQueryFreq = new ArrayList<>(); @@ -270,7 +270,7 @@ public void finish() throws IOException { } if (this.delegate instanceof DelegatingCollector) { - ((DelegatingCollector) this.delegate).finish(); + ((DelegatingCollector) this.delegate).complete(); } } } diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java index a975337f869..e6ccb9edd18 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java @@ -84,6 +84,8 @@ public class SolrDocumentFetcher { private final SolrIndexSearcher searcher; + private final int nLeaves; + private final boolean 
enableLazyFieldLoading; private final SolrCache documentCache; @@ -119,6 +121,7 @@ public class SolrDocumentFetcher { @SuppressWarnings({"unchecked"}) SolrDocumentFetcher(SolrIndexSearcher searcher, SolrConfig solrConfig, boolean cachingEnabled) { this.searcher = searcher; + this.nLeaves = searcher.getTopReaderContext().leaves().size(); this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading; if (cachingEnabled) { documentCache = @@ -561,16 +564,23 @@ public InvertableType invertableType() { * @param fields The fields with docValues to populate the document with. DocValues fields which * do not exist or not decodable will be ignored. */ - public void decorateDocValueFields(SolrDocumentBase doc, int docid, Set fields) + public void decorateDocValueFields( + SolrDocumentBase doc, + int docid, + Set fields, + DocValuesIteratorCache reuseDvIters) throws IOException { final List leafContexts = searcher.getLeafContexts(); final int subIndex = ReaderUtil.subIndex(docid, leafContexts); final int localId = docid - leafContexts.get(subIndex).docBase; final LeafReader leafReader = leafContexts.get(subIndex).reader(); for (String fieldName : fields) { - Object fieldValue = decodeDVField(localId, leafReader, fieldName); - if (fieldValue != null) { - doc.setField(fieldName, fieldValue); + DocValuesIteratorCache.FieldDocValuesSupplier e = reuseDvIters.getSupplier(fieldName); + if (e != null) { + Object fieldValue = decodeDVField(localId, leafReader, subIndex, e); + if (fieldValue != null) { + doc.setField(fieldName, fieldValue); + } } } } @@ -580,59 +590,56 @@ public void decorateDocValueFields(SolrDocumentBase doc, int docid, Set outValues = new ArrayList<>(docValueCount); for (int i = 0; i < docValueCount; i++) { long number = numericDv.nextValue(); - Object value = decodeNumberFromDV(schemaField, number, true); + Object value = decodeNumberFromDV(e.schemaField, number, true); // return immediately if the number is not decodable, hence won't return an empty list. 
if (value == null) { return null; } // normally never true but LatLonPointSpatialField uses SORTED_NUMERIC even when single // valued - else if (schemaField.multiValued() == false) { + else if (e.schemaField.multiValued() == false) { return value; } else { outValues.add(value); @@ -643,21 +650,21 @@ else if (schemaField.multiValued() == false) { } return null; case SORTED_SET: - final SortedSetDocValues values = leafReader.getSortedSetDocValues(fieldName); - if (values != null && values.getValueCount() > 0 && values.advance(localId) == localId) { + final SortedSetDocValues values = e.getSortedSetDocValues(localId, leafReader, readerOrd); + if (values != null) { final List outValues = new ArrayList<>(); for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { BytesRef value = values.lookupOrd(ord); - outValues.add(schemaField.getType().toObject(schemaField, value)); + outValues.add(e.schemaField.getType().toObject(e.schemaField, value)); } assert outValues.size() > 0; return outValues; } return null; default: - return null; + throw new IllegalStateException(); } } @@ -751,6 +758,8 @@ class RetrieveFieldsOptimizer { private final SolrReturnFields solrReturnFields; + private final DocValuesIteratorCache reuseDvIters; + RetrieveFieldsOptimizer(SolrReturnFields solrReturnFields) { this.storedFields = calcStoredFieldsForReturn(solrReturnFields); this.dvFields = calcDocValueFieldsForReturn(solrReturnFields); @@ -760,6 +769,7 @@ class RetrieveFieldsOptimizer { dvFields.addAll(storedFields); storedFields.clear(); } + reuseDvIters = dvFields.isEmpty() ? 
null : new DocValuesIteratorCache(searcher); } /** @@ -881,7 +891,7 @@ private SolrDocument getSolrDoc(int luceneDocId) { // decorate the document with non-stored docValues fields if (returnDVFields()) { - decorateDocValueFields(sdoc, luceneDocId, getDvFields()); + decorateDocValueFields(sdoc, luceneDocId, getDvFields(), reuseDvIters); } } catch (IOException e) { throw new SolrException( diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 79bc89eec9f..65e668490c7 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -280,7 +280,7 @@ private Collector buildAndRunCollectorChain( qr.setPartialResults(true); } catch (EarlyTerminatingCollectorException etce) { if (collector instanceof DelegatingCollector) { - ((DelegatingCollector) collector).finish(); + ((DelegatingCollector) collector).complete(); } throw etce; } finally { @@ -293,7 +293,7 @@ private Collector buildAndRunCollectorChain( } } if (collector instanceof DelegatingCollector) { - ((DelegatingCollector) collector).finish(); + ((DelegatingCollector) collector).complete(); } return collector; @@ -1133,7 +1133,7 @@ public DocSet getDocSet(List queries) throws IOException { search(query, collector); if (collector instanceof DelegatingCollector) { - ((DelegatingCollector) collector).finish(); + ((DelegatingCollector) collector).complete(); } return DocSetUtil.getDocSet(setCollector, this); diff --git a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java index b72fade0065..0f8142da399 100644 --- a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java @@ -168,7 +168,7 @@ public void 
collect(int doc) throws IOException { @Override @SuppressWarnings({"unchecked"}) - public void finish() throws IOException { + public void complete() throws IOException { Map docVectors = new HashMap<>(); Terms terms = @@ -230,7 +230,7 @@ public void finish() throws IOException { analytics.add("feature", trainingParams.feature); analytics.add("positiveLabel", trainingParams.positiveLabel); if (this.delegate instanceof DelegatingCollector) { - ((DelegatingCollector) this.delegate).finish(); + ((DelegatingCollector) this.delegate).complete(); } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java index fbf5b18858e..7799db960c2 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java @@ -104,7 +104,7 @@ public Object getMergedResult() { List> resultBuckets = new ArrayList<>(Math.max(0, (int) (last - first))); - /** + /* * this only works if there are no filters (like mincount) for (int i=first; i getRefinementSpecial( refinement = getRefinementSpecial(mcontext, refinement, tagsWithPartial, missingBucket, "missing"); } - /** + /* * allBuckets does not execute sub-facets because we don't change the domain. We may need * refinement info in the future though for stats. 
if (freq.allBuckets) { refinement = * getRefinementSpecial(mcontext, refinement, tagsWithPartial, allBuckets, "allBuckets"); } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java index 366922e008a..cb7d5492a91 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java @@ -215,7 +215,7 @@ private void collectPerSeg(SortedDocValues singleDv, SweepDISI disi, LongValues int segMax = singleDv.getValueCount(); final SegCountPerSeg segCounter = getSegCountPerSeg(disi, segMax); - /** + /* * alternate trial implementations // ord // FieldUtil.visitOrds(singleDv, disi, * (doc,ord)->{counts[ord+1]++;} ); * diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java index 63a28aa0829..e5176c6e2af 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java @@ -108,7 +108,7 @@ public void sortBuckets(final FacetRequest.FacetSort sort) { } else { final String key = sort.sortVariable; - /** + /* * final FacetSortableMerger[] arr = new FacetSortableMerger[buckets.size()]; final int[] * index = new int[arr.length]; int start = 0; int nullStart = index.length; int i=0; for * (FacetBucket bucket : buckets.values()) { FacetMerger merger = diff --git a/solr/core/src/java/org/apache/solr/security/SolrLogAuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/SolrLogAuditLoggerPlugin.java index ed609668d87..f5a4ec5a099 100644 --- a/solr/core/src/java/org/apache/solr/security/SolrLogAuditLoggerPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/SolrLogAuditLoggerPlugin.java @@ -42,34 +42,32 @@ 
public void init(Map pluginConfig) { super.init(pluginConfig); setFormatter( event -> - new StringBuilder() - .append("type=\"") - .append(event.getEventType().name()) - .append("\"") - .append(" message=\"") - .append(event.getMessage()) - .append("\"") - .append(" method=\"") - .append(event.getHttpMethod()) - .append("\"") - .append(" status=\"") - .append(event.getStatus()) - .append("\"") - .append(" requestType=\"") - .append(event.getRequestType()) - .append("\"") - .append(" username=\"") - .append(event.getUsername()) - .append("\"") - .append(" resource=\"") - .append(event.getResource()) - .append("\"") - .append(" queryString=\"") - .append(event.getHttpQueryString()) - .append("\"") - .append(" collections=") - .append(event.getCollections()) - .toString()); + "type=\"" + + event.getEventType().name() + + "\"" + + " message=\"" + + event.getMessage() + + "\"" + + " method=\"" + + event.getHttpMethod() + + "\"" + + " status=\"" + + event.getStatus() + + "\"" + + " requestType=\"" + + event.getRequestType() + + "\"" + + " username=\"" + + event.getUsername() + + "\"" + + " resource=\"" + + event.getResource() + + "\"" + + " queryString=\"" + + event.getHttpQueryString() + + "\"" + + " collections=" + + event.getCollections()); if (pluginConfig.size() > 0) { throw new SolrException( SolrException.ErrorCode.INVALID_STATE, diff --git a/solr/core/src/java/org/apache/solr/servlet/CoordinatorHttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/CoordinatorHttpSolrCall.java index 82664b23623..aa4a0e2fd75 100644 --- a/solr/core/src/java/org/apache/solr/servlet/CoordinatorHttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/CoordinatorHttpSolrCall.java @@ -47,6 +47,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * A coordinator node can serve requests as if it hosts all collections in the cluster. it does so + * by hosting a synthetic replica for each configset used in the cluster. + * + *

This class is responsible for forwarding the requests to the right core when the node is + * acting as a Coordinator. The responsibilities also involve creating a synthetic collection or + * replica if they do not exist. It also sets the right threadlocal variables which reflect the + * current collection being served. + */ public class CoordinatorHttpSolrCall extends HttpSolrCall { public static final String SYNTHETIC_COLL_PREFIX = Assign.SYSTEM_COLL_PREFIX + "COORDINATOR-COLL-"; @@ -277,6 +286,11 @@ protected void init() throws Exception { } } + @Override + protected String getCoreOrColName() { + return collectionName; + } + public static SolrQueryRequest wrappedReq( SolrQueryRequest delegate, String collectionName, HttpSolrCall httpSolrCall) { Properties p = new Properties(); diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index 9137f3ded4e..ce9b1f8e6be 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -637,10 +637,7 @@ public Span getSpan() { // called after init(). protected void populateTracingSpan(Span span) { // Set db.instance - String coreOrColName = HttpSolrCall.this.origCorename; - if (coreOrColName == null && getCore() != null) { - coreOrColName = getCore().getName(); - } + String coreOrColName = getCoreOrColName(); TraceUtils.setDbInstance(span, coreOrColName); // Set operation name.
@@ -658,6 +655,14 @@ protected void populateTracingSpan(Span span) { span.updateName(verb + ":" + path); } + protected String getCoreOrColName() { + String coreOrColName = HttpSolrCall.this.origCorename; + if (coreOrColName == null && getCore() != null) { + coreOrColName = getCore().getName(); + } + return coreOrColName; + } + public boolean shouldAudit() { return shouldAudit(cores); } diff --git a/solr/core/src/java/org/apache/solr/servlet/cache/HttpCacheHeaderUtil.java b/solr/core/src/java/org/apache/solr/servlet/cache/HttpCacheHeaderUtil.java index 078f57e8908..f661a7d156e 100644 --- a/solr/core/src/java/org/apache/solr/servlet/cache/HttpCacheHeaderUtil.java +++ b/solr/core/src/java/org/apache/solr/servlet/cache/HttpCacheHeaderUtil.java @@ -180,8 +180,6 @@ public static void setCacheControlHeader( if (null != maxAge) { resp.setDateHeader("Expires", timeNowForHeader() + (maxAge * 1000L)); } - - return; } /** diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java index 8f5700d7c86..7636bf54452 100644 --- a/solr/core/src/java/org/apache/solr/update/PeerSync.java +++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java @@ -767,8 +767,8 @@ MissedUpdatesRequest handleVersionsWithRanges(List otherVersions, boolean return handleVersionsWithRanges(otherVersions, completeList, ourUpdates, ourLowThreshold); } - @VisibleForTesting /** Implementation assumes the passed in lists are sorted and contain no duplicates. 
*/ + @VisibleForTesting static MissedUpdatesRequest handleVersionsWithRanges( List otherVersions, boolean completeList, diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/AveragingMetricProvider.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/AveragingMetricProvider.java new file mode 100644 index 00000000000..60161e98181 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/AveragingMetricProvider.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.util.circuitbreaker; + +import com.google.common.util.concurrent.AtomicDouble; +import java.io.Closeable; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import org.apache.solr.common.util.ExecutorUtil; +import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.logging.CircularList; + +/** Averages the metric value over a period of time */ +public class AveragingMetricProvider implements Closeable { + private final CircularList samplesRingBuffer; + private ScheduledExecutorService executor; + private final AtomicDouble currentAverageValue = new AtomicDouble(-1); + + /** + * Creates an instance with an executor that runs every sampleInterval seconds and averages over + * numSamples samples. + * + * @param metricProvider metric provider that will provide a value + * @param numSamples number of samples to calculate average for + * @param sampleInterval interval between each sample + */ + public AveragingMetricProvider( + MetricProvider metricProvider, int numSamples, long sampleInterval) { + this.samplesRingBuffer = new CircularList<>(numSamples); + executor = + Executors.newSingleThreadScheduledExecutor( + new SolrNamedThreadFactory( + "AveragingMetricProvider-" + metricProvider.getClass().getSimpleName())); + executor.scheduleWithFixedDelay( + () -> { + samplesRingBuffer.add(metricProvider.getMetricValue()); + currentAverageValue.set( + samplesRingBuffer.toList().stream() + .mapToDouble(Double::doubleValue) + .average() + .orElse(-1)); + }, + 0, + sampleInterval, + TimeUnit.SECONDS); + } + + /** + * Return current average. This is a cached value, so calling this method will not incur any + * calculations + */ + public double getMetricValue() { + return currentAverageValue.get(); + } + + @Override + public void close() { + ExecutorUtil.shutdownAndAwaitTermination(executor); + } + + /** Interface to provide the metric value. 
*/ + public interface MetricProvider { + double getMetricValue(); + } +} diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CPUCircuitBreaker.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CPUCircuitBreaker.java index 90c86499b3c..4c1ac111c58 100644 --- a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CPUCircuitBreaker.java +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CPUCircuitBreaker.java @@ -17,56 +17,63 @@ package org.apache.solr.util.circuitbreaker; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; import java.lang.invoke.MethodHandles; -import java.lang.management.ManagementFactory; -import java.lang.management.OperatingSystemMXBean; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrCore; +import org.apache.solr.metrics.SolrMetricManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Tracks current CPU usage and triggers if the specified threshold is breached. * - *

This circuit breaker gets the average CPU load over the last minute and uses that data to take - * a decision. We depend on OperatingSystemMXBean which does not allow a configurable interval of - * collection of data. //TODO: Use Codahale Meter to calculate the value locally. - * - *

The configuration to define which mode to use and the trigger threshold are defined in - * solrconfig.xml + *

This circuit breaker gets the recent average CPU usage and uses that data to take a decision. + * We depend on OperatingSystemMXBean which does not allow a configurable interval of collection of + * data. */ public class CPUCircuitBreaker extends CircuitBreaker { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final OperatingSystemMXBean operatingSystemMXBean = - ManagementFactory.getOperatingSystemMXBean(); + private boolean enabled = true; private double cpuUsageThreshold; + private final SolrCore core; private static final ThreadLocal seenCPUUsage = ThreadLocal.withInitial(() -> 0.0); private static final ThreadLocal allowedCPUUsage = ThreadLocal.withInitial(() -> 0.0); - public CPUCircuitBreaker() { + public CPUCircuitBreaker(SolrCore core) { super(); - } - - public void setThreshold(double threshold) { - this.cpuUsageThreshold = threshold; + this.core = core; } @Override - public boolean isTripped() { - - double localAllowedCPUUsage = getCpuUsageThreshold(); + public void init(NamedList args) { + super.init(args); double localSeenCPUUsage = calculateLiveCPUUsage(); if (localSeenCPUUsage < 0) { - if (log.isWarnEnabled()) { - String msg = "Unable to get CPU usage"; - - log.warn(msg); + String msg = + "Initialization failure for CPU circuit breaker. 
Unable to get 'systemCpuLoad', not supported by the JVM?"; + if (log.isErrorEnabled()) { + log.error(msg); + } + enabled = false; + } + } + @Override + public boolean isTripped() { + if (!enabled) { + if (log.isDebugEnabled()) { + log.debug("CPU circuit breaker is disabled due to initialization failure."); + } return false; } + double localAllowedCPUUsage = getCpuUsageThreshold(); + double localSeenCPUUsage = calculateLiveCPUUsage(); allowedCPUUsage.set(localAllowedCPUUsage); @@ -84,11 +91,50 @@ public String getErrorMessage() { + allowedCPUUsage.get(); } + public void setThreshold(double thresholdValueInPercentage) { + if (thresholdValueInPercentage > 100) { + throw new IllegalArgumentException("Invalid threshold value."); + } + + if (thresholdValueInPercentage <= 0) { + throw new IllegalStateException("Threshold cannot be less than or equal to zero"); + } + cpuUsageThreshold = thresholdValueInPercentage; + } + public double getCpuUsageThreshold() { return cpuUsageThreshold; } + /** + * Calculate the CPU usage for the system in percentage. + * + * @return Percent CPU usage, or -1 if value could not be obtained.
+ */ protected double calculateLiveCPUUsage() { - return operatingSystemMXBean.getSystemLoadAverage(); + // TODO: Use Codahale Meter to calculate the value + Metric metric = + this.core + .getCoreContainer() + .getMetricManager() + .registry("solr.jvm") + .getMetrics() + .get("os.systemCpuLoad"); + + if (metric == null) { + return -1.0; + } + + if (metric instanceof Gauge) { + @SuppressWarnings({"rawtypes"}) + Gauge gauge = (Gauge) metric; + // unwrap if needed + if (gauge instanceof SolrMetricManager.GaugeWrapper) { + gauge = ((SolrMetricManager.GaugeWrapper) gauge).getGauge(); + } + return (Double) gauge.getValue() * 100; + } + + return -1.0; // Unable to unpack metric } } diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreaker.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreaker.java index 4b9d24bb694..78841cceaf7 100644 --- a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreaker.java +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreaker.java @@ -17,6 +17,8 @@ package org.apache.solr.util.circuitbreaker; +import java.io.Closeable; +import java.io.IOException; import java.util.List; import java.util.Locale; import java.util.Set; @@ -41,7 +43,7 @@ * * @lucene.experimental */ -public abstract class CircuitBreaker implements NamedListInitializedPlugin { +public abstract class CircuitBreaker implements NamedListInitializedPlugin, Closeable { // Only query requests are checked by default private Set requestTypes = Set.of(SolrRequestType.QUERY); private final List SUPPORTED_TYPES = @@ -60,6 +62,11 @@ public CircuitBreaker() {} /** Get error message when the circuit breaker triggers */ public abstract String getErrorMessage(); + @Override + public void close() throws IOException { + // Nothing to do by default + } + /** * Set the request types for which this circuit breaker should be checked. 
If not called, the * circuit breaker will be checked for the {@link SolrRequestType#QUERY} request type only. diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerManager.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerManager.java index 5b39217b33b..3ca0c760a86 100644 --- a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerManager.java +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerManager.java @@ -17,14 +17,15 @@ package org.apache.solr.util.circuitbreaker; +import java.io.IOException; import java.lang.invoke.MethodHandles; import org.apache.solr.common.util.NamedList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Single CircuitBreaker that registers both a Memory and a CPU CircuitBreaker. This is only for - * backward compatibility with the 9.x versions prior to 9.4. + * Single CircuitBreaker that registers both a Memory and a LoadAverage CircuitBreaker. This is only + * for backward compatibility with the 9.x versions prior to 9.4. 
* * @deprecated Use individual Circuit Breakers instead */ @@ -36,7 +37,7 @@ public class CircuitBreakerManager extends CircuitBreaker { private int memThreshold = 100; private int cpuThreshold = 100; private MemoryCircuitBreaker memCB; - private CPUCircuitBreaker cpuCB; + private LoadAverageCircuitBreaker cpuCB; public CircuitBreakerManager() { super(); @@ -71,11 +72,25 @@ public void init(NamedList args) { memCB.setThreshold(memThreshold); } if (cpuEnabled) { - cpuCB = new CPUCircuitBreaker(); + // In SOLR-15056 CPUCircuitBreaker was renamed to LoadAverageCircuitBreaker, need back-compat + cpuCB = new LoadAverageCircuitBreaker(); cpuCB.setThreshold(cpuThreshold); } } + @Override + public void close() throws IOException { + try { + if (memEnabled) { + memCB.close(); + } + } finally { + if (cpuEnabled) { + cpuCB.close(); + } + } + } + // The methods below will be called by super class during init public void setMemEnabled(String enabled) { this.memEnabled = Boolean.getBoolean(enabled); diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerRegistry.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerRegistry.java index 84c2f61fb9b..a7081df96f6 100644 --- a/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerRegistry.java +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/CircuitBreakerRegistry.java @@ -18,12 +18,19 @@ package org.apache.solr.util.circuitbreaker; import com.google.common.annotations.VisibleForTesting; +import java.io.Closeable; +import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.solr.client.solrj.SolrRequest.SolrRequestType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Keeps track of all registered circuit breaker 
instances for various request types. Responsible @@ -32,26 +39,35 @@ * @lucene.experimental * @since 9.4 */ -public class CircuitBreakerRegistry { +public class CircuitBreakerRegistry implements Closeable { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final Map> circuitBreakerMap = new HashMap<>(); public CircuitBreakerRegistry() {} public void register(CircuitBreaker circuitBreaker) { - circuitBreaker - .getRequestTypes() - .forEach( - r -> { - List list = - circuitBreakerMap.computeIfAbsent(r, k -> new ArrayList<>()); - list.add(circuitBreaker); - }); + synchronized (circuitBreakerMap) { + circuitBreaker + .getRequestTypes() + .forEach( + r -> { + List list = + circuitBreakerMap.computeIfAbsent(r, k -> new ArrayList<>()); + list.add(circuitBreaker); + if (log.isInfoEnabled()) { + log.info( + "Registered circuit breaker {} for request type(s) {}", + circuitBreaker.getClass().getSimpleName(), + r); + } + }); + } } @VisibleForTesting - public void deregisterAll() { - circuitBreakerMap.clear(); + public void deregisterAll() throws IOException { + this.close(); } /** @@ -97,4 +113,41 @@ public static String toErrorMessage(List circuitBreakerList) { public boolean isEnabled(SolrRequestType requestType) { return circuitBreakerMap.containsKey(requestType); } + + @Override + public void close() throws IOException { + synchronized (circuitBreakerMap) { + final AtomicInteger closeFailedCounter = new AtomicInteger(0); + circuitBreakerMap + .values() + .forEach( + list -> + list.forEach( + it -> { + try { + if (log.isDebugEnabled()) { + log.debug( + "Closed circuit breaker {} for request type(s) {}", + it.getClass().getSimpleName(), + it.getRequestTypes()); + } + it.close(); + } catch (IOException e) { + if (log.isErrorEnabled()) { + log.error( + String.format( + Locale.ROOT, + "Failed to close circuit breaker %s", + it.getClass().getSimpleName()), + e); + } + closeFailedCounter.incrementAndGet(); + } + })); + 
circuitBreakerMap.clear(); + if (closeFailedCounter.get() > 0) { + throw new IOException("Failed to close " + closeFailedCounter.get() + " circuit breakers"); + } + } + } } diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/LoadAverageCircuitBreaker.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/LoadAverageCircuitBreaker.java new file mode 100644 index 00000000000..77772b927b3 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/LoadAverageCircuitBreaker.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.util.circuitbreaker; + +import java.lang.invoke.MethodHandles; +import java.lang.management.ManagementFactory; +import java.lang.management.OperatingSystemMXBean; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Tracks current system load average and triggers if the specified threshold is breached. + * + *

This circuit breaker gets the load average (length of the run queue) over the last minute and + * uses that data to take a decision. We depend on OperatingSystemMXBean which does not allow a + * configurable interval of collection of data. + */ +public class LoadAverageCircuitBreaker extends CircuitBreaker { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final OperatingSystemMXBean operatingSystemMXBean = + ManagementFactory.getOperatingSystemMXBean(); + + private double loadAverageThreshold; + + // Assumption -- the value of these parameters will be set correctly before invoking + // getDebugInfo() + private static final ThreadLocal seenLoadAverage = ThreadLocal.withInitial(() -> 0.0); + + private static final ThreadLocal allowedLoadAverage = ThreadLocal.withInitial(() -> 0.0); + + public LoadAverageCircuitBreaker() { + super(); + } + + @Override + public boolean isTripped() { + double localAllowedLoadAverage = getLoadAverageThreshold(); + double localSeenLoadAverage = calculateLiveLoadAverage(); + + if (localSeenLoadAverage < 0) { + if (log.isWarnEnabled()) { + String msg = "Unable to get load average"; + + log.warn(msg); + } + + return false; + } + + allowedLoadAverage.set(localAllowedLoadAverage); + + seenLoadAverage.set(localSeenLoadAverage); + + return (localSeenLoadAverage >= localAllowedLoadAverage); + } + + @Override + public String getErrorMessage() { + return "Load Average Circuit Breaker triggered as seen load average is above allowed threshold." 
+ + "Seen load average " + + seenLoadAverage.get() + + " and allocated threshold " + + allowedLoadAverage.get(); + } + + public void setThreshold(double thresholdValueUnbounded) { + if (thresholdValueUnbounded <= 0) { + throw new IllegalStateException("Threshold cannot be less than or equal to zero"); + } + loadAverageThreshold = thresholdValueUnbounded; + } + + public double getLoadAverageThreshold() { + return loadAverageThreshold; + } + + protected double calculateLiveLoadAverage() { + return operatingSystemMXBean.getSystemLoadAverage(); + } +} diff --git a/solr/core/src/java/org/apache/solr/util/circuitbreaker/MemoryCircuitBreaker.java b/solr/core/src/java/org/apache/solr/util/circuitbreaker/MemoryCircuitBreaker.java index 3004d732e4d..4a3eb3f5b9f 100644 --- a/solr/core/src/java/org/apache/solr/util/circuitbreaker/MemoryCircuitBreaker.java +++ b/solr/core/src/java/org/apache/solr/util/circuitbreaker/MemoryCircuitBreaker.java @@ -17,32 +17,64 @@ package org.apache.solr.util.circuitbreaker; +import java.io.IOException; import java.lang.invoke.MethodHandles; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; +import org.apache.solr.util.RefCounted; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Tracks the current JVM heap usage and triggers if it exceeds the defined percentage of the - * maximum heap size allocated to the JVM. This circuit breaker is a part of the default - * CircuitBreakerRegistry so is checked for every request -- hence it is realtime. Once the memory - * usage goes below the threshold, it will start allowing queries again. + * Tracks the current JVM heap usage and triggers if a moving heap usage average over 30 seconds + * exceeds the defined percentage of the maximum heap size allocated to the JVM. Once the average + * memory usage goes below the threshold, it will start allowing queries again. * *

The memory threshold is defined as a percentage of the maximum memory allocated -- see - * memThreshold in solrconfig.xml. + * memThreshold in solrconfig.xml. */ public class MemoryCircuitBreaker extends CircuitBreaker { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final MemoryMXBean MEMORY_MX_BEAN = ManagementFactory.getMemoryMXBean(); + // One shared provider / executor for all instances of this class + private static RefCounted averagingMetricProvider; private long heapMemoryThreshold; private static final ThreadLocal seenMemory = ThreadLocal.withInitial(() -> 0L); private static final ThreadLocal allowedMemory = ThreadLocal.withInitial(() -> 0L); + /** Creates an instance which averages over 6 samples during last 30 seconds. */ public MemoryCircuitBreaker() { + this(6, 5); + } + + /** + * Constructor that allows override of sample interval for which the memory usage is fetched. This + * is provided for testing, not intended for general use because the average metric provider + * implementation is the same for all instances of the class. 
+ * + * @param numSamples number of samples to calculate average for + * @param sampleInterval interval between each sample + */ + protected MemoryCircuitBreaker(int numSamples, int sampleInterval) { super(); + synchronized (MemoryCircuitBreaker.class) { + if (averagingMetricProvider == null || averagingMetricProvider.getRefcount() == 0) { + averagingMetricProvider = + new RefCounted<>( + new AveragingMetricProvider( + () -> MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(), + numSamples, + sampleInterval)) { + @Override + protected void close() { + get().close(); + } + }; + } + averagingMetricProvider.incref(); + } } public void setThreshold(double thresholdValueInPercentage) { @@ -60,14 +92,11 @@ public void setThreshold(double thresholdValueInPercentage) { } } - // TODO: An optimization can be to trip the circuit breaker for a duration of time - // after the circuit breaker condition is matched. This will optimize for per call - // overhead of calculating the condition parameters but can result in false positives. @Override public boolean isTripped() { long localAllowedMemory = getCurrentMemoryThreshold(); - long localSeenMemory = calculateLiveMemoryUsage(); + long localSeenMemory = getAvgMemoryUsage(); allowedMemory.set(localAllowedMemory); @@ -76,6 +105,10 @@ public boolean isTripped() { return (localSeenMemory >= localAllowedMemory); } + protected long getAvgMemoryUsage() { + return (long) averagingMetricProvider.get().getMetricValue(); + } + @Override public String getErrorMessage() { return "Memory Circuit Breaker triggered as JVM heap usage values are greater than allocated threshold. " @@ -89,17 +122,12 @@ private long getCurrentMemoryThreshold() { return heapMemoryThreshold; } - /** - * Calculate the live memory usage for the system. This method has package visibility to allow - * using for testing. - * - * @return Memory usage in bytes. 
- */ - protected long calculateLiveMemoryUsage() { - // NOTE: MemoryUsageGaugeSet provides memory usage statistics but we do not use them - // here since it will require extra allocations and incur cost, hence it is cheaper to use - // MemoryMXBean directly. Ideally, this call should not add noticeable - // latency to a query -- but if it does, please signify on SOLR-14588 - return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); + @Override + public void close() throws IOException { + synchronized (MemoryCircuitBreaker.class) { + if (averagingMetricProvider != null && averagingMetricProvider.getRefcount() > 0) { + averagingMetricProvider.decref(); + } + } } } diff --git a/solr/core/src/java/org/apache/solr/util/tracing/TraceUtils.java b/solr/core/src/java/org/apache/solr/util/tracing/TraceUtils.java index 4d305f373fd..7e9daa37bbc 100644 --- a/solr/core/src/java/org/apache/solr/util/tracing/TraceUtils.java +++ b/solr/core/src/java/org/apache/solr/util/tracing/TraceUtils.java @@ -26,6 +26,7 @@ import io.opentelemetry.context.propagation.TextMapPropagator; import java.util.List; import java.util.function.Consumer; +import java.util.function.Predicate; import javax.servlet.http.HttpServletRequest; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.request.SolrQueryRequest; @@ -64,6 +65,14 @@ public class TraceUtils { public static final String TAG_DB_TYPE_SOLR = "solr"; + public static final Predicate DEFAULT_IS_RECORDING = Span::isRecording; + + /** + * this should only be changed in the context of testing, otherwise it would risk not recording + * trace data. 
+ */ + public static Predicate IS_RECORDING = DEFAULT_IS_RECORDING; + public static Tracer getGlobalTracer() { return GlobalOpenTelemetry.getTracer("solr"); } @@ -94,7 +103,7 @@ public static void setHttpStatus(Span span, int httpStatus) { } public static void ifNotNoop(Span span, Consumer consumer) { - if (span.isRecording()) { + if (IS_RECORDING.test(span)) { consumer.accept(span); } } diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggable-circuitbreaker.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggable-circuitbreaker.xml index 8719a00ea7b..52956f60824 100644 --- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggable-circuitbreaker.xml +++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggable-circuitbreaker.xml @@ -98,6 +98,10 @@ 75 + + 3 + + text diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java index 4491476f030..5da2f862ea2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java @@ -79,15 +79,22 @@ public void testSimpleSliceLeaderElection() throws Exception { .getCoreDescriptor() .getCloudDescriptor() .getShardId()); + String jettyNodeName = jetty.getNodeName(); // must get before shutdown jetty.stop(); stoppedRunners.add(jetty); + waitForState( + "Leader should not be " + jettyNodeName, + collection, + (n, c) -> + c.getLeader("shard1") != null + && !jettyNodeName.equals(c.getLeader("shard1").getNodeName())); } for (JettySolrRunner runner : stoppedRunners) { runner.start(); } waitForState( - "Expected to see nodes come back " + collection, collection, (n, c) -> n.size() == 6); + "Expected to see nodes come back for " + collection, collection, (n, c) -> n.size() == 6); CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient()); 
// testLeaderElectionAfterClientTimeout @@ -99,6 +106,7 @@ public void testSimpleSliceLeaderElection() throws Exception { // timeout the leader String leader = getLeader(collection); JettySolrRunner jetty = getRunner(leader); + assertNotNull(jetty); cluster.expireZkSession(jetty); for (int i = 0; i < 60; i++) { // wait till leader is changed diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java index 760d3b73d02..29c103cb9a3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java @@ -45,6 +45,7 @@ import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; @@ -1317,4 +1318,31 @@ public void testConfigCaching() throws Exception { cl.shutdown(); } } + + @Test + public void testCreateCollectionBooleanValues() throws Exception { + try (CloudSolrClient client = createCloudClient(null)) { + String collectionName = "testCreateCollectionBooleanValues"; + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("action", CollectionParams.CollectionAction.CREATE.toString()); + params.set("name", collectionName); + params.set("collection.configName", "conf1"); + params.set("numShards", "1"); + params.set(CollectionAdminParams.PER_REPLICA_STATE, "False"); + QueryRequest request = new QueryRequest(params); + request.setPath("/admin/collections"); + + try { + client.request(request); + waitForCollection(ZkStateReader.from(cloudClient), collectionName, 1); + } finally { + try { + 
CollectionAdminRequest.deleteCollection(collectionName).process(client); + } catch (Exception e) { + // Delete if possible, ignore otherwise. If the test failed, let the original exception + // bubble up + } + } + } + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index 6e1f4221943..3e1ca33963c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -43,16 +43,19 @@ import org.apache.solr.common.cloud.PerReplicaStates; import org.apache.solr.common.cloud.PerReplicaStatesOps; import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.CommonTestInjection; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.Utils; import org.apache.solr.common.util.ZLibCompressor; import org.apache.solr.handler.admin.ConfigSetsHandler; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TimeOut; +import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.Before; import org.slf4j.Logger; @@ -664,4 +667,121 @@ public void testWatchRaceCondition() throws Exception { ExecutorUtil.awaitTermination(executorService); } } + + /** + * Ensure that collection state fetching (getCollectionLive etc.) 
would not throw exception when + * the state.json is deleted in between the state.json read and PRS entries read + */ + public void testDeletePrsCollection() throws Exception { + ZkStateWriter writer = fixture.writer; + ZkStateReader reader = fixture.reader; + + String collectionName = "c1"; + fixture.zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true); + + ClusterState clusterState = reader.getClusterState(); + + String nodeName = "node1:10000_solr"; + String sliceName = "shard1"; + Slice slice = new Slice(sliceName, Map.of(), Map.of(), collectionName); + + // create new collection + DocCollection state = + DocCollection.create( + collectionName, + Map.of(sliceName, slice), + Collections.singletonMap(DocCollection.CollectionStateProps.PER_REPLICA_STATE, true), + DocRouter.DEFAULT, + 0, + PerReplicaStatesOps.getZkClientPrsSupplier( + fixture.zkClient, DocCollection.getCollectionPath(collectionName))); + ZkWriteCommand wc = new ZkWriteCommand(collectionName, state); + writer.enqueueUpdate(clusterState, Collections.singletonList(wc), null); + clusterState = writer.writePendingUpdates(); + + TimeOut timeOut = new TimeOut(5000, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + timeOut.waitFor( + "Timeout on waiting for c1 to show up in cluster state", + () -> reader.getClusterState().getCollectionOrNull(collectionName) != null); + + String collectionPath = ZkStateReader.getCollectionPath(collectionName); + + // now create the replica, take note that this has to be done after DocCollection creation with + // empty slice, otherwise the DocCollection ctor would fetch the PRS entries and throw + // exceptions + String replicaBaseUrl = Utils.getBaseUrlForNodeName(nodeName, "http"); + + String replicaName = "replica1"; + Replica replica = + new Replica( + replicaName, + Map.of( + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.STATE_PROP, + Replica.State.ACTIVE.toString(), + ZkStateReader.NODE_NAME_PROP, + nodeName, + 
ZkStateReader.BASE_URL_PROP, + replicaBaseUrl, + ZkStateReader.REPLICA_TYPE, + Replica.Type.NRT.name()), + collectionName, + sliceName); + + wc = + new ZkWriteCommand( + collectionName, SliceMutator.updateReplica(state, slice, replica.getName(), replica)); + writer.enqueueUpdate(clusterState, Collections.singletonList(wc), null); + clusterState = writer.writePendingUpdates(); + + timeOut.waitFor( + "Timeout on waiting for replica to show up in cluster state", + () -> + reader.getCollectionLive(collectionName).getSlice(sliceName).getReplica(replicaName) + != null); + + try (CommonTestInjection.BreakpointSetter breakpointSetter = + new CommonTestInjection.BreakpointSetter()) { + // set breakpoint such that after state.json fetch and before PRS entry fetch, we can delete + // the state.json and PRS entries to trigger the race condition + breakpointSetter.setImplementation( + PerReplicaStatesOps.class.getName() + "/beforePrsFetch", + (args) -> { + try { + // this is invoked after ZkStateReader.fetchCollectionState has fetched the state.json + // but before PRS entries. 
+ // call delete state.json on ZK directly, very tricky to control execution order with + // writer.enqueueUpdate + reader.getZkClient().clean(collectionPath); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } catch (KeeperException e) { + throw new RuntimeException(e); + } + }); + + // set breakpoint to verify the expected PrsZkNodeNotFoundException is indeed thrown within + // the execution flow, such exception is caught within the logic and not thrown to the + // caller + AtomicBoolean prsZkNodeNotFoundExceptionThrown = new AtomicBoolean(false); + breakpointSetter.setImplementation( + ZkStateReader.class.getName() + "/exercised", + (args) -> { + if (args[0] instanceof PerReplicaStatesOps.PrsZkNodeNotFoundException) { + prsZkNodeNotFoundExceptionThrown.set(true); + } + }); + + timeOut.waitFor( + "Timeout waiting for collection state to become null", + () -> { + // this should not throw exception even if the PRS entry read is delayed artificially + // (by previous command) and deleted after the following getCollectionLive call + return reader.getCollectionLive(collectionName) == null; + }); + + assertTrue(prsZkNodeNotFoundExceptionThrown.get()); + } + } } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java index a3282439ac3..1ede297abdc 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java @@ -50,7 +50,7 @@ public void before() throws Exception { // make the ConfigSet immutable Files.writeString( tmpConfDir.resolve("configsetprops.json"), - new StringBuilder("{\"immutable\":\"true\"}"), + "{\"immutable\":\"true\"}", StandardCharsets.UTF_8); System.setProperty("managed.schema.mutable", "true"); diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java index 
e6b194858c9..5e828bc9be7 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java +++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java @@ -770,6 +770,45 @@ public void testCustomConfigSetService() throws Exception { } } + @Test + public void testDefaultCoresLocator() throws Exception { + String solrXml = ""; + CoreContainer cc = init(solrXml); + try { + assertTrue(cc.getCoresLocator() instanceof CorePropertiesLocator); + } finally { + cc.shutdown(); + } + } + + @Test + public void testCustomCoresLocator() throws Exception { + String solrXml = + "\n" + + "\n" + + "org.apache.solr.core.TestCoreContainer$CustomCoresLocator\n" + + ""; + CoreContainer cc = init(solrXml); + try { + assertTrue(cc.getCoresLocator() instanceof CustomCoresLocator); + assertSame(cc.getNodeConfig(), ((CustomCoresLocator) cc.getCoresLocator()).getNodeConfig()); + } finally { + cc.shutdown(); + } + } + + public static class CustomCoresLocator extends MockCoresLocator { + private final NodeConfig nodeConfig; + + public CustomCoresLocator(NodeConfig nodeConfig) { + this.nodeConfig = nodeConfig; + } + + public NodeConfig getNodeConfig() { + return nodeConfig; + } + } + private static class MockCoresLocator implements CoresLocator { List cores = new ArrayList<>(); @@ -799,6 +838,11 @@ public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) {} public List discover(CoreContainer cc) { return cores; } + + @Override + public CoreDescriptor reload(CoreDescriptor cd, CoreContainer cc) { + return cd; + } } @Test diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java index fbb2f5ecf57..30b038609cb 100644 --- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java +++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java @@ -743,7 +743,7 @@ private CoreContainer initGoodAndBad( NodeConfig config = SolrXmlConfig.fromString(solrHomeDirectory.toPath(), ""); // OK this 
should succeed, but at the end we should have recorded a series of errors. - return createCoreContainer(config, new CorePropertiesLocator(config.getCoreRootDirectory())); + return createCoreContainer(config, new CorePropertiesLocator(config)); } // We want to see that the core "heals itself" if an un-corrupted file is written to the diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java index 3445db478e4..c3761cd7b4b 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java @@ -303,11 +303,11 @@ public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndLongTypeIsGiven() { public void testFailAtConfigParseTimeWhenBoolTypeIsExpectedAndValueIsInvalidString() { String solrXml = - "NOT_A_BOOLEAN"; + "FOO"; SolrException thrown = assertThrows(SolrException.class, () -> SolrXmlConfig.fromString(solrHome, solrXml)); - assertEquals("invalid boolean value: NOT_A_BOOLEAN", thrown.getMessage()); + assertEquals("invalid boolean value: FOO", thrown.getMessage()); } public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndBoolTypeIsGiven() { diff --git a/solr/core/src/test/org/apache/solr/handler/RequestHandlerBaseTest.java b/solr/core/src/test/org/apache/solr/handler/RequestHandlerBaseTest.java index 455f1c26df9..c3f2f9bd865 100644 --- a/solr/core/src/test/org/apache/solr/handler/RequestHandlerBaseTest.java +++ b/solr/core/src/test/org/apache/solr/handler/RequestHandlerBaseTest.java @@ -26,8 +26,10 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; import com.codahale.metrics.Timer; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; @@ -145,6 +147,25 @@ 
public CoreContainer getCoreContainer() { assertEquals(SolrException.ErrorCode.SERVER_ERROR.code, normalizedSolrException.code()); } + @Test + public void testIsInternalShardRequest() { + final SolrQueryRequest solrQueryRequest = + new LocalSolrQueryRequest(solrCore, new ModifiableSolrParams()) { + @Override + public CoreContainer getCoreContainer() { + return coreContainer; + } + }; + + assertFalse(RequestHandlerBase.isInternalShardRequest(solrQueryRequest)); + + solrQueryRequest.setParams(new MapSolrParams(Map.of("isShard", "true"))); + assertTrue(RequestHandlerBase.isInternalShardRequest(solrQueryRequest)); + + solrQueryRequest.setParams(new MapSolrParams(Map.of("distrib.from", "http://foo:1234/solr"))); + assertTrue(RequestHandlerBase.isInternalShardRequest(solrQueryRequest)); + } + // Ideally we wouldn't need to use mocks here, but HandlerMetrics requires a SolrMetricsContext, // which // requires a MetricsManager, which requires ... diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/ReloadCoreAPITest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/ReloadCoreAPITest.java new file mode 100644 index 00000000000..cd0fdc32324 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/ReloadCoreAPITest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.admin.api; + +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.api.model.SolrJerseyResponse; +import org.apache.solr.common.SolrException; +import org.apache.solr.core.CoreContainer; +import org.apache.solr.handler.admin.CoreAdminHandler; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ReloadCoreAPITest extends SolrTestCaseJ4 { + + private ReloadCoreAPI reloadCoreAPI; + private static final String NON_EXISTENT_CORE = "non_existent_core"; + + @BeforeClass + public static void initializeCoreAndRequestFactory() throws Exception { + initCore("solrconfig.xml", "schema.xml"); + lrf = h.getRequestFactory("/api", 0, 10); + } + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + SolrQueryRequest solrQueryRequest = req(); + SolrQueryResponse solrQueryResponse = new SolrQueryResponse(); + CoreContainer coreContainer = h.getCoreContainer(); + CoreAdminHandler.CoreAdminAsyncTracker coreAdminAsyncTracker = + new CoreAdminHandler.CoreAdminAsyncTracker(); + reloadCoreAPI = + new ReloadCoreAPI( + solrQueryRequest, solrQueryResponse, coreContainer, coreAdminAsyncTracker); + } + + @Test + public void testValidReloadCoreAPIResponse() throws Exception { + SolrJerseyResponse response = + reloadCoreAPI.reloadCore(coreName, new ReloadCoreAPI.ReloadCoreRequestBody()); + assertEquals(0, response.responseHeader.status); + assertNotNull(response.responseHeader.qTime); + } + + @Test + public void testNonExistentCoreExceptionResponse() { + final SolrException solrException = + expectThrows( + SolrException.class, + () -> { + reloadCoreAPI.reloadCore( + NON_EXISTENT_CORE, new ReloadCoreAPI.ReloadCoreRequestBody()); + }); + 
assertEquals(400, solrException.code()); + assertTrue(solrException.getMessage().contains("No such core: " + NON_EXISTENT_CORE)); + } + + @AfterClass // unique core per test + public static void coreDestroy() { + deleteCore(); + } +} diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java index e6ecf9fd903..a2a36af2c4f 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java @@ -67,7 +67,6 @@ public boolean isCoreSpecific() { @Override public void populateApiBag() { final CoreAdminHandler handler = getRequestHandler(); - apiBag.registerObject(new ReloadCoreAPI(handler)); apiBag.registerObject(new SwapCoresAPI(handler)); apiBag.registerObject(new RenameCoreAPI(handler)); apiBag.registerObject(new UnloadCoreAPI(handler)); @@ -81,15 +80,6 @@ public void populateApiBag() { apiBag.registerObject(new RequestCoreCommandStatusAPI(handler)); } - @Test - public void testReloadCoreAllParams() throws Exception { - final SolrParams v1Params = - captureConvertedV1Params("/cores/coreName", "POST", "{\"reload\": {}}"); - - assertEquals("reload", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - } - @Test public void testSwapCoresAllParams() throws Exception { final SolrParams v1Params = diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java index 7f77a701d05..06e9a59f54c 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java @@ -31,6 +31,7 @@ import org.apache.http.client.HttpClient; import org.apache.lucene.tests.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; +import 
org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.common.util.Utils; @@ -233,47 +234,49 @@ public void testZkMetrics() throws Exception { try { JettySolrRunner j = cluster.getRandomJetty(random()); String url = j.getBaseUrl() + "/admin/metrics?key=solr.node:CONTAINER.zkClient&wt=json"; - HttpClient httpClient = ((HttpSolrClient) j.newClient()).getHttpClient(); - @SuppressWarnings("unchecked") - Map zkMmetrics = - (Map) - Utils.getObjectByPath( - Utils.executeGET(httpClient, url, Utils.JSONCONSUMER), - false, - List.of("metrics", "solr.node:CONTAINER.zkClient")); + try (SolrClient solrClient = j.newClient()) { + HttpClient httpClient = ((HttpSolrClient) solrClient).getHttpClient(); + @SuppressWarnings("unchecked") + Map zkMmetrics = + (Map) + Utils.getObjectByPath( + Utils.executeGET(httpClient, url, Utils.JSONCONSUMER), + false, + List.of("metrics", "solr.node:CONTAINER.zkClient")); - Set allKeys = - Set.of( - "watchesFired", - "reads", - "writes", - "bytesRead", - "bytesWritten", - "multiOps", - "cumulativeMultiOps", - "childFetches", - "cumulativeChildrenFetched", - "existsChecks", - "deletes"); + Set allKeys = + Set.of( + "watchesFired", + "reads", + "writes", + "bytesRead", + "bytesWritten", + "multiOps", + "cumulativeMultiOps", + "childFetches", + "cumulativeChildrenFetched", + "existsChecks", + "deletes"); - for (String k : allKeys) { - assertNotNull(zkMmetrics.get(k)); - } - Utils.executeGET( - httpClient, - j.getBaseURLV2() + "/cluster/zookeeper/children/live_nodes", - Utils.JSONCONSUMER); - @SuppressWarnings("unchecked") - Map zkMmetricsNew = - (Map) - Utils.getObjectByPath( - Utils.executeGET(httpClient, url, Utils.JSONCONSUMER), - false, - List.of("metrics", "solr.node:CONTAINER.zkClient")); + for (String k : allKeys) { + assertNotNull(zkMmetrics.get(k)); + } + Utils.executeGET( + httpClient, + j.getBaseURLV2() + 
"/cluster/zookeeper/children/live_nodes", + Utils.JSONCONSUMER); + @SuppressWarnings("unchecked") + Map zkMmetricsNew = + (Map) + Utils.getObjectByPath( + Utils.executeGET(httpClient, url, Utils.JSONCONSUMER), + false, + List.of("metrics", "solr.node:CONTAINER.zkClient")); - assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "childFetches") >= 1); - assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "cumulativeChildrenFetched") >= 3); - assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "existsChecks") >= 4); + assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "childFetches") >= 1); + assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "cumulativeChildrenFetched") >= 3); + assertTrue(findDelta(zkMmetrics, zkMmetricsNew, "existsChecks") >= 4); + } } finally { cluster.shutdown(); } diff --git a/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java b/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java index 393674c2eca..c68ada54af6 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java @@ -118,10 +118,14 @@ private static void initCloud() throws Exception { @AfterClass public static void afterClass() throws Exception { + if (CLIENT != null) { + org.apache.solr.common.util.IOUtils.closeQuietly(CLIENT); + CLIENT = null; + } if (JSR != null) { JSR.stop(); + JSR = null; } - // NOTE: CLOUD_CLIENT should be stopped automatically in `SolrCloudTestCase.shutdownCluster()` } @After diff --git a/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java index 8a89a6b6422..03b3fa0d750 100644 --- a/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java @@ -461,7 +461,7 @@ public void indexing_correctDocument_shouldBeIndexed() throws Exception { @Test public void 
indexing_highDimensionalityVectorDocument_shouldBeIndexed() throws Exception { try { - initCore("solrconfig-basic.xml", "schema-densevector-high-dimensionality.xml"); + initCore("solrconfig_codec.xml", "schema-densevector-high-dimensionality.xml"); List highDimensionalityVector = new ArrayList<>(); for (float i = 0; i < 2048f; i++) { diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java index 489809f549e..7c062695097 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java @@ -90,13 +90,13 @@ public void collect(int doc) throws IOException { @Override @SuppressWarnings({"unchecked"}) - public void finish() throws IOException { + public void complete() throws IOException { @SuppressWarnings({"rawtypes"}) NamedList analytics = new NamedList(); rb.rsp.add("analytics", analytics); analytics.add("mycount", count + base); if (this.delegate instanceof DelegatingCollector) { - ((DelegatingCollector) this.delegate).finish(); + ((DelegatingCollector) this.delegate).complete(); } } } diff --git a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java index cc64c320e50..12c8ec9dfa0 100644 --- a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java +++ b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java @@ -81,7 +81,7 @@ public void testEmptyCollectionDoesNotThrow() throws Exception { query.getAnalyticsCollector( new ResponseBuilder(null, resp, Collections.emptyList()), searcher.get()); assertNotNull(analyticsCollector); - analyticsCollector.finish(); + analyticsCollector.complete(); LinkedHashMap expectedValues = new LinkedHashMap<>(); expectedValues.put("numDocs", 0); expectedValues.put("sterms", 
new ArrayList()); @@ -116,7 +116,7 @@ public void testCollectionWithDocuments() throws Exception { DelegatingCollector analyticsCollector = query.getAnalyticsCollector(responseBuilder, searcher.get()); assertNotNull(analyticsCollector); - analyticsCollector.finish(); + analyticsCollector.complete(); LinkedHashMap expectedValues = new LinkedHashMap<>(); expectedValues.put("numDocs", 1); diff --git a/solr/core/src/test/org/apache/solr/search/TestCoordinatorRole.java b/solr/core/src/test/org/apache/solr/search/TestCoordinatorRole.java index 581f048785d..f55aca49ae8 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCoordinatorRole.java +++ b/solr/core/src/test/org/apache/solr/search/TestCoordinatorRole.java @@ -21,8 +21,6 @@ import static org.apache.solr.common.params.CommonParams.TRUE; import java.lang.invoke.MethodHandles; -import java.net.HttpURLConnection; -import java.net.URL; import java.util.ArrayList; import java.util.Date; import java.util.EnumSet; @@ -37,10 +35,13 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.stream.Collectors; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpGet; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.Http2SolrClient; +import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.UpdateRequest; @@ -617,23 +618,27 @@ public void testConfigset() throws Exception { // Tricky to test configset, since operation such as collection status would direct it to the // OS node. 
- // So we use query and check the cache response header which is determined by the + // So we use query and check the cache response header, which is determined by the // solr-config.xml in the configset - // However using solr client would drop cache respons header hence we need to use plain java - // HttpURLConnection - URL url = new URL(coordinatorJetty.getBaseUrl() + "/c1/select?q=*:*"); - HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection(); - urlConnection.connect(); - - // conf1 has no cache-control - assertNull(urlConnection.getHeaderField("cache-control")); - - url = new URL(coordinatorJetty.getBaseUrl() + "/c2/select?q=*:*"); - urlConnection = (HttpURLConnection) url.openConnection(); - urlConnection.connect(); - - // conf2 has cache-control defined - assertTrue(urlConnection.getHeaderField("cache-control").contains("max-age=30")); + // However using solr client would drop cache response header, hence we need to use the + // underlying httpClient which has SSL correctly configured + + try (HttpSolrClient solrClient = + new HttpSolrClient.Builder(coordinatorJetty.getBaseUrl().toString()).build()) { + HttpResponse response = + solrClient + .getHttpClient() + .execute(new HttpGet(coordinatorJetty.getBaseUrl() + "/c1/select?q=*:*")); + // conf1 has no cache-control + assertNull(response.getFirstHeader("cache-control")); + + response = + solrClient + .getHttpClient() + .execute(new HttpGet(coordinatorJetty.getBaseUrl() + "/c2/select?q=*:*")); + // conf2 has cache-control defined + assertTrue(response.getFirstHeader("cache-control").getValue().contains("max-age=30")); + } } finally { cluster.shutdown(); } diff --git a/solr/core/src/test/org/apache/solr/search/TestDocSet.java b/solr/core/src/test/org/apache/solr/search/TestDocSet.java index 0c41d642f27..60a3d8c91e3 100644 --- a/solr/core/src/test/org/apache/solr/search/TestDocSet.java +++ b/solr/core/src/test/org/apache/solr/search/TestDocSet.java @@ -44,7 +44,7 @@ import 
org.apache.lucene.index.Terms; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.Bits; @@ -383,16 +383,12 @@ public StoredFields storedFields() { } @Override - public TopDocs searchNearestVectors( - String field, float[] target, int k, Bits acceptDoc, int visitedLimits) { - return null; - } + public void searchNearestVectors( + String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) {} @Override - public TopDocs searchNearestVectors( - String field, byte[] target, int k, Bits acceptDoc, int visitedLimits) { - return null; - } + public void searchNearestVectors( + String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) {} @Override protected void doClose() {} diff --git a/solr/core/src/test/org/apache/solr/search/TestDocValuesIteratorCache.java b/solr/core/src/test/org/apache/solr/search/TestDocValuesIteratorCache.java new file mode 100644 index 00000000000..338be897c70 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/search/TestDocValuesIteratorCache.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Random; +import java.util.Set; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.core.SolrCore; +import org.apache.solr.index.NoMergePolicyFactory; +import org.apache.solr.util.EmbeddedSolrServerTestRule; +import org.apache.solr.util.RefCounted; +import org.apache.solr.util.SolrClientTestRule; +import org.junit.ClassRule; + +public class TestDocValuesIteratorCache extends SolrTestCaseJ4 { + + private static final int DOC_COUNT = 1000; + + @ClassRule + public static final SolrClientTestRule solrClientTestRule = + new EmbeddedSolrServerTestRule() { + @Override + protected void before() throws Throwable { + // must set NoMergePolicyFactory, because OrdinalMap building depends on the predictable + // existence of multiple segments; if the merge policy happens to combine into a single + // segment, no OrdinalMap will be built, throwing off our tests + systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName()); + startSolr(LuceneTestCase.createTempDir()); + } + + @Override + protected void after() { + systemClearPropertySolrTestsMergePolicyFactory(); + super.after(); + } + }; + + private static String fieldConfig(String fieldName, boolean multivalued) { + return "\n"; + } + + private static final String SINGLE = "single"; + private static final String MULTI = "multi"; + + @SuppressWarnings("try") + public void test() 
throws Exception { + Path configSet = LuceneTestCase.createTempDir(); + SolrTestCaseJ4.copyMinConf(configSet.toFile()); + Path schemaXml = configSet.resolve("conf/schema.xml"); + Files.writeString( + schemaXml, + Files.readString(schemaXml) + .replace( + "", fieldConfig(SINGLE, false) + fieldConfig(MULTI, true) + "")); + + solrClientTestRule.newCollection().withConfigSet(configSet.toString()).create(); + + SolrClient client = solrClientTestRule.getSolrClient(); + + Random r = random(); + String[][] expectVals = indexDocs(client, r); + + try (SolrCore core = + ((EmbeddedSolrServer) client).getCoreContainer().getCore(DEFAULT_TEST_CORENAME)) { + RefCounted sref = core.getSearcher(); + try (Closeable c = sref::decref) { + SolrIndexSearcher s = sref.get(); + assertEquals(DOC_COUNT, s.maxDoc()); + SolrDocumentFetcher docFetcher = s.getDocFetcher(); + DocValuesIteratorCache dvIterCache = new DocValuesIteratorCache(s); + final Set getFields = Set.of(SINGLE, MULTI); + final SolrDocument doc = new SolrDocument(); + for (int i = DOC_COUNT * 10; i >= 0; i--) { + int checkId = r.nextInt(DOC_COUNT); + doc.clear(); + docFetcher.decorateDocValueFields(doc, checkId, getFields, dvIterCache); + String[] expected = expectVals[checkId]; + if (expected == null) { + assertTrue(doc.isEmpty()); + } else { + assertEquals(2, doc.size()); + Object singleValue = doc.getFieldValue(SINGLE); + Collection actualVals = doc.getFieldValues(MULTI); + assertEquals(expected.length, actualVals.size() + 1); // +1 for single-valued field + assertEquals(expected[0], singleValue); + int j = 1; + for (Object o : actualVals) { + assertEquals(expected[j++], o); + } + } + } + } + } + } + + private String[][] indexDocs(SolrClient client, Random r) + throws SolrServerException, IOException { + String[][] ret = new String[DOC_COUNT][]; + int pct = r.nextInt(100); + for (int i = 0; i < DOC_COUNT; i++) { + if (r.nextInt(100) > pct) { + client.add(sdoc("id", Integer.toString(i))); + } else { + String str = 
TestUtil.randomSimpleString(r); + String str1 = TestUtil.randomSimpleString(r); + String str2 = TestUtil.randomSimpleString(r); + client.add(sdoc("id", Integer.toString(i), SINGLE, str, MULTI, str1, MULTI, str2)); + int cmp = str1.compareTo(str2); + if (cmp == 0) { + ret[i] = new String[] {str, str1}; + } else if (cmp < 0) { + ret[i] = new String[] {str, str1, str2}; + } else { + ret[i] = new String[] {str, str2, str1}; + } + } + if (r.nextInt(DOC_COUNT / 5) == 0) { + // aim for 5 segments + client.commit(); + } + } + client.commit(); + return ret; + } +} diff --git a/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java b/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java index 40cfb8a5019..0be315afc21 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.common.params.SolrParams; @@ -87,7 +86,7 @@ public static void buildIndexes(final List clients, final String sta assertEquals("This indexing code assumes exactly 3 shards/clients", 3, clients.size()); - final AtomicInteger docNum = new AtomicInteger(); + int docNum = 0; final SolrClient shard0 = clients.get(0); final SolrClient shard1 = clients.get(1); final SolrClient shard2 = clients.get(2); @@ -95,9 +94,9 @@ public static void buildIndexes(final List clients, final String sta // the 5 top foo_s terms have 100 docs each on every shard for (int i = 0; i < 100; i++) { for (int j = 0; j < 5; j++) { - shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, j * 13 - i)); - shard1.add(sdoc("id", 
docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, j * 3 + i)); - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, i * 7 + j)); + shard0.add(sdoc("id", docNum++, "foo_s", "aaa" + j, statField, j * 13 - i)); + shard1.add(sdoc("id", docNum++, "foo_s", "aaa" + j, statField, j * 3 + i)); + shard2.add(sdoc("id", docNum++, "foo_s", "aaa" + j, statField, i * 7 + j)); } } @@ -105,14 +104,14 @@ public static void buildIndexes(final List clients, final String sta // on both shard0 & shard1 ("bbb_") for (int i = 0; i < 50; i++) { for (int j = 0; j < 20; j++) { - shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb" + j, statField, 0)); - shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb" + j, statField, 1)); + shard0.add(sdoc("id", docNum++, "foo_s", "bbb" + j, statField, 0)); + shard1.add(sdoc("id", docNum++, "foo_s", "bbb" + j, statField, 1)); } // distracting term appears on only on shard2 50 times - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "junkA")); + shard2.add(sdoc("id", docNum++, "foo_s", "junkA")); } // put "bbb0" on shard2 exactly once to sanity check refinement - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb0", statField, -2)); + shard2.add(sdoc("id", docNum++, "foo_s", "bbb0", statField, -2)); // long 'tail' foo_s term appears in 45 docs on every shard // foo_s:tail is the only term with bar_s sub-pivot terms @@ -122,23 +121,21 @@ public static void buildIndexes(final List clients, final String sta // but the top 5 terms are ccc(0-4) -- 7 on each shard // (4 docs each have junk terms) String sub_term = (i < 35) ? "ccc" + (i % 5) : ((i < 41) ? 
"tailB" : "junkA"); - shard0.add( - sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); - shard1.add( - sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); + shard0.add(sdoc("id", docNum++, "foo_s", "tail", "bar_s", sub_term, statField, i)); + shard1.add(sdoc("id", docNum++, "foo_s", "tail", "bar_s", sub_term, statField, i)); // shard2's top 5 sub-pivot terms are junk only it has with 8 docs each // and 5 docs that use "tailB" // NOTE: none of these get statField ! ! sub_term = (i < 40) ? "junkB" + (i % 5) : "tailB"; - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term)); + shard2.add(sdoc("id", docNum++, "foo_s", "tail", "bar_s", sub_term)); } // really long tail uncommon foo_s terms on shard2 for (int i = 0; i < 30; i++) { // NOTE: using "Z" here so these sort before bbb0 when they tie for '1' instance each on // shard2 - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "ZZZ" + i)); + shard2.add(sdoc("id", docNum++, "foo_s", "ZZZ" + i)); } } diff --git a/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java b/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java index 92a05ce5593..5cf2fd41f97 100644 --- a/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java @@ -39,7 +39,7 @@ public class KnnQParserTest extends SolrTestCaseJ4 { @Before public void prepareIndex() throws Exception { /* vectorDimension="4" similarityFunction="cosine" */ - initCore("solrconfig-basic.xml", "schema-densevector.xml"); + initCore("solrconfig_codec.xml", "schema-densevector.xml"); List docsToIndex = this.prepareDocs(); for (SolrInputDocument doc : docsToIndex) { diff --git a/solr/core/src/test/org/apache/solr/servlet/HideStackTraceTest.java b/solr/core/src/test/org/apache/solr/servlet/HideStackTraceTest.java index dc6af6f2716..54ac3e28b5a 100644 --- 
a/solr/core/src/test/org/apache/solr/servlet/HideStackTraceTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/HideStackTraceTest.java @@ -145,13 +145,16 @@ public void testHideStackTrace() throws Exception { final String url = solrRule.getBaseUrl().toString() + "/collection1/withError?q=*:*&wt=json"; final HttpGet get = new HttpGet(url); - try (var client = HttpClientUtil.createClient(null); - CloseableHttpResponse response = client.execute(get)) { + var client = HttpClientUtil.createClient(null); + try (CloseableHttpResponse response = client.execute(get)) { + assertEquals(500, response.getStatusLine().getStatusCode()); String responseJson = EntityUtils.toString(response.getEntity()); assertFalse(responseJson.contains("\"trace\"")); assertFalse( responseJson.contains("org.apache.solr.servlet.HideStackTraceTest$ErrorComponent")); + } finally { + HttpClientUtil.close(client); } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java index 25f66221f73..bca3918786f 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java @@ -65,6 +65,7 @@ public void testComprehensive() throws Exception { "//long[@name='min_foo_l'][.='-34']"); } + @SuppressWarnings("UnnecessaryStringBuilder") public void testTrimAll() throws Exception { SolrInputDocument d = null; diff --git a/solr/core/src/test/org/apache/solr/util/BaseTestCircuitBreaker.java b/solr/core/src/test/org/apache/solr/util/BaseTestCircuitBreaker.java index 607bf31c617..14c83df771a 100644 --- a/solr/core/src/test/org/apache/solr/util/BaseTestCircuitBreaker.java +++ b/solr/core/src/test/org/apache/solr/util/BaseTestCircuitBreaker.java @@ -19,6 +19,7 @@ import static org.hamcrest.CoreMatchers.containsString; +import java.io.IOException; 
import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.List; @@ -30,9 +31,11 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.core.SolrCore; import org.apache.solr.util.circuitbreaker.CPUCircuitBreaker; import org.apache.solr.util.circuitbreaker.CircuitBreaker; import org.apache.solr.util.circuitbreaker.CircuitBreakerManager; +import org.apache.solr.util.circuitbreaker.LoadAverageCircuitBreaker; import org.apache.solr.util.circuitbreaker.MemoryCircuitBreaker; import org.hamcrest.MatcherAssert; import org.junit.After; @@ -41,6 +44,8 @@ public abstract class BaseTestCircuitBreaker extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final CircuitBreaker dummyMemBreaker = new MemoryCircuitBreaker(); + private static final CircuitBreaker dummyCBManager = new CircuitBreakerManager(); protected static void indexDocs() { removeAllExistingCircuitBreakers(); @@ -60,11 +65,13 @@ protected static void indexDocs() { @Override public void tearDown() throws Exception { super.tearDown(); + dummyMemBreaker.close(); + dummyCBManager.close(); } @After public void after() { - h.getCore().getCircuitBreakerRegistry().deregisterAll(); + removeAllExistingCircuitBreakers(); } public void testCBAlwaysTrips() { @@ -114,68 +121,42 @@ public void testCBFakeMemoryPressure() throws Exception { } public void testBadRequestType() { + expectThrows( IllegalArgumentException.class, - () -> new MemoryCircuitBreaker().setRequestTypes(List.of("badRequestType"))); + () -> dummyMemBreaker.setRequestTypes(List.of("badRequestType"))); } public void testBuildingMemoryPressure() { - ExecutorService executor = - ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("TestCircuitBreaker")); + MemoryCircuitBreaker circuitBreaker = new 
BuildingUpMemoryPressureCircuitBreaker(); + circuitBreaker.setThreshold(75); - AtomicInteger failureCount = new AtomicInteger(); - - try { - removeAllExistingCircuitBreakers(); - - CircuitBreaker circuitBreaker = new BuildingUpMemoryPressureCircuitBreaker(); - MemoryCircuitBreaker memoryCircuitBreaker = (MemoryCircuitBreaker) circuitBreaker; - - memoryCircuitBreaker.setThreshold(75); - - h.getCore().getCircuitBreakerRegistry().register(circuitBreaker); + assertThatHighQueryLoadTrips(circuitBreaker, 1); + } - List> futures = new ArrayList<>(); + public void testFakeCPUCircuitBreaker() { + CPUCircuitBreaker circuitBreaker = new FakeCPUCircuitBreaker(h.getCore()); + circuitBreaker.setThreshold(75); - for (int i = 0; i < 5; i++) { - Future future = - executor.submit( - () -> { - try { - h.query(req("name:\"john smith\"")); - } catch (SolrException e) { - MatcherAssert.assertThat( - e.getMessage(), containsString("Circuit Breakers tripped")); - failureCount.incrementAndGet(); - } catch (Exception e) { - throw new RuntimeException(e.getMessage()); - } - }); + assertThatHighQueryLoadTrips(circuitBreaker, 5); + } - futures.add(future); - } + public void testFakeLoadAverageCircuitBreaker() { + LoadAverageCircuitBreaker circuitBreaker = new FakeLoadAverageCircuitBreaker(); + circuitBreaker.setThreshold(75); - for (Future future : futures) { - try { - future.get(); - } catch (Exception e) { - throw new RuntimeException(e.getMessage()); - } - } - } finally { - ExecutorUtil.shutdownAndAwaitTermination(executor); - assertEquals("Number of failed queries is not correct", 1, failureCount.get()); - } + assertThatHighQueryLoadTrips(circuitBreaker, 5); } - public void testFakeCPUCircuitBreaker() { + /** + * Common assert method to be reused in tests + * + * @param circuitBreaker the breaker to test + * @param numShouldTrip the number of queries that should trip the breaker + */ + private void assertThatHighQueryLoadTrips(CircuitBreaker circuitBreaker, int numShouldTrip) { 
removeAllExistingCircuitBreakers(); - CircuitBreaker circuitBreaker = new FakeCPUCircuitBreaker(); - CPUCircuitBreaker cpuCircuitBreaker = (CPUCircuitBreaker) circuitBreaker; - - cpuCircuitBreaker.setThreshold(75); - h.getCore().getCircuitBreakerRegistry().register(circuitBreaker); AtomicInteger failureCount = new AtomicInteger(); @@ -212,7 +193,7 @@ public void testFakeCPUCircuitBreaker() { } } finally { ExecutorUtil.shutdownAndAwaitTermination(executor); - assertEquals("Number of failed queries is not correct", 5, failureCount.get()); + assertEquals("Number of failed queries is not correct", numShouldTrip, failureCount.get()); } } @@ -261,17 +242,21 @@ public void testResponseWithCBTiming() { "//lst[@name='process']/double[@name='time']"); } - public void testErrorCode() { + public void testErrorCode() throws Exception { assertEquals( SolrException.ErrorCode.SERVICE_UNAVAILABLE, - CircuitBreaker.getErrorCode(List.of(new CircuitBreakerManager()))); + CircuitBreaker.getErrorCode(List.of(dummyCBManager))); assertEquals( SolrException.ErrorCode.TOO_MANY_REQUESTS, - CircuitBreaker.getErrorCode(List.of(new MemoryCircuitBreaker()))); + CircuitBreaker.getErrorCode(List.of(dummyMemBreaker))); } private static void removeAllExistingCircuitBreakers() { - h.getCore().getCircuitBreakerRegistry().deregisterAll(); + try { + h.getCore().getCircuitBreakerRegistry().deregisterAll(); + } catch (IOException e) { + fail("Failed to unload circuit breakers"); + } } private static class MockCircuitBreaker extends MemoryCircuitBreaker { @@ -289,10 +274,12 @@ public boolean isTripped() { } private static class FakeMemoryPressureCircuitBreaker extends MemoryCircuitBreaker { + public FakeMemoryPressureCircuitBreaker() { + super(1, 1); + } @Override - protected long calculateLiveMemoryUsage() { - // Return a number large enough to trigger a pushback from the circuit breaker + protected long getAvgMemoryUsage() { return Long.MAX_VALUE; } } @@ -301,11 +288,12 @@ private static class 
BuildingUpMemoryPressureCircuitBreaker extends MemoryCircui private AtomicInteger count; public BuildingUpMemoryPressureCircuitBreaker() { + super(1, 1); this.count = new AtomicInteger(0); } @Override - protected long calculateLiveMemoryUsage() { + protected long getAvgMemoryUsage() { int localCount = count.getAndIncrement(); if (localCount >= 4) { @@ -330,9 +318,20 @@ protected long calculateLiveMemoryUsage() { } private static class FakeCPUCircuitBreaker extends CPUCircuitBreaker { + public FakeCPUCircuitBreaker(SolrCore core) { + super(core); + } + @Override protected double calculateLiveCPUUsage() { - return 92; // Return a value large enough to trigger the circuit breaker + return Double.MAX_VALUE; + } + } + + private static class FakeLoadAverageCircuitBreaker extends LoadAverageCircuitBreaker { + @Override + protected double calculateLiveLoadAverage() { + return Double.MAX_VALUE; } } } diff --git a/solr/core/src/test/org/apache/solr/util/TestUtils.java b/solr/core/src/test/org/apache/solr/util/TestUtils.java index 90b118b7f1c..949c2dc15fe 100644 --- a/solr/core/src/test/org/apache/solr/util/TestUtils.java +++ b/solr/core/src/test/org/apache/solr/util/TestUtils.java @@ -39,83 +39,12 @@ import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.Utils; import org.junit.Assert; /** */ public class TestUtils extends SolrTestCaseJ4 { - public void testJoin() { - assertEquals("a|b|c", StrUtils.join(asList("a", "b", "c"), '|')); - assertEquals("a,b,c", StrUtils.join(asList("a", "b", "c"), ',')); - assertEquals("a\\,b,c", StrUtils.join(asList("a,b", "c"), ',')); - assertEquals("a,b|c", StrUtils.join(asList("a,b", "c"), '|')); - - assertEquals("a\\\\b|c", StrUtils.join(asList("a\\b", "c"), '|')); - } - - public void testEscapeTextWithSeparator() { - assertEquals("a", 
StrUtils.escapeTextWithSeparator("a", '|')); - assertEquals("a", StrUtils.escapeTextWithSeparator("a", ',')); - - assertEquals("a\\|b", StrUtils.escapeTextWithSeparator("a|b", '|')); - assertEquals("a|b", StrUtils.escapeTextWithSeparator("a|b", ',')); - assertEquals("a,b", StrUtils.escapeTextWithSeparator("a,b", '|')); - assertEquals("a\\,b", StrUtils.escapeTextWithSeparator("a,b", ',')); - assertEquals("a\\\\b", StrUtils.escapeTextWithSeparator("a\\b", ',')); - - assertEquals("a\\\\\\,b", StrUtils.escapeTextWithSeparator("a\\,b", ',')); - } - - public void testSplitEscaping() { - List arr = StrUtils.splitSmart("\\r\\n:\\t\\f\\b", ":", true); - assertEquals(2, arr.size()); - assertEquals("\r\n", arr.get(0)); - assertEquals("\t\f\b", arr.get(1)); - - arr = StrUtils.splitSmart("\\r\\n:\\t\\f\\b", ":", false); - assertEquals(2, arr.size()); - assertEquals("\\r\\n", arr.get(0)); - assertEquals("\\t\\f\\b", arr.get(1)); - - arr = StrUtils.splitWS("\\r\\n \\t\\f\\b", true); - assertEquals(2, arr.size()); - assertEquals("\r\n", arr.get(0)); - assertEquals("\t\f\b", arr.get(1)); - - arr = StrUtils.splitWS("\\r\\n \\t\\f\\b", false); - assertEquals(2, arr.size()); - assertEquals("\\r\\n", arr.get(0)); - assertEquals("\\t\\f\\b", arr.get(1)); - - arr = StrUtils.splitSmart("\\:foo\\::\\:bar\\:", ":", true); - assertEquals(2, arr.size()); - assertEquals(":foo:", arr.get(0)); - assertEquals(":bar:", arr.get(1)); - - arr = StrUtils.splitWS("\\ foo\\ \\ bar\\ ", true); - assertEquals(2, arr.size()); - assertEquals(" foo ", arr.get(0)); - assertEquals(" bar ", arr.get(1)); - - arr = StrUtils.splitFileNames("/h/s,/h/\\,s,"); - assertEquals(2, arr.size()); - assertEquals("/h/s", arr.get(0)); - assertEquals("/h/,s", arr.get(1)); - - arr = StrUtils.splitFileNames("/h/s"); - assertEquals(1, arr.size()); - assertEquals("/h/s", arr.get(0)); - } - - public void testToLower() { - assertEquals(List.of(), StrUtils.toLower(List.of())); - assertEquals(List.of(""), 
StrUtils.toLower(List.of(""))); - assertEquals(List.of("foo"), StrUtils.toLower(List.of("foo"))); - assertEquals(List.of("bar", "baz-123"), StrUtils.toLower(List.of("BAR", "Baz-123"))); - } - public void testNamedLists() { SimpleOrderedMap map = new SimpleOrderedMap<>(); map.add("test", 10); diff --git a/solr/licenses/commons-io-2.11.0.jar.sha1 b/solr/licenses/commons-io-2.11.0.jar.sha1 deleted file mode 100644 index da98d09a124..00000000000 --- a/solr/licenses/commons-io-2.11.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a2503f302b11ebde7ebc3df41daebe0e4eea3689 diff --git a/solr/licenses/commons-io-2.14.0.jar.sha1 b/solr/licenses/commons-io-2.14.0.jar.sha1 new file mode 100644 index 00000000000..bf1e8ae3d24 --- /dev/null +++ b/solr/licenses/commons-io-2.14.0.jar.sha1 @@ -0,0 +1 @@ +a4c6e1f6c196339473cd2e1b037f0eb97c62755b diff --git a/solr/licenses/error_prone_annotations-2.18.0.jar.sha1 b/solr/licenses/error_prone_annotations-2.18.0.jar.sha1 deleted file mode 100644 index df2166e6403..00000000000 --- a/solr/licenses/error_prone_annotations-2.18.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -89b684257096f548fa39a7df9fdaa409d4d4df91 diff --git a/solr/licenses/error_prone_annotations-2.22.0.jar.sha1 b/solr/licenses/error_prone_annotations-2.22.0.jar.sha1 new file mode 100644 index 00000000000..a0a78a421a9 --- /dev/null +++ b/solr/licenses/error_prone_annotations-2.22.0.jar.sha1 @@ -0,0 +1 @@ +bfb9e4281a4cea34f0ec85b3acd47621cfab35b4 diff --git a/solr/licenses/lucene-analysis-common-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-common-9.7.0.jar.sha1 deleted file mode 100644 index 48853a554ef..00000000000 --- a/solr/licenses/lucene-analysis-common-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -27ba6caaa4587a982cd451f7217b5a982bcfc44a diff --git a/solr/licenses/lucene-analysis-common-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-common-9.8.0.jar.sha1 new file mode 100644 index 00000000000..6b9555a1364 --- /dev/null +++ b/solr/licenses/lucene-analysis-common-9.8.0.jar.sha1 @@ -0,0 +1 
@@ +36f0363325ca7bf62c180160d1ed5165c7c37795 diff --git a/solr/licenses/lucene-analysis-icu-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-icu-9.7.0.jar.sha1 deleted file mode 100644 index e17e815b879..00000000000 --- a/solr/licenses/lucene-analysis-icu-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94293b169fb8572f440a5a4a523320ecf9778ffe diff --git a/solr/licenses/lucene-analysis-icu-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-icu-9.8.0.jar.sha1 new file mode 100644 index 00000000000..512bf46865e --- /dev/null +++ b/solr/licenses/lucene-analysis-icu-9.8.0.jar.sha1 @@ -0,0 +1 @@ +7133d34e92770f59eb28686f4d511b9f3f32e970 diff --git a/solr/licenses/lucene-analysis-kuromoji-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-kuromoji-9.7.0.jar.sha1 deleted file mode 100644 index 1cb53b61d8e..00000000000 --- a/solr/licenses/lucene-analysis-kuromoji-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2df800a38b64867b8dcd61fc2cd986114e4a80cb diff --git a/solr/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 new file mode 100644 index 00000000000..4d7de125029 --- /dev/null +++ b/solr/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 @@ -0,0 +1 @@ +be44282e1f6b91a0650fcceb558053d6bdd4863d diff --git a/solr/licenses/lucene-analysis-morfologik-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-morfologik-9.7.0.jar.sha1 deleted file mode 100644 index 08b3abe8f12..00000000000 --- a/solr/licenses/lucene-analysis-morfologik-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfb4313f3c68d337310522840d7144c1605d084a diff --git a/solr/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 new file mode 100644 index 00000000000..db1649319c3 --- /dev/null +++ b/solr/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 @@ -0,0 +1 @@ +b054f2c7b11fc7c5601b4c3cdf18aa7508612898 diff --git a/solr/licenses/lucene-analysis-nori-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-nori-9.7.0.jar.sha1 deleted file 
mode 100644 index 47ae76abb25..00000000000 --- a/solr/licenses/lucene-analysis-nori-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a01e8153f34d72e8c8c0180c1dea5b10f677dd3a diff --git a/solr/licenses/lucene-analysis-nori-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-nori-9.8.0.jar.sha1 new file mode 100644 index 00000000000..cb1a7f78e51 --- /dev/null +++ b/solr/licenses/lucene-analysis-nori-9.8.0.jar.sha1 @@ -0,0 +1 @@ +bd1f80d33346f7e588685484ef29a304db5190e4 diff --git a/solr/licenses/lucene-analysis-opennlp-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-opennlp-9.7.0.jar.sha1 deleted file mode 100644 index 3419a3d6303..00000000000 --- a/solr/licenses/lucene-analysis-opennlp-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -af475c4941f250d6a94d26130e884798ca57b982 diff --git a/solr/licenses/lucene-analysis-opennlp-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-opennlp-9.8.0.jar.sha1 new file mode 100644 index 00000000000..92c8bd6d8a0 --- /dev/null +++ b/solr/licenses/lucene-analysis-opennlp-9.8.0.jar.sha1 @@ -0,0 +1 @@ +f878344c9742c33a42dd06b51a0a2e73cb49f16c diff --git a/solr/licenses/lucene-analysis-phonetic-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-phonetic-9.7.0.jar.sha1 deleted file mode 100644 index 3ea0a099d5a..00000000000 --- a/solr/licenses/lucene-analysis-phonetic-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b7d47d54683b0b1e09b271c32d1b7d3eb1990f49 diff --git a/solr/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 new file mode 100644 index 00000000000..8aa6dd91877 --- /dev/null +++ b/solr/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 @@ -0,0 +1 @@ +b9ffdc7a52d2087ecb03318ec06305b480cdfe82 diff --git a/solr/licenses/lucene-analysis-smartcn-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-smartcn-9.7.0.jar.sha1 deleted file mode 100644 index 29630dabbb0..00000000000 --- a/solr/licenses/lucene-analysis-smartcn-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e68b9816e6cff8ee15f5b350cf2ffa54f9828b7 diff 
--git a/solr/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 new file mode 100644 index 00000000000..40c216271ed --- /dev/null +++ b/solr/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 @@ -0,0 +1 @@ +f73e2007b133fb699e517ef13b4952844f0150d8 diff --git a/solr/licenses/lucene-analysis-stempel-9.7.0.jar.sha1 b/solr/licenses/lucene-analysis-stempel-9.7.0.jar.sha1 deleted file mode 100644 index 409b414b486..00000000000 --- a/solr/licenses/lucene-analysis-stempel-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d23b1f05b471e05d0d6068b3ece7c8c65672eae7 diff --git a/solr/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 b/solr/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 new file mode 100644 index 00000000000..08c368f78ac --- /dev/null +++ b/solr/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 @@ -0,0 +1 @@ +2c09cbc021a8f81a01600a1d2a999361e70f7aed diff --git a/solr/licenses/lucene-backward-codecs-9.7.0.jar.sha1 b/solr/licenses/lucene-backward-codecs-9.7.0.jar.sha1 deleted file mode 100644 index f186a6837fc..00000000000 --- a/solr/licenses/lucene-backward-codecs-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6389463bfbfcf902c8d31d12e9513a6818ac9d5e diff --git a/solr/licenses/lucene-backward-codecs-9.8.0.jar.sha1 b/solr/licenses/lucene-backward-codecs-9.8.0.jar.sha1 new file mode 100644 index 00000000000..bd8949e70af --- /dev/null +++ b/solr/licenses/lucene-backward-codecs-9.8.0.jar.sha1 @@ -0,0 +1 @@ +e98fb408028f40170e6d87c16422bfdc0bb2e392 diff --git a/solr/licenses/lucene-classification-9.7.0.jar.sha1 b/solr/licenses/lucene-classification-9.7.0.jar.sha1 deleted file mode 100644 index 7ed14dfe1a7..00000000000 --- a/solr/licenses/lucene-classification-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4e5eadd2126228e0c64ad5b6e5858ab9727a4356 diff --git a/solr/licenses/lucene-classification-9.8.0.jar.sha1 b/solr/licenses/lucene-classification-9.8.0.jar.sha1 new file mode 100644 index 00000000000..60037652b8f --- /dev/null +++ 
b/solr/licenses/lucene-classification-9.8.0.jar.sha1 @@ -0,0 +1 @@ +a9ba5a08bb0e34aba034b9411c33a5a977e5a571 diff --git a/solr/licenses/lucene-codecs-9.7.0.jar.sha1 b/solr/licenses/lucene-codecs-9.7.0.jar.sha1 deleted file mode 100644 index 12fdb9869ca..00000000000 --- a/solr/licenses/lucene-codecs-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ce18930d286e29c9f78f594c26cd5653ef3fe83e diff --git a/solr/licenses/lucene-codecs-9.8.0.jar.sha1 b/solr/licenses/lucene-codecs-9.8.0.jar.sha1 new file mode 100644 index 00000000000..c01ff967edd --- /dev/null +++ b/solr/licenses/lucene-codecs-9.8.0.jar.sha1 @@ -0,0 +1 @@ +5661a90424b164551302a14d946e5cf35d90be72 diff --git a/solr/licenses/lucene-core-9.7.0.jar.sha1 b/solr/licenses/lucene-core-9.7.0.jar.sha1 deleted file mode 100644 index e524210f516..00000000000 --- a/solr/licenses/lucene-core-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad391210ffd806931334be9670a35af00c56f959 diff --git a/solr/licenses/lucene-core-9.8.0.jar.sha1 b/solr/licenses/lucene-core-9.8.0.jar.sha1 new file mode 100644 index 00000000000..1c357709b86 --- /dev/null +++ b/solr/licenses/lucene-core-9.8.0.jar.sha1 @@ -0,0 +1 @@ +5e8421c5f8573bcf22e9265fc7e19469545a775a diff --git a/solr/licenses/lucene-expressions-9.7.0.jar.sha1 b/solr/licenses/lucene-expressions-9.7.0.jar.sha1 deleted file mode 100644 index df99d15fd55..00000000000 --- a/solr/licenses/lucene-expressions-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -297e1cfade4ef71466cc9d4f361d81807c8dc4c8 diff --git a/solr/licenses/lucene-expressions-9.8.0.jar.sha1 b/solr/licenses/lucene-expressions-9.8.0.jar.sha1 new file mode 100644 index 00000000000..2b13fab40dc --- /dev/null +++ b/solr/licenses/lucene-expressions-9.8.0.jar.sha1 @@ -0,0 +1 @@ +7725476acfcb9bdfeff1b813ce15c39c6b857dc2 diff --git a/solr/licenses/lucene-grouping-9.7.0.jar.sha1 b/solr/licenses/lucene-grouping-9.7.0.jar.sha1 deleted file mode 100644 index 81519bec541..00000000000 --- a/solr/licenses/lucene-grouping-9.7.0.jar.sha1 +++ /dev/null @@ 
-1 +0,0 @@ -8e6f0c229f4861be641047c33b05067176e4279c diff --git a/solr/licenses/lucene-grouping-9.8.0.jar.sha1 b/solr/licenses/lucene-grouping-9.8.0.jar.sha1 new file mode 100644 index 00000000000..c67fb0ff341 --- /dev/null +++ b/solr/licenses/lucene-grouping-9.8.0.jar.sha1 @@ -0,0 +1 @@ +d39184518351178c404ed9669fc6cb6111f2288d diff --git a/solr/licenses/lucene-highlighter-9.7.0.jar.sha1 b/solr/licenses/lucene-highlighter-9.7.0.jar.sha1 deleted file mode 100644 index 31744c10d96..00000000000 --- a/solr/licenses/lucene-highlighter-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -facb7c7ee0f75ed457a2d98f10d6430e25a53691 diff --git a/solr/licenses/lucene-highlighter-9.8.0.jar.sha1 b/solr/licenses/lucene-highlighter-9.8.0.jar.sha1 new file mode 100644 index 00000000000..c1a7f60dc79 --- /dev/null +++ b/solr/licenses/lucene-highlighter-9.8.0.jar.sha1 @@ -0,0 +1 @@ +1ac38c8278dbd63dfab30744a41dd955a415a31c diff --git a/solr/licenses/lucene-join-9.7.0.jar.sha1 b/solr/licenses/lucene-join-9.7.0.jar.sha1 deleted file mode 100644 index e9134ce0f3e..00000000000 --- a/solr/licenses/lucene-join-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d041bdc0947a14223cf68357407ee18b21027587 diff --git a/solr/licenses/lucene-join-9.8.0.jar.sha1 b/solr/licenses/lucene-join-9.8.0.jar.sha1 new file mode 100644 index 00000000000..842065e6321 --- /dev/null +++ b/solr/licenses/lucene-join-9.8.0.jar.sha1 @@ -0,0 +1 @@ +3d64fc57bb6e718d906413a9f73c713e6d4d8bb0 diff --git a/solr/licenses/lucene-memory-9.7.0.jar.sha1 b/solr/licenses/lucene-memory-9.7.0.jar.sha1 deleted file mode 100644 index 9ac3c50a4c9..00000000000 --- a/solr/licenses/lucene-memory-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0fade51ee353e15ddbbc45262aafe6f99ed020f1 diff --git a/solr/licenses/lucene-memory-9.8.0.jar.sha1 b/solr/licenses/lucene-memory-9.8.0.jar.sha1 new file mode 100644 index 00000000000..375f8028682 --- /dev/null +++ b/solr/licenses/lucene-memory-9.8.0.jar.sha1 @@ -0,0 +1 @@ +5283ac71d6ccecb5e00c7b52df2faec012f2625a diff 
--git a/solr/licenses/lucene-misc-9.7.0.jar.sha1 b/solr/licenses/lucene-misc-9.7.0.jar.sha1 deleted file mode 100644 index 9df2fb89f24..00000000000 --- a/solr/licenses/lucene-misc-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7fcf451e2376526c3a027958812866cc5b0ff13f diff --git a/solr/licenses/lucene-misc-9.8.0.jar.sha1 b/solr/licenses/lucene-misc-9.8.0.jar.sha1 new file mode 100644 index 00000000000..3e2022b60e4 --- /dev/null +++ b/solr/licenses/lucene-misc-9.8.0.jar.sha1 @@ -0,0 +1 @@ +9a57b049cf51a5e9c9c1909c420f645f1b6f9a54 diff --git a/solr/licenses/lucene-queries-9.7.0.jar.sha1 b/solr/licenses/lucene-queries-9.7.0.jar.sha1 deleted file mode 100644 index 80c638e986c..00000000000 --- a/solr/licenses/lucene-queries-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -126989d4622419aa06fcbf3a342e859cab8c8799 diff --git a/solr/licenses/lucene-queries-9.8.0.jar.sha1 b/solr/licenses/lucene-queries-9.8.0.jar.sha1 new file mode 100644 index 00000000000..75cdfee9f96 --- /dev/null +++ b/solr/licenses/lucene-queries-9.8.0.jar.sha1 @@ -0,0 +1 @@ +628db4ef46f1c6a05145bdac1d1bc4ace6341b13 diff --git a/solr/licenses/lucene-queryparser-9.7.0.jar.sha1 b/solr/licenses/lucene-queryparser-9.7.0.jar.sha1 deleted file mode 100644 index 277242ad56c..00000000000 --- a/solr/licenses/lucene-queryparser-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e77bde908ff698354e4a2149e6dd4658b56d7b0 diff --git a/solr/licenses/lucene-queryparser-9.8.0.jar.sha1 b/solr/licenses/lucene-queryparser-9.8.0.jar.sha1 new file mode 100644 index 00000000000..865bd1783cd --- /dev/null +++ b/solr/licenses/lucene-queryparser-9.8.0.jar.sha1 @@ -0,0 +1 @@ +982faf2bfa55542bf57fbadef54c19ac00f57cae diff --git a/solr/licenses/lucene-sandbox-9.7.0.jar.sha1 b/solr/licenses/lucene-sandbox-9.7.0.jar.sha1 deleted file mode 100644 index 60ad0045f4d..00000000000 --- a/solr/licenses/lucene-sandbox-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f3e8e1947f2f1c5784132444af51a060ff0b4bf diff --git a/solr/licenses/lucene-sandbox-9.8.0.jar.sha1 
b/solr/licenses/lucene-sandbox-9.8.0.jar.sha1 new file mode 100644 index 00000000000..5e95a9d52e1 --- /dev/null +++ b/solr/licenses/lucene-sandbox-9.8.0.jar.sha1 @@ -0,0 +1 @@ +06493dbd14d02537716822254866a94458f4d842 diff --git a/solr/licenses/lucene-spatial-extras-9.7.0.jar.sha1 b/solr/licenses/lucene-spatial-extras-9.7.0.jar.sha1 deleted file mode 100644 index 5a4ee73e3ee..00000000000 --- a/solr/licenses/lucene-spatial-extras-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -01b0bc7a407d8c35a70a1adf7966bb3e7caae928 diff --git a/solr/licenses/lucene-spatial-extras-9.8.0.jar.sha1 b/solr/licenses/lucene-spatial-extras-9.8.0.jar.sha1 new file mode 100644 index 00000000000..54f649ad950 --- /dev/null +++ b/solr/licenses/lucene-spatial-extras-9.8.0.jar.sha1 @@ -0,0 +1 @@ +9d9a731822ad6eefa1ba288a0c158d478522f165 diff --git a/solr/licenses/lucene-spatial3d-9.7.0.jar.sha1 b/solr/licenses/lucene-spatial3d-9.7.0.jar.sha1 deleted file mode 100644 index dfa1e68bbf4..00000000000 --- a/solr/licenses/lucene-spatial3d-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7c6b1b6e0a70c9cd177371e648648c2f896742a2 diff --git a/solr/licenses/lucene-spatial3d-9.8.0.jar.sha1 b/solr/licenses/lucene-spatial3d-9.8.0.jar.sha1 new file mode 100644 index 00000000000..f29a79e5e9f --- /dev/null +++ b/solr/licenses/lucene-spatial3d-9.8.0.jar.sha1 @@ -0,0 +1 @@ +ce752a52b2d4eac90633c7df7982e29504f99e76 diff --git a/solr/licenses/lucene-suggest-9.7.0.jar.sha1 b/solr/licenses/lucene-suggest-9.7.0.jar.sha1 deleted file mode 100644 index a1661406108..00000000000 --- a/solr/licenses/lucene-suggest-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5c37fd9a5d71dc87fe1cd4c18ff295ec8cfac170 diff --git a/solr/licenses/lucene-suggest-9.8.0.jar.sha1 b/solr/licenses/lucene-suggest-9.8.0.jar.sha1 new file mode 100644 index 00000000000..25ba9a373ad --- /dev/null +++ b/solr/licenses/lucene-suggest-9.8.0.jar.sha1 @@ -0,0 +1 @@ +f977f96f2093b7fddea6b67caa2e1c5b10edebf6 diff --git a/solr/licenses/lucene-test-framework-9.7.0.jar.sha1 
b/solr/licenses/lucene-test-framework-9.7.0.jar.sha1 deleted file mode 100644 index e4c93a23107..00000000000 --- a/solr/licenses/lucene-test-framework-9.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c9339d7fdb424940ab437134f06b9f64937d45e2 diff --git a/solr/licenses/lucene-test-framework-9.8.0.jar.sha1 b/solr/licenses/lucene-test-framework-9.8.0.jar.sha1 new file mode 100644 index 00000000000..29315f64529 --- /dev/null +++ b/solr/licenses/lucene-test-framework-9.8.0.jar.sha1 @@ -0,0 +1 @@ +44903631a74e9cf5551452f905ab8a5b0652f1a5 diff --git a/solr/licenses/opentelemetry-api-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-api-1.29.0.jar.sha1 deleted file mode 100644 index 7425920b87a..00000000000 --- a/solr/licenses/opentelemetry-api-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -45010687a1181dc886fd12403e48cf76e94c65b1 diff --git a/solr/licenses/opentelemetry-api-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-api-1.30.1.jar.sha1 new file mode 100644 index 00000000000..b21be2984d1 --- /dev/null +++ b/solr/licenses/opentelemetry-api-1.30.1.jar.sha1 @@ -0,0 +1 @@ +a32dfbd7f01de6711fd0e970f8d4b4c0405056d6 diff --git a/solr/licenses/opentelemetry-api-events-1.29.0-alpha.jar.sha1 b/solr/licenses/opentelemetry-api-events-1.29.0-alpha.jar.sha1 deleted file mode 100644 index 0b08d51c6f2..00000000000 --- a/solr/licenses/opentelemetry-api-events-1.29.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cdc6b637c6374d4ae4be5cad381f6a566fb4310e diff --git a/solr/licenses/opentelemetry-api-events-1.30.1-alpha.jar.sha1 b/solr/licenses/opentelemetry-api-events-1.30.1-alpha.jar.sha1 new file mode 100644 index 00000000000..1ec9a6f975e --- /dev/null +++ b/solr/licenses/opentelemetry-api-events-1.30.1-alpha.jar.sha1 @@ -0,0 +1 @@ +fb5fec3cdd62b98d61d1648f0027ea0b9767758b diff --git a/solr/licenses/opentelemetry-context-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-context-1.29.0.jar.sha1 deleted file mode 100644 index 7f8f3d28249..00000000000 --- a/solr/licenses/opentelemetry-context-1.29.0.jar.sha1 
+++ /dev/null @@ -1 +0,0 @@ -a4cf6857f268b9637ea330fffc70c1e6421d1d55 diff --git a/solr/licenses/opentelemetry-context-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-context-1.30.1.jar.sha1 new file mode 100644 index 00000000000..5009f902e27 --- /dev/null +++ b/solr/licenses/opentelemetry-context-1.30.1.jar.sha1 @@ -0,0 +1 @@ +58f665ff01ce6b964cdf0b8cb5cd1c196dfe94ce diff --git a/solr/licenses/opentelemetry-exporter-common-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-exporter-common-1.29.0.jar.sha1 deleted file mode 100644 index 2f7556109de..00000000000 --- a/solr/licenses/opentelemetry-exporter-common-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7fa947205c2e85383c5b24139325f36a9e474067 diff --git a/solr/licenses/opentelemetry-exporter-common-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-exporter-common-1.30.1.jar.sha1 new file mode 100644 index 00000000000..4e601b99c00 --- /dev/null +++ b/solr/licenses/opentelemetry-exporter-common-1.30.1.jar.sha1 @@ -0,0 +1 @@ +f299d336dba1039478497f37b273dfa764c6faef diff --git a/solr/licenses/opentelemetry-exporter-otlp-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-exporter-otlp-1.29.0.jar.sha1 deleted file mode 100644 index 7146545d1ea..00000000000 --- a/solr/licenses/opentelemetry-exporter-otlp-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -471956f2773b5409355a09da6ef4cb8976c82db2 diff --git a/solr/licenses/opentelemetry-exporter-otlp-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-exporter-otlp-1.30.1.jar.sha1 new file mode 100644 index 00000000000..55848ca06a6 --- /dev/null +++ b/solr/licenses/opentelemetry-exporter-otlp-1.30.1.jar.sha1 @@ -0,0 +1 @@ +15692246539571c41180aff2b55abe527b939a7b diff --git a/solr/licenses/opentelemetry-exporter-otlp-common-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-exporter-otlp-common-1.29.0.jar.sha1 deleted file mode 100644 index fe86cf5eb9e..00000000000 --- a/solr/licenses/opentelemetry-exporter-otlp-common-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -992edd4ba5d473abcd6b4ca126f11149b2bdba0c 
diff --git a/solr/licenses/opentelemetry-exporter-otlp-common-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-exporter-otlp-common-1.30.1.jar.sha1 new file mode 100644 index 00000000000..609f1e9ee85 --- /dev/null +++ b/solr/licenses/opentelemetry-exporter-otlp-common-1.30.1.jar.sha1 @@ -0,0 +1 @@ +947cf43a6411c4a323e14594431040a476ad43e8 diff --git a/solr/licenses/opentelemetry-exporter-sender-okhttp-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-exporter-sender-okhttp-1.29.0.jar.sha1 deleted file mode 100644 index 1ea79b62365..00000000000 --- a/solr/licenses/opentelemetry-exporter-sender-okhttp-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3a2848644687cfe541c8b2b4b05f5d4e58c2709 diff --git a/solr/licenses/opentelemetry-exporter-sender-okhttp-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-exporter-sender-okhttp-1.30.1.jar.sha1 new file mode 100644 index 00000000000..b7bad990390 --- /dev/null +++ b/solr/licenses/opentelemetry-exporter-sender-okhttp-1.30.1.jar.sha1 @@ -0,0 +1 @@ +9f3a14515500e4df260ce7b10a668237a95ac791 diff --git a/solr/licenses/opentelemetry-extension-incubator-1.29.0-alpha.jar.sha1 b/solr/licenses/opentelemetry-extension-incubator-1.29.0-alpha.jar.sha1 deleted file mode 100644 index e781a342e63..00000000000 --- a/solr/licenses/opentelemetry-extension-incubator-1.29.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4af0adab426582e9acea130d39b11791ccf0b513 diff --git a/solr/licenses/opentelemetry-extension-incubator-1.30.1-alpha.jar.sha1 b/solr/licenses/opentelemetry-extension-incubator-1.30.1-alpha.jar.sha1 new file mode 100644 index 00000000000..d6483a14fe2 --- /dev/null +++ b/solr/licenses/opentelemetry-extension-incubator-1.30.1-alpha.jar.sha1 @@ -0,0 +1 @@ +bfcea9bd71f97dd4e8a4f92c15ba5659fb07ff05 diff --git a/solr/licenses/opentelemetry-sdk-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-1.29.0.jar.sha1 deleted file mode 100644 index 23cae568fb8..00000000000 --- a/solr/licenses/opentelemetry-sdk-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-5d3a0e83ab6a2849c5b4123d539f994a721ddcd5 diff --git a/solr/licenses/opentelemetry-sdk-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-1.30.1.jar.sha1 new file mode 100644 index 00000000000..918c08509ae --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-1.30.1.jar.sha1 @@ -0,0 +1 @@ +4d15a9ea26e8e6ea93287a9f4ee02d91e5a74392 diff --git a/solr/licenses/opentelemetry-sdk-common-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-common-1.29.0.jar.sha1 deleted file mode 100644 index 4033aa6f1bf..00000000000 --- a/solr/licenses/opentelemetry-sdk-common-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4a84b83c226c91a54bd0e9244d49908875cea3a diff --git a/solr/licenses/opentelemetry-sdk-common-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-common-1.30.1.jar.sha1 new file mode 100644 index 00000000000..5d133711ae5 --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-common-1.30.1.jar.sha1 @@ -0,0 +1 @@ +8e437ba87004bb63069d04fb06beae65b98dd13a diff --git a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.29.0.jar.sha1 deleted file mode 100644 index 635e3eada28..00000000000 --- a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -36bd08550cbdcc4f58a4949e0f97f97443220a2d diff --git a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.30.1.jar.sha1 new file mode 100644 index 00000000000..3a03d895bdb --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-1.30.1.jar.sha1 @@ -0,0 +1 @@ +7f27a7b5d80efb75c39cead75dc7ba795e92d9ad diff --git a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.29.0.jar.sha1 deleted file mode 100644 index 299eb7083b6..00000000000 --- a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.29.0.jar.sha1 +++ /dev/null @@ -1 
+0,0 @@ -b810dedc1fc8b28892eca5fa945e1c82cb95be4f diff --git a/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.30.1.jar.sha1 new file mode 100644 index 00000000000..20078e803d9 --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-extension-autoconfigure-spi-1.30.1.jar.sha1 @@ -0,0 +1 @@ +05778b81d9d704217c749f4c9e2a580c23143f49 diff --git a/solr/licenses/opentelemetry-sdk-logs-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-logs-1.29.0.jar.sha1 deleted file mode 100644 index ff6a4151576..00000000000 --- a/solr/licenses/opentelemetry-sdk-logs-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68151d4465c12183db8edc4f9d8f0a878bada16b diff --git a/solr/licenses/opentelemetry-sdk-logs-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-logs-1.30.1.jar.sha1 new file mode 100644 index 00000000000..98a559361d3 --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-logs-1.30.1.jar.sha1 @@ -0,0 +1 @@ +5985d0950746ad12b49cc42c063f26ddfbcaaacb diff --git a/solr/licenses/opentelemetry-sdk-metrics-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-metrics-1.29.0.jar.sha1 deleted file mode 100644 index 813a65b66fc..00000000000 --- a/solr/licenses/opentelemetry-sdk-metrics-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6bb59616f1180286bc2ccf40e34d636984581ba9 diff --git a/solr/licenses/opentelemetry-sdk-metrics-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-metrics-1.30.1.jar.sha1 new file mode 100644 index 00000000000..1d8b8f9ac96 --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-metrics-1.30.1.jar.sha1 @@ -0,0 +1 @@ +b12825541c5dae52a0fb35045c1b36df3ca8f632 diff --git a/solr/licenses/opentelemetry-sdk-testing-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-testing-1.29.0.jar.sha1 deleted file mode 100644 index 87fa435be2c..00000000000 --- a/solr/licenses/opentelemetry-sdk-testing-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f36cd924e7631a6888d2280a3b368b3dce3acfc diff --git 
a/solr/licenses/opentelemetry-sdk-testing-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-testing-1.30.1.jar.sha1 new file mode 100644 index 00000000000..ce2ac546fdd --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-testing-1.30.1.jar.sha1 @@ -0,0 +1 @@ +99ca5001ea7afb4b3ef66436199c7726effef285 diff --git a/solr/licenses/opentelemetry-sdk-trace-1.29.0.jar.sha1 b/solr/licenses/opentelemetry-sdk-trace-1.29.0.jar.sha1 deleted file mode 100644 index 1a9ce5eb65d..00000000000 --- a/solr/licenses/opentelemetry-sdk-trace-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1008bf3794f6fc10238b1f63d0546ae817ad1783 diff --git a/solr/licenses/opentelemetry-sdk-trace-1.30.1.jar.sha1 b/solr/licenses/opentelemetry-sdk-trace-1.30.1.jar.sha1 new file mode 100644 index 00000000000..c3a51a19506 --- /dev/null +++ b/solr/licenses/opentelemetry-sdk-trace-1.30.1.jar.sha1 @@ -0,0 +1 @@ +4c5531fbc44178a7bcfeb7021ae80e70a7c43458 diff --git a/solr/licenses/opentelemetry-semconv-1.29.0-alpha.jar.sha1 b/solr/licenses/opentelemetry-semconv-1.29.0-alpha.jar.sha1 deleted file mode 100644 index aa0f7608da5..00000000000 --- a/solr/licenses/opentelemetry-semconv-1.29.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8ee51f51d9c1c959b537c8dba67d7a524204b974 diff --git a/solr/licenses/snappy-java-1.1.10.1.jar.sha1 b/solr/licenses/snappy-java-1.1.10.1.jar.sha1 deleted file mode 100644 index 6958970d2ab..00000000000 --- a/solr/licenses/snappy-java-1.1.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4a1e1a22cba39145dfa20f2fef4e1ca38c8e02a1 diff --git a/solr/licenses/snappy-java-1.1.10.4.jar.sha1 b/solr/licenses/snappy-java-1.1.10.4.jar.sha1 new file mode 100644 index 00000000000..3022bc5d1fd --- /dev/null +++ b/solr/licenses/snappy-java-1.1.10.4.jar.sha1 @@ -0,0 +1 @@ +50d0390056017158bdc75c063efd5c2a898d5f0c diff --git a/solr/modules/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/modules/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java 
index 69e44360f2b..c10b42c0cfe 100644 --- a/solr/modules/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java +++ b/solr/modules/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java @@ -215,14 +215,17 @@ public void load( } parser.parse(inputStream, parsingHandler, metadata, context); } catch (TikaException e) { - if (ignoreTikaException) - log.warn( - new StringBuilder("skip extracting text due to ") - .append(e.getLocalizedMessage()) - .append(". metadata=") - .append(metadata.toString()) - .toString()); // nowarn - else throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + if (ignoreTikaException) { + if (log.isWarnEnabled()) { + log.warn( + "skip extracting text due to {}. metadata={}", + e.getLocalizedMessage(), + metadata, + e); + } + } else { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } } if (extractOnly == false) { addDoc(handler); diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java index f8d800dfaf7..f9a8717c1c7 100644 --- a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java +++ b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java @@ -1899,8 +1899,6 @@ public static FileContext write(final FileContext fileContext, return write(fileContext, path, charseq, StandardCharsets.UTF_8); } - @InterfaceAudience.LimitedPrivate({"ViewDistributedFileSystem"}) - @InterfaceStability.Unstable /** * Used in ViewDistributedFileSystem rename API to get access to the protected * API of FileSystem interface. Even though Rename with options API @@ -1909,6 +1907,8 @@ public static FileContext write(final FileContext fileContext, * out casting to the specific filesystem. This util method is proposed to get * the access to FileSystem#rename with options. 
*/ + @InterfaceAudience.LimitedPrivate({"ViewDistributedFileSystem"}) + @InterfaceStability.Unstable @SuppressWarnings("deprecation") public static void rename(FileSystem srcFs, Path src, Path dst, final Options.Rename... options) throws IOException { diff --git a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/store/blockcache/BlockCacheTest.java b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/store/blockcache/BlockCacheTest.java index b14b012280c..6cd2cf35d63 100644 --- a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/store/blockcache/BlockCacheTest.java +++ b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/store/blockcache/BlockCacheTest.java @@ -43,8 +43,8 @@ public void testBlockCache() { byte[] buffer = new byte[1024]; Random random = random(); byte[] newData = new byte[blockSize]; - AtomicLong hitsInCache = new AtomicLong(); - AtomicLong missesInCache = new AtomicLong(); + long hitsInCache = 0L; + long missesInCache = 0L; long storeTime = 0; long fetchTime = 0; int passes = 10000; @@ -59,9 +59,9 @@ public void testBlockCache() { blockCacheKey.setPath("/"); if (blockCache.fetch(blockCacheKey, buffer)) { - hitsInCache.incrementAndGet(); + hitsInCache += 1; } else { - missesInCache.incrementAndGet(); + missesInCache += 1; } byte[] testData = testData(random, blockSize, newData); @@ -77,8 +77,8 @@ public void testBlockCache() { assertArrayEquals("buffer content differs", testData, buffer); } } - System.out.println("Cache Hits = " + hitsInCache.get()); - System.out.println("Cache Misses = " + missesInCache.get()); + System.out.println("Cache Hits = " + hitsInCache); + System.out.println("Cache Misses = " + missesInCache); System.out.println("Store = " + (storeTime / (double) passes) / 1000000.0); System.out.println("Fetch = " + (fetchTime / (double) passes) / 1000000.0); System.out.println("# of Elements = " + blockCache.getSize()); diff --git a/solr/modules/jwt-auth/build.gradle b/solr/modules/jwt-auth/build.gradle index 3bed1606e4a..1b420899e33 100644 --- 
a/solr/modules/jwt-auth/build.gradle +++ b/solr/modules/jwt-auth/build.gradle @@ -39,6 +39,7 @@ dependencies { implementation 'org.apache.httpcomponents:httpclient' implementation 'org.apache.httpcomponents:httpcore' implementation 'org.eclipse.jetty:jetty-client' + implementation 'org.eclipse.jetty:jetty-http' implementation 'org.eclipse.jetty.toolchain:jetty-servlet-api' implementation 'com.google.guava:guava' implementation 'org.slf4j:slf4j-api' diff --git a/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTAuthPlugin.java b/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTAuthPlugin.java index 3e3f7578fc9..bb64f458f4e 100644 --- a/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTAuthPlugin.java +++ b/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTAuthPlugin.java @@ -257,9 +257,9 @@ public void init(Map pluginConfig) { issuerConfigs.addAll(parseIssuers(pluginConfig)); verificationKeyResolver = new JWTVerificationkeyResolver(issuerConfigs, requireIssuer); - if (issuerConfigs.size() > 0 && getPrimaryIssuer().getAuthorizationEndpoint() != null) { + if (!issuerConfigs.isEmpty() && getPrimaryIssuer().getAuthorizationEndpoint() != null) { adminUiScope = (String) pluginConfig.get(PARAM_ADMINUI_SCOPE); - if (adminUiScope == null && requiredScopes.size() > 0) { + if (adminUiScope == null && !requiredScopes.isEmpty()) { adminUiScope = requiredScopes.get(0); log.warn( "No adminUiScope given, using first scope in 'scope' list as required scope for accessing Admin UI"); @@ -384,7 +384,7 @@ private Optional parseIssuerFromTopLevelConfig(Map 0) { + if (!finalRoles.isEmpty()) { return new JWTAuthenticationResponse( AuthCode.AUTHENTICATED, new JWTPrincipalWithUserRoles( @@ -972,7 +972,7 @@ protected boolean interceptInternodeRequest(Request request) { Object userToken = request.getAttributes().get(Http2SolrClient.REQ_PRINCIPAL_KEY); if (userToken instanceof JWTPrincipal) { JWTPrincipal jwtPrincipal = (JWTPrincipal) 
userToken; - request.header(HttpHeaders.AUTHORIZATION, "Bearer " + jwtPrincipal.getToken()); + request.headers(h -> h.put(HttpHeaders.AUTHORIZATION, "Bearer " + jwtPrincipal.getToken())); return true; } return false; diff --git a/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTVerificationkeyResolver.java b/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTVerificationkeyResolver.java index a9254c7d5d7..2fe75a1baf5 100644 --- a/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTVerificationkeyResolver.java +++ b/solr/modules/jwt-auth/src/java/org/apache/solr/security/jwt/JWTVerificationkeyResolver.java @@ -59,9 +59,9 @@ public class JWTVerificationkeyResolver implements VerificationKeyResolver { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private VerificationJwkSelector verificationJwkSelector = new VerificationJwkSelector(); + private final VerificationJwkSelector verificationJwkSelector = new VerificationJwkSelector(); - private Map issuerConfigs = new HashMap<>(); + private final Map issuerConfigs = new HashMap<>(); private final boolean requireIssuer; /** @@ -73,10 +73,7 @@ public class JWTVerificationkeyResolver implements VerificationKeyResolver { public JWTVerificationkeyResolver( Collection issuerConfigs, boolean requireIssuer) { this.requireIssuer = requireIssuer; - issuerConfigs.forEach( - ic -> { - this.issuerConfigs.put(ic.getIss(), ic); - }); + issuerConfigs.forEach(ic -> this.issuerConfigs.put(ic.getIss(), ic)); } @Override diff --git a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java index 23cefdbca51..c2613d8550c 100644 --- a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java +++ b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java 
@@ -86,7 +86,6 @@ */ @SolrTestCaseJ4.SuppressSSL public class JWTAuthPluginIntegrationTest extends SolrCloudAuthTestCase { - private final String COLLECTION = "jwtColl"; private static String mockOAuthToken; private static Path pemFilePath; @@ -143,7 +142,7 @@ public void mockOAuth2Server() throws Exception { } @Test - public void mockOAuth2ServerWrongPEMInTruststore() throws Exception { + public void mockOAuth2ServerWrongPEMInTruststore() { // JWTAuthPlugin throws SSLHandshakeException when fetching JWK, so this trips cluster init assertThrows(Exception.class, () -> configureClusterMockOauth(2, wrongPemFilePath, 2000)); } @@ -209,6 +208,7 @@ public void testMetrics() throws Exception { String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); CloseableHttpClient cl = HttpClientUtil.createClient(null); + String COLLECTION = "jwtColl"; createCollection(cluster, COLLECTION); // Missing token @@ -524,8 +524,10 @@ private static MockOAuth2Server createMockOAuthServer(Path p12CertPath, String s TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); trustManagerFactory.init(keystore); - MockWebServerWrapper mockWebServerWrapper = new MockWebServerWrapper(); - MockWebServer mockWebServer = mockWebServerWrapper.getMockWebServer(); + MockWebServer mockWebServer; + try (MockWebServerWrapper mockWebServerWrapper = new MockWebServerWrapper()) { + mockWebServer = mockWebServerWrapper.getMockWebServer(); + } SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init( keyManagerFactory.getKeyManagers(), /*trustManagerFactory.getTrustManagers()*/ diff --git a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginTest.java b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginTest.java index 9e04865c6c3..211b75c1b5f 100644 --- a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginTest.java +++ 
b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginTest.java @@ -636,11 +636,7 @@ public void initWithIdpCertWrongDoubleConfig() { authConf.put("trustedCerts", trustedPemCert); authConf.put("trustedCertsFile", "/path/to/cert.pem"); plugin = new JWTAuthPlugin(); - expectThrows( - SolrException.class, - () -> { - plugin.init(authConf); - }); + expectThrows(SolrException.class, () -> plugin.init(authConf)); } @Test @@ -656,12 +652,11 @@ public void parseInvalidPemToX509() { expectThrows( SolrException.class, CertificateException.class, - () -> { - CryptoKeys.parseX509Certs( - new ByteArrayInputStream( - ("-----BEGIN CERTIFICATE-----\n" + "foo\n" + "-----END CERTIFICATE-----\n") - .getBytes(StandardCharsets.UTF_8))); - }); + () -> + CryptoKeys.parseX509Certs( + new ByteArrayInputStream( + ("-----BEGIN CERTIFICATE-----\n" + "foo\n" + "-----END CERTIFICATE-----\n") + .getBytes(StandardCharsets.UTF_8)))); } @Test diff --git a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTIssuerConfigTest.java b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTIssuerConfigTest.java index 57c0261b897..6416b60c61c 100644 --- a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTIssuerConfigTest.java +++ b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTIssuerConfigTest.java @@ -37,6 +37,7 @@ import org.junit.Test; import org.noggit.JSONUtil; +@SuppressWarnings("HttpUrlsUsage") public class JWTIssuerConfigTest extends SolrTestCase { private JWTIssuerConfig testIssuer; private Map testIssuerConfigMap; @@ -153,7 +154,7 @@ public void jwksUrlwithHttpBehaviors() { JWTIssuerConfig issuerConfig = new JWTIssuerConfig(issuerConfigMap); - SolrException e = expectThrows(SolrException.class, () -> issuerConfig.getHttpsJwks()); + SolrException e = expectThrows(SolrException.class, issuerConfig::getHttpsJwks); assertEquals(400, e.code()); assertEquals( "jwksUrl is using http protocol. HTTPS required for IDP communication. 
Please use SSL or start your nodes with -Dsolr.auth.jwt.allowOutboundHttp=true to allow HTTP for test purposes.", diff --git a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTVerificationkeyResolverTest.java b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTVerificationkeyResolverTest.java index 216154efbb6..3406e439dbb 100644 --- a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTVerificationkeyResolverTest.java +++ b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTVerificationkeyResolverTest.java @@ -43,6 +43,7 @@ import org.mockito.junit.MockitoRule; /** Tests the multi jwks resolver that can fetch keys from multiple JWKs */ +@SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public class JWTVerificationkeyResolverTest extends SolrTestCaseJ4 { private JWTVerificationkeyResolver resolver; @@ -117,6 +118,7 @@ public void notFoundKey() throws JoseException { resolver.resolveKey(k5.getJws(), null); } + @SuppressWarnings("NewClassNamingConvention") public static class KeyHolder { private final RsaJsonWebKey key; private final String kid; diff --git a/solr/modules/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java b/solr/modules/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java index 64070349bfa..19ac717bdda 100644 --- a/solr/modules/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java +++ b/solr/modules/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java @@ -218,7 +218,7 @@ protected static boolean scoreSingleHit( LTRScoringQuery.ModelWeight.ModelScorer scorer, ScoreDoc[] reranked) throws IOException { - /** + /* * Scorer for a LTRScoringQuery.ModelWeight should never be null since we always have to call * score even if no feature scorers match, since a model might use that info to return a * non-zero score. 
Same applies for the case of advancing a diff --git a/solr/modules/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java b/solr/modules/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java index bab34fa8d3e..583e080954c 100644 --- a/solr/modules/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java +++ b/solr/modules/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java @@ -129,6 +129,17 @@ public FieldValueFeatureWeight( } } + /** + * Override this method in sub classes that wish to use not an absolute time but an interval + * such as document age or remaining shelf life relative to a specific date or relative to now. + * + * @param val value of the field + * @return value after transformation + */ + protected long readNumericDocValuesDate(long val) { + return val; + } + /** * Return a FeatureScorer that uses docValues or storedFields if no docValues are present * @@ -261,6 +272,8 @@ private float readNumericDocValues() throws IOException { } else if (NumberType.DOUBLE.equals(numberType)) { // handle double value conversion return (float) Double.longBitsToDouble(docValues.longValue()); + } else if (NumberType.DATE.equals(numberType)) { + return readNumericDocValuesDate(docValues.longValue()); } // just take the long value return docValues.longValue(); diff --git a/solr/modules/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java b/solr/modules/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java index 8bed4efe7ed..b10d9d7f952 100644 --- a/solr/modules/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java +++ b/solr/modules/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java @@ -17,8 +17,10 @@ package org.apache.solr.ltr.feature; import java.io.IOException; +import java.time.Instant; import java.util.LinkedHashMap; import java.util.Map; +import java.util.concurrent.TimeUnit; import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -692,6 +694,113 @@ public void testThatDateValuesAreCorrectlyParsed() throws Exception { } } + public static class RelativeDateFieldValueFeature extends FieldValueFeature { + + private boolean since = false; + private boolean until = false; + + public boolean getSince() { + return this.since; + } + + public void setSince(boolean since) { + this.since = since; + } + + public boolean getUntil() { + return this.until; + } + + public void setUntil(boolean until) { + this.until = until; + } + + public RelativeDateFieldValueFeature(String name, Map params) { + super(name, params); + } + + @Override + protected void validate() throws FeatureException { + if (since != until) { + return; + } + throw new FeatureException( + getClass().getSimpleName() + ": exactly one of 'since' and 'until' must be provided"); + } + + @Override + public FeatureWeight createWeight( + IndexSearcher searcher, + boolean needsScores, + SolrQueryRequest request, + Query originalQuery, + Map efi) + throws IOException { + return new FieldValueFeatureWeight(searcher, request, originalQuery, efi) { + private final long timeZero = Instant.parse("2000-01-01T00:00:00.000Z").toEpochMilli(); + + @Override + public long readNumericDocValuesDate(long val) { + if (since) return TimeUnit.MILLISECONDS.toMinutes(val - this.timeZero); + if (until) return TimeUnit.MILLISECONDS.toMinutes(this.timeZero - val); + return 0; + } + }; + } + } + + @Test + public void testRelativeDateFieldValueFeature() throws Exception { + final String field = "dvDateField"; + for (boolean since : new boolean[] {false, true}) { + final String[][] inputsAndTests = { + new String[] { + "2000-01-01T00:00:00.000Z", + "/response/docs/[0]/=={'[fv]':'" + + FeatureLoggerTestUtils.toFeatureVector(field, "0.0") + + "'}" + }, + new String[] { + "2000-01-01T00:01:02.003Z", + "/response/docs/[0]/=={'[fv]':'" + + FeatureLoggerTestUtils.toFeatureVector(field, (since ? 
"1.0" : "-1.0")) + + "'}" + }, + new String[] { + "2000-01-01T01:02:03.004Z", + "/response/docs/[0]/=={'[fv]':'" + + FeatureLoggerTestUtils.toFeatureVector(field, (since ? "62.0" : "-62.0")) + + "'}" + } + }; + + final String fstore = "testRelativeDateFieldValueFeature" + field + "_" + since; + final String model = fstore + "-model"; + loadFeature( + field, + RelativeDateFieldValueFeature.class.getName(), + fstore, + "{\"field\":\"" + field + "\", \"" + (since ? "since" : "until") + "\": true}"); + loadModel( + model, + LinearModel.class.getName(), + new String[] {field}, + fstore, + "{\"weights\":{\"" + field + "\":1.0}}"); + + for (String[] inputAndTest : inputsAndTests) { + assertU(adoc("id", "21", field, inputAndTest[0])); + assertU(commit()); + + final SolrQuery query = new SolrQuery("id:21"); + query.add("rq", "{!ltr model=" + model + " reRankDocs=4}"); + query.add("fl", "[fv]"); + + assertJQ("/query" + query.toQueryString(), inputAndTest[1]); + } + } + } + /** * This class is used to track which specific FieldValueFeature is used so that we can test, * whether the fallback mechanism works correctly. 
diff --git a/solr/modules/opentelemetry/src/java/org/apache/solr/opentelemetry/OtelTracerConfigurator.java b/solr/modules/opentelemetry/src/java/org/apache/solr/opentelemetry/OtelTracerConfigurator.java index 221cb571b7b..cdddd55c46d 100644 --- a/solr/modules/opentelemetry/src/java/org/apache/solr/opentelemetry/OtelTracerConfigurator.java +++ b/solr/modules/opentelemetry/src/java/org/apache/solr/opentelemetry/OtelTracerConfigurator.java @@ -65,7 +65,9 @@ void prepareConfiguration(NamedList args) { setDefaultIfNotConfigured("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc"); setDefaultIfNotConfigured("OTEL_TRACES_SAMPLER", "parentbased_always_on"); setDefaultIfNotConfigured("OTEL_PROPAGATORS", "tracecontext,baggage"); - addOtelResourceAttributes(Map.of("host.name", System.getProperty("host"))); + if (System.getProperty("host") != null) { + addOtelResourceAttributes(Map.of("host.name", System.getProperty("host"))); + } final String currentConfig = getCurrentOtelConfigAsString(); log.info("OpenTelemetry tracer enabled with configuration: {}", currentConfig); diff --git a/solr/packaging/test/test_ssl.bats b/solr/packaging/test/test_ssl.bats index 2e9b01e263d..10266768b7e 100644 --- a/solr/packaging/test/test_ssl.bats +++ b/solr/packaging/test/test_ssl.bats @@ -58,6 +58,11 @@ teardown() { run solr api -get "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" assert_output --partial '"numFound":0' + + run curl --cacert "$ssl_dir/solr-ssl.pem" "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" + assert_output --partial '"numFound":0' + + run ! 
curl "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" } @test "use different hostname when not checking peer-name" { @@ -81,7 +86,7 @@ teardown() { export SOLR_SSL_NEED_CLIENT_AUTH=false export SOLR_SSL_WANT_CLIENT_AUTH=false export SOLR_SSL_CHECK_PEER_NAME=false - export SOLR_HOST=localhost + export SOLR_HOST=127.0.0.1 solr start -c solr assert --started https://localhost:${SOLR_PORT}/solr --timeout 5000 @@ -89,11 +94,18 @@ teardown() { run solr create -c test -s 2 assert_output --partial "Created collection 'test'" + run solr api -get "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" + assert_output --partial '"numFound":0' + + # Just test that curl can connect via insecure or via a custom host header + run curl --http2 --cacert "$ssl_dir/solr-ssl.pem" "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" + assert_output --partial 'no alternative certificate subject name matches target host name' + # Just test that curl can connect via insecure or via a custom host header run curl --http2 --cacert "$ssl_dir/solr-ssl.pem" -k "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" assert_output --partial '"numFound":0' - run curl --http2 --cacert "$ssl_dir/solr-ssl.pem" -H "Host: test.solr.apache.org" "https://127.0.0.1:${SOLR_PORT}/solr/test/select?q=*:*" + run curl --http2 --cacert "$ssl_dir/solr-ssl.pem" --resolve "test.solr.apache.org:${SOLR_PORT}:127.0.0.1" "https://test.solr.apache.org:${SOLR_PORT}/solr/test/select?q=*:*" assert_output --partial '"numFound":0' # This is a client setting, so we don't need to restart Solr to make sure that it fails @@ -102,6 +114,18 @@ teardown() { # This should fail the peername check run ! 
solr api -get "https://localhost:${SOLR_PORT}/solr/test/select?q=*:*" assert_output --partial 'Server refused connection' + + # Restart the server enabling the SNI hostcheck + export SOLR_SSL_CHECK_PEER_NAME=false + export SOLR_OPTS="${SOLR_OPTS} -Dsolr.jetty.ssl.sniHostCheck=true" + solr restart -c + # This should fail the SNI Hostname check + run ! solr api -verbose -get "https://localhost:${SOLR_PORT}/solr/admin/collections?action=CLUSTERSTATUS" + assert_output --partial 'Invalid SNI' + + # Using the right hostname should not fail the SNI Hostname check + run curl --http2 --cacert "$ssl_dir/solr-ssl.pem" --resolve "test.solr.apache.org:${SOLR_PORT}:127.0.0.1" "https://test.solr.apache.org:${SOLR_PORT}/solr/admin/collections?action=CLUSTERSTATUS" + assert_output --partial '"urlScheme":"https"' } @test "start solr with ssl and auth" { diff --git a/solr/prometheus-exporter/build.gradle b/solr/prometheus-exporter/build.gradle index a64fa08147d..66beb70aed7 100644 --- a/solr/prometheus-exporter/build.gradle +++ b/solr/prometheus-exporter/build.gradle @@ -56,8 +56,8 @@ dependencies { testImplementation project(':solr:test-framework') testImplementation 'com.carrotsearch.randomizedtesting:randomizedtesting-runner' testImplementation 'junit:junit' + testImplementation 'org.apache.lucene:lucene-test-framework' - testImplementation 'commons-io:commons-io' testImplementation 'org.apache.httpcomponents:httpclient' testImplementation 'org.apache.httpcomponents:httpcore' } diff --git a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrClientFactory.java b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrClientFactory.java index f34aad926e3..c1c4a891593 100644 --- a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrClientFactory.java +++ b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrClientFactory.java @@ -17,6 +17,7 @@ package org.apache.solr.prometheus.exporter; +import 
java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -28,37 +29,43 @@ public class SolrClientFactory { - private PrometheusExporterSettings settings; + private final PrometheusExporterSettings settings; + private final SolrScrapeConfiguration configuration; - public SolrClientFactory(PrometheusExporterSettings settings) { + public SolrClientFactory( + PrometheusExporterSettings settings, SolrScrapeConfiguration configuration) { this.settings = settings; + this.configuration = configuration; } - public Http2SolrClient createStandaloneSolrClient(String solrHost) { - Http2SolrClient http2SolrClient = + private static Http2SolrClient.Builder newHttp2SolrClientBuilder( + String solrHost, PrometheusExporterSettings settings, SolrScrapeConfiguration configuration) { + var builder = new Http2SolrClient.Builder(solrHost) .withIdleTimeout(settings.getHttpReadTimeout(), TimeUnit.MILLISECONDS) .withConnectionTimeout(settings.getHttpConnectionTimeout(), TimeUnit.MILLISECONDS) - .withResponseParser(new NoOpResponseParser("json")) - .build(); + .withResponseParser(new NoOpResponseParser("json")); + if (configuration.getBasicAuthUser() != null) { + builder.withBasicAuthCredentials( + configuration.getBasicAuthUser(), configuration.getBasicAuthPwd()); + } + return builder; + } - return http2SolrClient; + public Http2SolrClient createStandaloneSolrClient(String solrHost) { + return newHttp2SolrClientBuilder(solrHost, settings, configuration).build(); } public CloudSolrClient createCloudSolrClient(String zookeeperConnectionString) { ConnectStringParser parser = new ConnectStringParser(zookeeperConnectionString); + List zkHosts = + parser.getServerAddresses().stream() + .map(address -> address.getHostString() + ":" + address.getPort()) + .collect(Collectors.toList()); CloudSolrClient client = - new CloudHttp2SolrClient.Builder( - parser.getServerAddresses().stream() - .map(address -> address.getHostString() + ":" + 
address.getPort()) - .collect(Collectors.toList()), - Optional.ofNullable(parser.getChrootPath())) - .withInternalClientBuilder( - new Http2SolrClient.Builder() - .withIdleTimeout(settings.getHttpReadTimeout(), TimeUnit.MILLISECONDS) - .withConnectionTimeout( - settings.getHttpConnectionTimeout(), TimeUnit.MILLISECONDS)) + new CloudHttp2SolrClient.Builder(zkHosts, Optional.ofNullable(parser.getChrootPath())) + .withInternalClientBuilder(newHttp2SolrClientBuilder(null, settings, configuration)) .withResponseParser(new NoOpResponseParser("json")) .build(); diff --git a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java index 2cad046fa44..946df18cd78 100644 --- a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java +++ b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java @@ -96,6 +96,13 @@ public class SolrExporter { + ARG_NUM_THREADS_DEFAULT + "."; + private static final String[] ARG_CREDENTIALS_FLAGS = {"-u", "--credentials"}; + private static final String ARG_CREDENTIALS_METAVAR = "CREDENTIALS"; + private static final String ARG_CREDENTIALS_DEST = "credentials"; + private static final String ARG_CREDENTIALS_DEFAULT = ""; + private static final String ARG_CREDENTIALS_HELP = + "Specify the credentials in the format username:password. 
Example: --credentials solr:SolrRocks"; + public static final CollectorRegistry defaultRegistry = new CollectorRegistry(); private final int port; @@ -161,7 +168,7 @@ private SolrScraper createScraper( SolrScrapeConfiguration configuration, PrometheusExporterSettings settings, String clusterId) { - SolrClientFactory factory = new SolrClientFactory(settings); + SolrClientFactory factory = new SolrClientFactory(settings, configuration); switch (configuration.getType()) { case STANDALONE: @@ -242,6 +249,14 @@ public static void main(String[] args) { .setDefault(ARG_CLUSTER_ID_DEFAULT) .help(ARG_CLUSTER_ID_HELP); + parser + .addArgument(ARG_CREDENTIALS_FLAGS) + .metavar(ARG_CREDENTIALS_METAVAR) + .dest(ARG_CREDENTIALS_DEST) + .type(String.class) + .setDefault(ARG_CREDENTIALS_DEFAULT) + .help(ARG_CREDENTIALS_HELP); + try { Namespace res = parser.parseArgs(args); @@ -266,6 +281,14 @@ public static void main(String[] args) { clusterId = defaultClusterId; } + if (!res.getString(ARG_CREDENTIALS_DEST).isEmpty()) { + String credentials = res.getString(ARG_CREDENTIALS_DEST); + if (credentials.indexOf(':') > 0) { + String[] credentialsArray = credentials.split(":", 2); + scrapeConfiguration.withBasicAuthCredentials(credentialsArray[0], credentialsArray[1]); + } + } + SolrExporter solrExporter = new SolrExporter( port, diff --git a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrScrapeConfiguration.java b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrScrapeConfiguration.java index a1e1fbdf27b..f61447b810d 100644 --- a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrScrapeConfiguration.java +++ b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrScrapeConfiguration.java @@ -29,6 +29,8 @@ public enum ConnectionType { private final ConnectionType type; private final String zookeeperConnectionString; private final String solrHost; + private String basicAuthUser; + private String 
basicAuthPwd; private SolrScrapeConfiguration( ConnectionType type, String zookeeperConnectionString, String solrHost) { @@ -57,6 +59,20 @@ public static SolrScrapeConfiguration standalone(String solrHost) { return new SolrScrapeConfiguration(ConnectionType.STANDALONE, null, solrHost); } + public SolrScrapeConfiguration withBasicAuthCredentials(String user, String password) { + this.basicAuthUser = user; + this.basicAuthPwd = password; + return this; + } + + public String getBasicAuthUser() { + return basicAuthUser; + } + + public String getBasicAuthPwd() { + return basicAuthPwd; + } + @Override public String toString() { if (type == ConnectionType.CLOUD) { diff --git a/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrCloudScraperTest.java b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrCloudScraperTest.java index f3839a1d8cd..2ebc3752cae 100644 --- a/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrCloudScraperTest.java +++ b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrCloudScraperTest.java @@ -21,15 +21,11 @@ import io.prometheus.client.Collector; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.NoOpResponseParser; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; @@ -42,6 +38,7 @@ import org.apache.solr.prometheus.exporter.MetricsConfiguration; import org.apache.solr.prometheus.exporter.PrometheusExporterSettings; import org.apache.solr.prometheus.exporter.SolrClientFactory; +import org.apache.solr.prometheus.exporter.SolrScrapeConfiguration; import 
org.apache.solr.prometheus.utils.Helpers; import org.junit.After; import org.junit.Before; @@ -55,16 +52,11 @@ public class SolrCloudScraperTest extends PrometheusExporterTestBase { private ExecutorService executor; private SolrCloudScraper createSolrCloudScraper() { - var solrClient = - new CloudSolrClient.Builder( - Collections.singletonList(cluster.getZkServer().getZkAddress()), Optional.empty()) - .withResponseParser(new NoOpResponseParser("json")) - .build(); - - solrClient.connect(); - - SolrClientFactory factory = new SolrClientFactory(PrometheusExporterSettings.builder().build()); - + PrometheusExporterSettings settings = PrometheusExporterSettings.builder().build(); + SolrScrapeConfiguration scrapeConfiguration = + SolrScrapeConfiguration.standalone(cluster.getZkServer().getZkAddress()); + SolrClientFactory factory = new SolrClientFactory(settings, scrapeConfiguration); + var solrClient = factory.createCloudSolrClient(cluster.getZkServer().getZkAddress()); return new SolrCloudScraper(solrClient, executor, factory, "test"); } @@ -93,10 +85,7 @@ public void setUp() throws Exception { public void tearDown() throws Exception { super.tearDown(); IOUtils.closeQuietly(solrCloudScraper); - if (null != executor) { - executor.shutdownNow(); - executor = null; - } + ExecutorUtil.shutdownNowAndAwaitTermination(executor); } @Test diff --git a/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperBasicAuthTest.java b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperBasicAuthTest.java new file mode 100644 index 00000000000..b08e771e7d3 --- /dev/null +++ b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperBasicAuthTest.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.prometheus.scraper; + +import io.prometheus.client.Collector; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.concurrent.ExecutorService; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.impl.Http2SolrClient; +import org.apache.solr.common.util.ExecutorUtil; +import org.apache.solr.common.util.IOUtils; +import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.prometheus.PrometheusExporterTestBase; +import org.apache.solr.prometheus.exporter.MetricsConfiguration; +import org.apache.solr.prometheus.exporter.PrometheusExporterSettings; +import org.apache.solr.prometheus.exporter.SolrClientFactory; +import org.apache.solr.prometheus.exporter.SolrScrapeConfiguration; +import org.apache.solr.prometheus.utils.Helpers; +import org.apache.solr.util.SolrJettyTestRule; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +public class SolrStandaloneScraperBasicAuthTest extends SolrTestCaseJ4 { + + @ClassRule public static final SolrJettyTestRule solrRule = new SolrJettyTestRule(); + + private static Http2SolrClient solrClient; + private static MetricsConfiguration configuration; + private static 
SolrStandaloneScraper solrScraper; + private static ExecutorService executor; + + private static String user = "solr"; + private static String pass = "SolrRocks"; + private static String securityJson = + "{\n" + + "\"authentication\":{ \n" + + " \"blockUnknown\": true, \n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c=\"}, \n" + + " \"realm\":\"My Solr users\", \n" + + " \"forwardCredentials\": false \n" + + "},\n" + + "\"authorization\":{\n" + + " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"permissions\":[{\"name\":\"security-edit\",\n" + + " \"role\":\"admin\"}],\n" + + " \"user-role\":{\"solr\":\"admin\"}\n" + + "}}"; + + @BeforeClass + public static void setupSolrHome() throws Exception { + Path solrHome = LuceneTestCase.createTempDir(); + Files.write(solrHome.resolve("security.json"), securityJson.getBytes(StandardCharsets.UTF_8)); + solrRule.startSolr(solrHome); + + Path configSet = LuceneTestCase.createTempDir(); + SolrStandaloneScraperTest.createConf(configSet); + solrRule + .newCollection() + .withConfigSet(configSet.toString()) + .withBasicAuthCredentials(user, pass) + .create(); + + configuration = + Helpers.loadConfiguration("conf/prometheus-solr-exporter-scraper-test-config.xml"); + + PrometheusExporterSettings settings = PrometheusExporterSettings.builder().build(); + SolrScrapeConfiguration scrapeConfiguration = + SolrScrapeConfiguration.standalone(solrRule.getBaseUrl()) + .withBasicAuthCredentials(user, pass); + solrClient = + new SolrClientFactory(settings, scrapeConfiguration) + .createStandaloneSolrClient(solrRule.getBaseUrl()); + executor = + ExecutorUtil.newMDCAwareFixedThreadPool( + 25, new SolrNamedThreadFactory("solr-cloud-scraper-tests")); + solrScraper = new SolrStandaloneScraper(solrClient, executor, "test"); + + Helpers.indexAllDocs(solrClient); + } + + @AfterClass + public static void cleanup() throws 
Exception { + // scraper also closes the client + IOUtils.closeQuietly(solrScraper); + ExecutorUtil.shutdownNowAndAwaitTermination(executor); + } + + @Test + public void search() throws Exception { + List samples = + solrScraper.search(configuration.getSearchConfiguration().get(0)).asList(); + + assertEquals(1, samples.size()); + + Collector.MetricFamilySamples sampleFamily = samples.get(0); + assertEquals("solr_facets_category", sampleFamily.name); + assertEquals(PrometheusExporterTestBase.FACET_VALUES.size(), sampleFamily.samples.size()); + + for (Collector.MetricFamilySamples.Sample sample : sampleFamily.samples) { + assertEquals( + PrometheusExporterTestBase.FACET_VALUES.get(sample.labelValues.get(0)), + sample.value, + 0.001); + } + } +} diff --git a/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperTest.java b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperTest.java index 1db0142c2ea..7f756c0fb3e 100644 --- a/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperTest.java +++ b/solr/prometheus-exporter/src/test/org/apache/solr/prometheus/scraper/SolrStandaloneScraperTest.java @@ -18,27 +18,34 @@ package org.apache.solr.prometheus.scraper; import io.prometheus.client.Collector; -import java.io.File; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; -import org.apache.commons.io.FileUtils; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.Http2SolrClient; -import org.apache.solr.client.solrj.impl.NoOpResponseParser; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.prometheus.PrometheusExporterTestBase; import 
org.apache.solr.prometheus.collector.MetricSamples; import org.apache.solr.prometheus.exporter.MetricsConfiguration; +import org.apache.solr.prometheus.exporter.PrometheusExporterSettings; +import org.apache.solr.prometheus.exporter.SolrClientFactory; +import org.apache.solr.prometheus.exporter.SolrScrapeConfiguration; import org.apache.solr.prometheus.utils.Helpers; -import org.apache.solr.util.RestTestBase; +import org.apache.solr.util.SolrJettyTestRule; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.ClassRule; import org.junit.Test; -public class SolrStandaloneScraperTest extends RestTestBase { +public class SolrStandaloneScraperTest extends SolrTestCaseJ4 { + + @ClassRule public static final SolrJettyTestRule solrRule = new SolrJettyTestRule(); private static MetricsConfiguration configuration; private static SolrStandaloneScraper solrScraper; @@ -47,42 +54,45 @@ public class SolrStandaloneScraperTest extends RestTestBase { @BeforeClass public static void setupBeforeClass() throws Exception { - File tmpSolrHome = createTempDir().toFile(); - tmpSolrHome.deleteOnExit(); - - FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - - initCore("solrconfig.xml", "managed-schema"); + solrRule.startSolr(LuceneTestCase.createTempDir()); - createJettyAndHarness( - tmpSolrHome.getAbsolutePath(), "solrconfig.xml", "managed-schema", "/solr", true, null); + Path configSet = LuceneTestCase.createTempDir(); + createConf(configSet); + solrRule.newCollection().withConfigSet(configSet.toString()).create(); + PrometheusExporterSettings settings = PrometheusExporterSettings.builder().build(); + SolrScrapeConfiguration scrapeConfiguration = + SolrScrapeConfiguration.standalone(solrRule.getBaseUrl()); + solrClient = + new SolrClientFactory(settings, scrapeConfiguration) + .createStandaloneSolrClient(solrRule.getBaseUrl()); executor = ExecutorUtil.newMDCAwareFixedThreadPool( 25, new 
SolrNamedThreadFactory("solr-cloud-scraper-tests")); configuration = Helpers.loadConfiguration("conf/prometheus-solr-exporter-scraper-test-config.xml"); - - solrClient = - new Http2SolrClient.Builder(restTestHarness.getAdminURL()) - .withResponseParser(new NoOpResponseParser("json")) - .build(); solrScraper = new SolrStandaloneScraper(solrClient, executor, "test"); Helpers.indexAllDocs(solrClient); } + public static void createConf(Path configSet) throws IOException { + Path subHome = configSet.resolve("conf"); + Files.createDirectories(subHome); + + Path top = SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"); + Files.copy(top.resolve("managed-schema.xml"), subHome.resolve("schema.xml")); + Files.copy(top.resolve("solrconfig.xml"), subHome.resolve("solrconfig.xml")); + + Files.copy(top.resolve("stopwords.txt"), subHome.resolve("stopwords.txt")); + Files.copy(top.resolve("synonyms.txt"), subHome.resolve("synonyms.txt")); + } + @AfterClass - public static void cleanUp() throws Exception { + public static void cleanup() throws Exception { + // scraper also closes the client IOUtils.closeQuietly(solrScraper); - IOUtils.closeQuietly(solrClient); - cleanUpHarness(); - if (null != executor) { - executor.shutdownNow(); - executor = null; - } - solrScraper = null; - solrClient = null; + ExecutorUtil.shutdownNowAndAwaitTermination(executor); } @Test @@ -107,8 +117,7 @@ public void pingCores() throws Exception { assertEquals(1, samples.samples.size()); assertEquals(1.0, samples.samples.get(0).value, 0.001); assertEquals(List.of("base_url", "cluster_id"), samples.samples.get(0).labelNames); - assertEquals( - List.of(restTestHarness.getAdminURL(), "test"), samples.samples.get(0).labelValues); + assertEquals(List.of(solrRule.getBaseUrl(), "test"), samples.samples.get(0).labelValues); } @Test @@ -127,7 +136,7 @@ public void metricsForHost() throws Exception { assertEquals(1, metricsByHost.size()); List replicaSamples = - 
metricsByHost.get(restTestHarness.getAdminURL()).asList(); + metricsByHost.get(solrRule.getBaseUrl()).asList(); assertEquals(1, replicaSamples.size()); diff --git a/solr/server/solr/configsets/_default/conf/solrconfig.xml b/solr/server/solr/configsets/_default/conf/solrconfig.xml index ecd3693f642..d68a091f163 100644 --- a/solr/server/solr/configsets/_default/conf/solrconfig.xml +++ b/solr/server/solr/configsets/_default/conf/solrconfig.xml @@ -35,7 +35,7 @@ that you fully re-index after changing this setting as it can affect both how text is indexed and queried. --> - 9.7 + 9.8 - 9.7 + 9.8