diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 2253d5034c4..9c36511438b 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -36,7 +36,8 @@ Improvements Optimizations --------------------- -(No changes) +* SOLR-17568: The CLI bin/solr export tool now contacts the appropriate nodes directly for data instead of proxying through one. + (David Smiley) Bug Fixes --------------------- @@ -93,6 +94,12 @@ Deprecation Removals * SOLR-17564: Remove code in Assign used for backwards compatibility with Collections created prior to 7.0 (Paul McArthur) +* SOLR-17576: Remove deprecated master/slave option language from ReplicationHandler. (Eric Pugh) + +* SOLR-16781: Support for `` directives (used in solrconfig.xml to add JARs on a core-by-core basis) has been removed. Users + looking for similar functionality can use Solr's package manager. Users that don't need to vary JAR access on a per-core basis + have many options, including the `` tag and directly modifying Solr's classpath prior to JVM startup. (Jason Gerlowski) + * SOLR-17540: Removed the Hadoop Auth module, and thus Kerberos authentication and other exotic options. (Eric Pugh) Dependency Upgrades @@ -132,6 +139,8 @@ Other Changes * SOLR-16903: Update CLI tools to use java.nio.file.Path instead of java.io.File (Andrey Bozhko) +* SOLR-17568: SolrCloud no longer reroutes/proxies a core request to another node if not found locally. (David Smiley) + ================== 9.8.0 ================== New Features --------------------- @@ -229,6 +238,10 @@ Bug Fixes * SOLR-16976: Remove log4j-jul jar and use slf4j bridge for JUL to prevent exception from being logged when remote JMX is enabled (Shawn Heisey, Stephen Zhou, Eric Pugh, Christine Poerschke, David Smiley) +* SOLR-17575: Fixed broken backwards compatibility with the legacy "langid.whitelist" config in Solr Langid. (Jan Høydahl, Alexander Zagniotov) + +* SOLR-17574: Fix AllowListUrlChecker when liveNodes changes. Remove ClusterState.getHostAllowList (Bruno Roustant, David Smiley) + Dependency Upgrades --------------------- (No changes) @@ -258,6 +271,8 @@ led to the suppression of exceptions. (Andrey Bozhko) * SOLR-17556: "home" and "data" directories used by Solr examples have been updated to align with documented best practices. (Eric Pugh, Houston Putman) +* SOLR-17577: Remove "solr.indexfetcher.sotimeout" system property that was for optimizing replication tests. It was disabled, but not removed. (Eric Pugh) + ================== 9.7.1 ================== Bug Fixes --------------------- diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd index 7dbd249116c..1dca1a9e81c 100755 --- a/solr/bin/solr.cmd +++ b/solr/bin/solr.cmd @@ -1170,11 +1170,9 @@ for %%a in (%*) do ( if "!arg:~0,1!" equ "-" set "option=!arg!" ) else ( set "option!option!=%%a" - if "!option!" equ "-s" set "SOLR_HOME=%%a" if "!option!" equ "--solr-home" set "SOLR_HOME=%%a" - if "!option!" equ "-d" set "SOLR_SERVER_DIR=%%a" - if "!option!" equ "--server-dir" set "SOLR_SERVER_DIR=%%a" - if not "!option!" equ "-s" if not "!option!" equ "--solr-home" if not "!option!" equ "-d" if not "!option!" equ "--server-dir" ( + if "!option!" equ "--server-dir" set "SOLR_SERVER_DIR=%%a" + if not "!option!" equ "--solr-home" if not "!option!" equ "--server-dir" ( set "AUTH_PARAMS=!AUTH_PARAMS! !option! 
%%a" ) set "option=" diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java index 6912af39510..d156710a675 100644 --- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java +++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java @@ -165,7 +165,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) { core = getCoreByCollection(collectionName, isPreferLeader); if (core == null) { // this collection exists , but this node does not have a replica for that collection - extractRemotePath(collectionName, collectionName); + extractRemotePath(collectionName); if (action == REMOTEQUERY) { action = ADMIN_OR_REMOTEQUERY; coreUrl = coreUrl.replace("/solr/", "/solr/____v2/c/"); diff --git a/solr/core/src/java/org/apache/solr/cli/ExportTool.java b/solr/core/src/java/org/apache/solr/cli/ExportTool.java index 59bca949dc1..b5a58377ba3 100644 --- a/solr/core/src/java/org/apache/solr/cli/ExportTool.java +++ b/solr/core/src/java/org/apache/solr/cli/ExportTool.java @@ -660,8 +660,8 @@ class CoreHandler { } boolean exportDocsFromCore() throws IOException, SolrServerException { - - try (SolrClient client = CLIUtils.getSolrClient(baseurl, credentials)) { + // reference the replica's node URL, not the baseUrl in scope, which could be anywhere + try (SolrClient client = CLIUtils.getSolrClient(replica.getBaseUrl(), credentials)) { expectedDocs = getDocCount(replica.getCoreName(), client, query); QueryRequest request; ModifiableSolrParams params = new ModifiableSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java index 783d58e0a57..50c0162881a 100644 --- a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java +++ b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java @@ -647,6 +647,11 @@ protected Map startSolr( if (!isWindows && cwdPath.length() > 1 && solrHome.startsWith(cwdPath)) solrHome = solrHome.substring(cwdPath.length() + 1); + final var syspropArg = + ("techproducts".equals(cli.getOptionValue(EXAMPLE_OPTION))) + ? "-Dsolr.modules=clustering,extraction,langid,ltr,scripting -Dsolr.ltr.enabled=true -Dsolr.clustering.enabled=true" + : ""; + String startCmd = String.format( Locale.ROOT, @@ -661,7 +666,8 @@ protected Map startSolr( forceArg, verboseArg, extraArgs, - jvmOptsArg); + jvmOptsArg, + syspropArg); startCmd = startCmd.replaceAll("\\s+", " ").trim(); // for pretty printing echo("\nStarting up Solr on port " + port + " using command:"); diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java index 53160cd75ee..a9f9b417abf 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java @@ -272,7 +272,7 @@ public final ConfigSet loadConfigSet(CoreDescriptor dcore) { NamedList properties = loadConfigSetProperties(dcore, coreLoader); boolean trusted = isConfigSetTrusted(coreLoader); - SolrConfig solrConfig = createSolrConfig(dcore, coreLoader, trusted); + SolrConfig solrConfig = createSolrConfig(dcore, coreLoader); return new ConfigSet( configSetName(dcore), solrConfig, @@ -314,13 +314,12 @@ public ConfigSetService(SolrResourceLoader loader, boolean shareSchema) { * * @param cd the core's CoreDescriptor * @param loader the core's resource loader - * @param isTrusted is the configset trusted? 
* @return a SolrConfig object */ - protected SolrConfig createSolrConfig( - CoreDescriptor cd, SolrResourceLoader loader, boolean isTrusted) throws IOException { + protected SolrConfig createSolrConfig(CoreDescriptor cd, SolrResourceLoader loader) + throws IOException { return SolrConfig.readFromResourceLoader( - loader, cd.getConfigName(), isTrusted, cd.getSubstitutableProperties()); + loader, cd.getConfigName(), cd.getSubstitutableProperties()); } /** diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java index b3c95a23fe7..e7adaf8d2f6 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java @@ -17,7 +17,6 @@ package org.apache.solr.core; import static org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.common.params.CommonParams.PATH; import static org.apache.solr.core.ConfigOverlay.ZNODEVER; import static org.apache.solr.core.SolrConfig.PluginOpts.LAZY; import static org.apache.solr.core.SolrConfig.PluginOpts.MULTI_OK; @@ -31,7 +30,6 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.lang.invoke.MethodHandles; -import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -96,7 +94,7 @@ /** * Provides a static reference to a Config object modeling the main configuration data for a Solr - * instance -- typically found in "solrconfig.xml". + * core -- typically found in "solrconfig.xml". */ public class SolrConfig implements MapSerializable { @@ -143,16 +141,13 @@ public enum PluginOpts { * @param name the configuration name used by the loader if the stream is null */ public SolrConfig(Path instanceDir, String name) throws IOException { - this(new SolrResourceLoader(instanceDir), name, true, null); + this(new SolrResourceLoader(instanceDir), name, null); } public static SolrConfig readFromResourceLoader( - SolrResourceLoader loader, - String name, - boolean isConfigsetTrusted, - Properties substitutableProperties) { + SolrResourceLoader loader, String name, Properties substitutableProperties) { try { - return new SolrConfig(loader, name, isConfigsetTrusted, substitutableProperties); + return new SolrConfig(loader, name, substitutableProperties); } catch (Exception e) { String resource; if (loader instanceof ZkSolrResourceLoader) { @@ -196,15 +191,9 @@ public InputStream apply(String s) { * * @param loader the resource loader * @param name the configuration name - * @param isConfigsetTrusted false if configset was uploaded using unsecured configset upload API, - * true otherwise * @param substitutableProperties optional properties to substitute into the XML */ - private SolrConfig( - SolrResourceLoader loader, - String name, - boolean isConfigsetTrusted, - Properties substitutableProperties) { + private SolrConfig(SolrResourceLoader loader, String name, Properties substitutableProperties) { this.resourceLoader = loader; this.resourceName = name; this.substituteProperties = substitutableProperties; @@ -237,7 +226,7 @@ private SolrConfig( rootDataHashCode = this.root.txt().hashCode(); getRequestParams(); - initLibs(loader, isConfigsetTrusted); + initLibs(loader); String val = root.child( IndexSchema.LUCENE_MATCH_VERSION_PARAM, @@ -934,11 +923,10 @@ public PluginInfo getPluginInfo(String type) { SolrException.ErrorCode.SERVER_ERROR, "Multiple plugins configured for type: " + type); } - private void initLibs(SolrResourceLoader loader, 
boolean isConfigsetTrusted) {
+  private void initLibs(SolrResourceLoader loader) {
     // TODO Want to remove SolrResourceLoader.getInstancePath; it can be on a Standalone subclass.
     // For Zk subclass, it's needed for the time being as well. We could remove that one if we
-    // remove two things in SolrCloud: (1) instancePath/lib and (2) solrconfig lib directives with
-    // relative paths. Can wait till 9.0.
+    // remove "instancePath/lib" in SolrCloud. Can wait till 9.0.
     Path instancePath = loader.getInstancePath();
     List<URL> urls = new ArrayList<>();
 
@@ -950,48 +938,15 @@ private void initLibs(SolrResourceLoader loader, boolean isConfigsetTrusted) {
         log.warn("Couldn't add files from {} to classpath: {}", libPath, e);
       }
     }
-
-    List<ConfigNode> nodes = root.getAll("lib");
-    if (nodes != null && nodes.size() > 0) {
-      if (!isConfigsetTrusted) {
-        throw new SolrException(
-            ErrorCode.UNAUTHORIZED,
-            "The configset for this collection was uploaded without any authentication in place,"
-                + " and use of <lib> is not available for collections with untrusted configsets. To use this component, re-upload the configset"
-                + " after enabling authentication and authorization.");
-      }
-
-      for (int i = 0; i < nodes.size(); i++) {
-        ConfigNode node = nodes.get(i);
-        String baseDir = node.attr("dir");
-        String path = node.attr(PATH);
-        if (null != baseDir) {
-          // :TODO: add support for a simpler 'glob' mutually exclusive of regex
-          Path dir = instancePath.resolve(baseDir);
-          String regex = node.attr("regex");
-          try {
-            if (regex == null) urls.addAll(SolrResourceLoader.getURLs(dir));
-            else urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex));
-          } catch (IOException e) {
-            log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e);
-          }
-        } else if (null != path) {
-          final Path dir = instancePath.resolve(path);
-          try {
-            urls.add(dir.toUri().toURL());
-          } catch (MalformedURLException e) {
-            log.warn("Couldn't add file {} to classpath: {}", dir, e);
-          }
-        } else {
-          throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
-        }
-      }
-    }
-
     if (!urls.isEmpty()) {
       loader.addToClassLoader(urls);
       loader.reloadLuceneSPI();
     }
+
+    List<ConfigNode> nodes = root.getAll("lib");
+    if (nodes != null && nodes.size() > 0) {
+      log.warn("<lib> entries no longer supported in solrconfig.xml; ignoring...");
+    }
   }
 
   public int getMultipartUploadLimitKB() {
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 95c4a1be611..a933bb6e1d6 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -27,8 +27,6 @@
 import static org.apache.solr.handler.ReplicationHandler.CONF_FILES;
 import static org.apache.solr.handler.ReplicationHandler.FETCH_FROM_LEADER;
 import static org.apache.solr.handler.ReplicationHandler.LEADER_URL;
-import static org.apache.solr.handler.ReplicationHandler.LEGACY_LEADER_URL;
-import static org.apache.solr.handler.ReplicationHandler.LEGACY_SKIP_COMMIT_ON_LEADER_VERSION_ZERO;
 import static org.apache.solr.handler.ReplicationHandler.SIZE;
 import static org.apache.solr.handler.ReplicationHandler.SKIP_COMMIT_ON_LEADER_VERSION_ZERO;
 import static org.apache.solr.handler.admin.api.ReplicationAPIBase.CHECKSUM;
@@ -279,17 +277,11 @@ public IndexFetcher(
     if (fetchFromLeader != null && fetchFromLeader instanceof Boolean) {
       this.fetchFromLeader = (boolean) fetchFromLeader;
     }
-    Object skipCommitOnLeaderVersionZero =
-
ReplicationHandler.getObjectWithBackwardCompatibility( - initArgs, - SKIP_COMMIT_ON_LEADER_VERSION_ZERO, - LEGACY_SKIP_COMMIT_ON_LEADER_VERSION_ZERO); + Object skipCommitOnLeaderVersionZero = initArgs.get(SKIP_COMMIT_ON_LEADER_VERSION_ZERO); if (skipCommitOnLeaderVersionZero != null && skipCommitOnLeaderVersionZero instanceof Boolean) { this.skipCommitOnLeaderVersionZero = (boolean) skipCommitOnLeaderVersionZero; } - String leaderUrl = - ReplicationHandler.getObjectWithBackwardCompatibility( - initArgs, LEADER_URL, LEGACY_LEADER_URL); + String leaderUrl = (String) initArgs.get(LEADER_URL); if (leaderUrl == null && !this.fetchFromLeader) throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "'leaderUrl' is required for a follower"); @@ -304,13 +296,8 @@ public IndexFetcher( useInternalCompression = ReplicationHandler.INTERNAL.equals(compress); useExternalCompression = ReplicationHandler.EXTERNAL.equals(compress); connTimeout = getParameter(initArgs, HttpClientUtil.PROP_CONNECTION_TIMEOUT, 30000, null); + soTimeout = getParameter(initArgs, HttpClientUtil.PROP_SO_TIMEOUT, 120000, null); - // allow a leader override for tests - you specify this in /replication follower section of - // solrconfig and some test don't want to define this - soTimeout = Integer.getInteger("solr.indexfetcher.sotimeout", -1); - if (soTimeout == -1) { - soTimeout = getParameter(initArgs, HttpClientUtil.PROP_SO_TIMEOUT, 120000, null); - } String httpBasicAuthUser = (String) initArgs.get(HttpClientUtil.PROP_BASIC_AUTH_USER); String httpBasicAuthPassword = (String) initArgs.get(HttpClientUtil.PROP_BASIC_AUTH_PASS); solrClient = diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java index ebea07e26bc..d059ad38d53 100644 --- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java @@ -76,7 +76,6 @@ import org.apache.solr.client.api.model.SolrJerseyResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; @@ -307,8 +306,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } else if (command.equals(CMD_SHOW_COMMITS)) { populateCommitInfo(rsp); } else if (command.equals(CMD_DETAILS)) { - getReplicationDetails( - rsp, getBoolWithBackwardCompatibility(solrParams, "follower", "slave", true)); + getReplicationDetails(rsp, solrParams.getBool("follower", true)); } else if (CMD_ENABLE_REPL.equalsIgnoreCase(command)) { replicationEnabled.set(true); rsp.add(STATUS, OK_STATUS); @@ -348,14 +346,6 @@ private void getFileStream(SolrParams solrParams, SolrQueryResponse rsp, SolrQue return; } - if (solrParams.getParams(CommonParams.WT) == null) { - reportErrorOnResponse( - rsp, - "Missing wt parameter", - new SolrException(SolrException.ErrorCode.BAD_REQUEST, "wt not specified in request")); - return; - } - coreReplicationAPI.fetchFile( fileName, dirType, @@ -367,39 +357,6 @@ private void getFileStream(SolrParams solrParams, SolrQueryResponse rsp, SolrQue solrParams.getLong(GENERATION)); } - static boolean getBoolWithBackwardCompatibility( - SolrParams params, String preferredKey, String alternativeKey, boolean defaultValue) { - Boolean value = 
params.getBool(preferredKey); - if (value != null) { - return value; - } - return params.getBool(alternativeKey, defaultValue); - } - - @SuppressWarnings("unchecked") - static T getObjectWithBackwardCompatibility( - SolrParams params, String preferredKey, String alternativeKey, T defaultValue) { - Object value = params.get(preferredKey); - if (value != null) { - return (T) value; - } - value = params.get(alternativeKey); - if (value != null) { - return (T) value; - } - return defaultValue; - } - - @SuppressWarnings("unchecked") - public static T getObjectWithBackwardCompatibility( - NamedList params, String preferredKey, String alternativeKey) { - Object value = params.get(preferredKey); - if (value != null) { - return (T) value; - } - return (T) params.get(alternativeKey); - } - private void reportErrorOnResponse(SolrQueryResponse response, String message, Exception e) { response.add(STATUS, ERR_STATUS); response.add(MESSAGE, message); @@ -432,8 +389,7 @@ private void deleteSnapshot(ModifiableSolrParams params, SolrQueryResponse rsp) private void fetchIndex(SolrParams solrParams, SolrQueryResponse rsp) throws InterruptedException { - String leaderUrl = - getObjectWithBackwardCompatibility(solrParams, LEADER_URL, LEGACY_LEADER_URL, null); + String leaderUrl = solrParams.get(LEADER_URL, null); if (!isFollower && leaderUrl == null) { reportErrorOnResponse(rsp, "No follower configured or no 'leaderUrl' specified", null); return; @@ -500,11 +456,7 @@ static Long getCheckSum(Checksum checksum, Path f) { private volatile IndexFetcher currentIndexFetcher; public IndexFetchResult doFetch(SolrParams solrParams, boolean forceReplication) { - String leaderUrl = - solrParams == null - ? null - : ReplicationHandler.getObjectWithBackwardCompatibility( - solrParams, LEADER_URL, LEGACY_LEADER_URL, null); + String leaderUrl = solrParams == null ? null : solrParams.get(LEADER_URL, null); if (!indexFetchLock.tryLock()) return IndexFetchResult.LOCK_OBTAIN_FAILED; if (core.getCoreContainer().isShutDown()) { log.warn("I was asked to replicate but CoreContainer is shutting down"); @@ -1290,14 +1242,14 @@ public void inform(SolrCore core) { } else { replicationHandlerConfig.numberBackupsToKeep = 0; } - NamedList follower = getObjectWithBackwardCompatibility(initArgs, "follower", "slave"); + NamedList follower = (NamedList) initArgs.get("follower"); boolean enableFollower = isEnabled(follower); if (enableFollower) { currentIndexFetcher = pollingIndexFetcher = new IndexFetcher(follower, this, core); setupPolling((String) follower.get(ReplicationAPIBase.POLL_INTERVAL)); isFollower = true; } - NamedList leader = getObjectWithBackwardCompatibility(initArgs, "leader", "master"); + NamedList leader = (NamedList) initArgs.get("leader"); boolean enableLeader = isEnabled(leader); if (enableLeader || (enableFollower && !currentIndexFetcher.fetchFromLeader)) { @@ -1577,11 +1529,6 @@ private Long readIntervalNs(String interval) { public static final String LEADER_URL = "leaderUrl"; - /** - * @deprecated Only used for backwards compatibility. Use {@link #LEADER_URL} - */ - @Deprecated public static final String LEGACY_LEADER_URL = "masterUrl"; - public static final String FETCH_FROM_LEADER = "fetchFromLeader"; // in case of TLOG replica, if leaderVersion = zero, don't do commit @@ -1591,14 +1538,6 @@ private Long readIntervalNs(String interval) { // state from leader public static final String SKIP_COMMIT_ON_LEADER_VERSION_ZERO = "skipCommitOnLeaderVersionZero"; - /** - * @deprecated Only used for backwards compatibility. 
Use {@link - * #SKIP_COMMIT_ON_LEADER_VERSION_ZERO} - */ - @Deprecated - public static final String LEGACY_SKIP_COMMIT_ON_LEADER_VERSION_ZERO = - "skipCommitOnMasterVersionZero"; - public static final String MESSAGE = "message"; public static final String COMMAND = "command"; diff --git a/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java index 897d9921e2c..b174e177f1a 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java @@ -243,9 +243,8 @@ private boolean isWithinGenerationLag( IndexFetcher indexFetcher = null; try { // may not be the best way to get leader's replicableCommit - NamedList follower = - ReplicationHandler.getObjectWithBackwardCompatibility( - replicationHandler.getInitArgs(), "follower", "slave"); + NamedList follower = (NamedList) replicationHandler.getInitArgs().get("follower"); + indexFetcher = new IndexFetcher(follower, replicationHandler, core); NamedList replicableCommitOnLeader = indexFetcher.getLatestVersion(); diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java index f34793aa744..1370c775540 100644 --- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java +++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java @@ -254,7 +254,7 @@ public void updateFileContents(SolrQueryRequest req, SolrQueryResponse rsp) try { InMemoryResourceLoader loader = new InMemoryResourceLoader(coreContainer, mutableId, SOLR_CONFIG_XML, data); - SolrConfig.readFromResourceLoader(loader, SOLR_CONFIG_XML, requestIsTrusted, null); + SolrConfig.readFromResourceLoader(loader, SOLR_CONFIG_XML, null); } catch (Exception exc) { updateFileError = exc; } diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java index 0b14b0d4cf4..955aa1d98a9 100644 --- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java +++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java @@ -677,9 +677,7 @@ ManagedIndexSchema deleteNestedDocsFieldsIfNeeded(ManagedIndexSchema schema, boo SolrConfig loadSolrConfig(String configSet) { ZkSolrResourceLoader zkLoader = zkLoaderForConfigSet(configSet); - boolean trusted = isConfigSetTrusted(configSet); - - return SolrConfig.readFromResourceLoader(zkLoader, SOLR_CONFIG_XML, trusted, null); + return SolrConfig.readFromResourceLoader(zkLoader, SOLR_CONFIG_XML, null); } ManagedIndexSchema loadLatestSchema(String configSet) { diff --git a/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java b/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java index 9bcede9b060..9fbffc4cdfb 100644 --- a/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java +++ b/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java @@ -27,6 +27,7 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.core.NodeConfig; @@ -85,6 +86,9 @@ public String toString() { /** Allow list of hosts. 
Elements in the list will be host:port (no protocol or context). */
   private final Set<String> hostAllowList;
 
+  private volatile Set<String> liveHostUrlsCache;
+  private volatile Set<String> liveNodesCache;
+
   /**
    * @param urlAllowList List of allowed URLs. URLs must be well-formed, missing protocol is
    *     tolerated. An empty list means there is no explicit allow-list of URLs, in this case no URL
@@ -136,11 +140,10 @@ public void checkAllowList(List<String> urls) throws MalformedURLException {
    */
   public void checkAllowList(List<String> urls, ClusterState clusterState)
       throws MalformedURLException {
-    Set<String> clusterHostAllowList =
-        clusterState == null ? Collections.emptySet() : clusterState.getHostAllowList();
+    Set<String> liveHostUrls = getLiveHostUrls(clusterState);
     for (String url : urls) {
       String hostPort = parseHostPort(url);
-      if (clusterHostAllowList.stream().noneMatch(hostPort::equalsIgnoreCase)
+      if (liveHostUrls.stream().noneMatch(hostPort::equalsIgnoreCase)
           && hostAllowList.stream().noneMatch(hostPort::equalsIgnoreCase)) {
         throw new SolrException(
             SolrException.ErrorCode.FORBIDDEN,
@@ -154,6 +157,33 @@ public void checkAllowList(List<String> urls, ClusterState clusterState)
     }
   }
 
+  /**
+   * Gets the set of live hosts urls (host:port) built from the set of live nodes. The set is cached
+   * to be reused until the live nodes change.
+   */
+  private Set<String> getLiveHostUrls(ClusterState clusterState) {
+    if (clusterState == null) {
+      return Set.of();
+    }
+    if (liveHostUrlsCache == null || clusterState.getLiveNodes() != liveNodesCache) {
+      synchronized (this) {
+        Set<String> liveNodes = clusterState.getLiveNodes();
+        if (liveHostUrlsCache == null || liveNodes != liveNodesCache) {
+          liveHostUrlsCache = buildLiveHostUrls(liveNodes);
+          liveNodesCache = liveNodes;
+        }
+      }
+    }
+    return liveHostUrlsCache;
+  }
+
+  @VisibleForTesting
+  Set<String> buildLiveHostUrls(Set<String> liveNodes) {
+    return liveNodes.stream()
+        .map((liveNode) -> liveNode.substring(0, liveNode.indexOf('_')))
+        .collect(Collectors.toSet());
+  }
+
   /** Whether this checker has been created with a non-empty allow-list of URLs.
*/ public boolean hasExplicitAllowList() { return !hostAllowList.isEmpty(); diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index 7241be77390..1e6c9f42e29 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -40,7 +40,6 @@ import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; @@ -279,7 +278,7 @@ protected void init() throws Exception { } else { // if we couldn't find it locally, look on other nodes if (idx > 0) { - extractRemotePath(collectionName, origCorename); + extractRemotePath(collectionName); if (action == REMOTEQUERY) { path = path.substring(idx); return; @@ -462,10 +461,10 @@ protected void extractHandlerFromURLPath(SolrRequestParsers parser) throws Excep } } - protected void extractRemotePath(String collectionName, String origCorename) + protected void extractRemotePath(String collectionName) throws KeeperException, InterruptedException, SolrException { assert core == null; - coreUrl = getRemoteCoreUrl(collectionName, origCorename); + coreUrl = getRemoteCoreUrl(collectionName); // don't proxy for internal update requests invalidStates = checkStateVersionsAreValid(queryParams.get(CloudSolrClient.STATE_VERSION)); if (coreUrl != null @@ -1090,39 +1089,16 @@ private SolrCore checkProps(ZkNodeProps zkProps) { return core; } - private List getSlicesForAllCollections(ClusterState clusterState, boolean activeSlices) { - // looks across *all* collections - if (activeSlices) { - return clusterState - .collectionStream() - .flatMap(coll -> Arrays.stream(coll.getActiveSlicesArr())) - .toList(); - } else { - return clusterState.collectionStream().flatMap(coll -> coll.getSlices().stream()).toList(); - } - } - - protected String getRemoteCoreUrl(String collectionName, String origCorename) - throws SolrException { + protected String getRemoteCoreUrl(String collectionName) throws SolrException { ClusterState clusterState = cores.getZkController().getClusterState(); final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); - Slice[] slices = (docCollection != null) ? docCollection.getActiveSlicesArr() : null; - List activeSlices; - boolean byCoreName = false; + if (docCollection == null) { + return null; + } + Collection activeSlices = docCollection.getActiveSlices(); int totalReplicas = 0; - if (slices == null) { - byCoreName = true; - // all collections! 
- activeSlices = getSlicesForAllCollections(clusterState, true); - if (activeSlices.isEmpty()) { - activeSlices = getSlicesForAllCollections(clusterState, false); - } - } else { - activeSlices = List.of(slices); - } - for (Slice s : activeSlices) { totalReplicas += s.getReplicas().size(); } @@ -1145,48 +1121,30 @@ protected String getRemoteCoreUrl(String collectionName, String origCorename) "No active replicas found for collection: " + collectionName); } - String coreUrl = - getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, true); + String coreUrl = getCoreUrl(activeSlices, true, clusterState.getLiveNodes()); if (coreUrl == null) { - coreUrl = - getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, false); + coreUrl = getCoreUrl(activeSlices, false, clusterState.getLiveNodes()); } return coreUrl; } private String getCoreUrl( - String collectionName, - String origCorename, - ClusterState clusterState, - List slices, - boolean byCoreName, - boolean activeReplicas) { - String coreUrl; - Set liveNodes = clusterState.getLiveNodes(); + Collection slices, boolean activeReplicas, Set liveNodes) { - List shuffledSlices; - if (slices.size() < 2) { - shuffledSlices = slices; - } else { - shuffledSlices = new ArrayList<>(slices); - Collections.shuffle(shuffledSlices, Utils.RANDOM); - } + Iterator shuffledSlices = new RandomIterator<>(Utils.RANDOM, slices); + while (shuffledSlices.hasNext()) { + Slice slice = shuffledSlices.next(); - for (Slice slice : shuffledSlices) { - List randomizedReplicas = new ArrayList<>(slice.getReplicas()); - Collections.shuffle(randomizedReplicas, Utils.RANDOM); + Iterator shuffledReplicas = new RandomIterator<>(Utils.RANDOM, slice.getReplicas()); + while (shuffledReplicas.hasNext()) { + Replica replica = shuffledReplicas.next(); - for (Replica replica : randomizedReplicas) { if (!activeReplicas || (liveNodes.contains(replica.getNodeName()) && replica.getState() == Replica.State.ACTIVE)) { - if (byCoreName && !Objects.equals(origCorename, replica.getStr(CORE_NAME_PROP))) { - // if it's by core name, make sure they match - continue; - } if (Objects.equals(replica.getBaseUrl(), cores.getZkController().getBaseUrl())) { // don't count a local core continue; diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-replication-legacy.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-replication-legacy.xml deleted file mode 100644 index ddd116be38a..00000000000 --- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-replication-legacy.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - ${tests.luceneMatchVersion:LATEST} - - - ${solr.data.dir:} - - - - - - - - true - - - - - - - - - - - - - - commit - - - http://127.0.0.1:TEST_PORT/solr/collection1 - 00:00:01 - COMPRESSION - - - - - - - max-age=30, public - - - - diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml index 1020db8319e..cbfe9752574 100644 --- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml +++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml @@ -28,12 +28,6 @@ - - - - - - prefix-${solr.test.sys.prop2}-suffix diff --git a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml b/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml deleted file mode 100644 index 25a37e6eee3..00000000000 --- 
a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - diff --git a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml b/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml deleted file mode 100644 index 315bfffbf6c..00000000000 --- a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - ${solr.data.dir:} - - - - ${tests.luceneMatchVersion:LATEST} - - - - - - explicit - true - text - - - - - - - diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java index b41d698ac4c..01614b7218e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java @@ -1421,48 +1421,6 @@ public void testUploadWithScriptUpdateProcessor() throws Exception { scriptRequest("newcollection2"); } - @Test - public void testUploadWithLibDirective() throws Exception { - final String untrustedSuffix = "-untrusted"; - uploadConfigSetWithAssertions("with-lib-directive", untrustedSuffix, null); - // try to create a collection with the uploaded configset - ignoreException("without any authentication in place"); - Throwable thrown = - expectThrows( - SolrClient.RemoteSolrException.class, - () -> { - createCollection( - "newcollection3", - "with-lib-directive" + untrustedSuffix, - 1, - 1, - cluster.getSolrClient()); - }); - unIgnoreException("without any authentication in place"); - - assertThat(thrown.getMessage(), containsString("Underlying core creation failed")); - - // Authorization on - final String trustedSuffix = "-trusted"; - uploadConfigSetWithAssertions("with-lib-directive", trustedSuffix, "solr"); - // try to create a collection with the uploaded configset - CollectionAdminResponse resp = - createCollection( - "newcollection3", "with-lib-directive" + trustedSuffix, 1, 1, cluster.getSolrClient()); - - SolrInputDocument doc = sdoc("id", "4055", "subject", "Solr"); - cluster.getSolrClient().add("newcollection3", doc); - cluster.getSolrClient().commit("newcollection3"); - assertEquals( - "4055", - cluster - .getSolrClient() - .query("newcollection3", params("q", "*:*")) - .getResults() - .get(0) - .get("id")); - } - @Test public void testUploadWithForbiddenContent() throws Exception { // Uploads a config set containing a script, a class file and jar file, will return 400 error diff --git a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java index ee9177b2105..4c348104b35 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java @@ -84,7 +84,7 @@ public InputStream openResource(String resource) throws IOException { long startTime = System.currentTimeMillis(); int numReads = 100; for (int i = 0; i < numReads; i++) { - allConfigs.add(SolrConfig.readFromResourceLoader(srl, "solrconfig.xml", true, null)); + allConfigs.add(SolrConfig.readFromResourceLoader(srl, "solrconfig.xml", null)); } assertEquals(numReads, allConfigs.size()); System.gc(); diff --git a/solr/core/src/test/org/apache/solr/core/TestConfig.java b/solr/core/src/test/org/apache/solr/core/TestConfig.java index 2c17cbf1e1a..745fa21c4c5 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfig.java +++ 
b/solr/core/src/test/org/apache/solr/core/TestConfig.java @@ -45,20 +45,6 @@ public static void beforeClass() throws Exception { public void testLib() throws IOException { SolrResourceLoader loader = h.getCore().getResourceLoader(); InputStream data = null; - String[] expectedFiles = - new String[] { - "empty-file-main-lib.txt", - "empty-file-a1.txt", - "empty-file-a2.txt", - "empty-file-b1.txt", - "empty-file-b2.txt", - "empty-file-c1.txt" - }; - for (String f : expectedFiles) { - data = loader.openResource(f); - assertNotNull("Should have found file " + f, data); - data.close(); - } String[] unexpectedFiles = new String[] {"empty-file-c2.txt", "empty-file-d2.txt"}; for (String f : unexpectedFiles) { data = null; diff --git a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java index 1efa74d2d03..b04420664e8 100644 --- a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java @@ -29,8 +29,7 @@ public static void beforeClass() throws Exception { initCore("solrconfig-minimal.xml", "schema-minimal.xml"); } - // Make sure the content of the lib/ core subfolder is loaded even if there is no node in - // the solrconfig + // Make sure the content of the lib/ core subfolder is loaded @Test public void testLib() throws IOException { SolrResourceLoader loader = h.getCore().getResourceLoader(); diff --git a/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java b/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java index b376e52bd9b..fe25a08da3c 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java +++ b/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java @@ -74,8 +74,6 @@ public void setUp() throws Exception { buildUrl(followerJetty.getLocalPort()), DEFAULT_TEST_CORENAME); followerClientHealthCheck = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort())); - - System.setProperty("solr.indexfetcher.sotimeout2", "45000"); } public void clearIndexWithReplication() throws Exception { @@ -116,7 +114,6 @@ public void tearDown() throws Exception { followerClientHealthCheck.close(); followerClientHealthCheck = null; } - System.clearProperty("solr.indexfetcher.sotimeout"); } @Test diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java index d1566b9c80d..b30e20f70bf 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java @@ -78,7 +78,6 @@ import org.apache.solr.util.TimeOut; import org.junit.After; import org.junit.Before; -import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -103,14 +102,6 @@ public class TestReplicationHandler extends SolrTestCaseJ4 { // index from previous test method static int nDocs = 500; - /* For testing backward compatibility, remove for 10.x */ - private static boolean useLegacyParams = false; - - @BeforeClass - public static void beforeClass() { - useLegacyParams = rarely(); - } - @Override @Before public void setUp() throws Exception { @@ -134,8 +125,6 @@ public void setUp() throws Exception { followerClient = ReplicationTestHelper.createNewSolrClient( buildUrl(followerJetty.getLocalPort()), DEFAULT_TEST_CORENAME); - - 
System.setProperty("solr.indexfetcher.sotimeout2", "45000"); } public void clearIndexWithReplication() throws Exception { @@ -168,7 +157,6 @@ public void tearDown() throws Exception { followerClient.close(); followerClient = null; } - System.clearProperty("solr.indexfetcher.sotimeout"); } static JettySolrRunner createAndStartJetty(SolrInstance instance) throws Exception { @@ -391,38 +379,6 @@ public void doTestDetails() throws Exception { } } - @Test - public void testLegacyConfiguration() throws Exception { - SolrInstance solrInstance = null; - JettySolrRunner instanceJetty = null; - SolrClient client = null; - try { - solrInstance = - new SolrInstance( - createTempDir("solr-instance").toFile(), - "replication-legacy", - leaderJetty.getLocalPort()); - solrInstance.setUp(); - instanceJetty = createAndStartJetty(solrInstance); - client = - ReplicationTestHelper.createNewSolrClient( - buildUrl(instanceJetty.getLocalPort()), DEFAULT_TEST_CORENAME); - - NamedList details = getDetails(client); - - assertEquals("repeater isLeader?", "true", details.get("isLeader")); - assertEquals("repeater isFollower?", "true", details.get("isFollower")); - assertNotNull("repeater has leader section", details.get("leader")); - assertNotNull("repeater has follower section", details.get("follower")); - - } finally { - if (instanceJetty != null) { - instanceJetty.stop(); - } - if (client != null) client.close(); - } - } - /** * Verify that empty commits and/or commits with openSearcher=false on the leader do not cause * subsequent replication problems on the follower @@ -782,11 +738,8 @@ private NamedList getFollowerDetails() throws SolrServerException, IOExc ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CommonParams.QT, "/replication"); params.set("command", "details"); - if (useLegacyParams) { - params.set("slave", "true"); - } else { - params.set("follower", "true"); - } + params.set("follower", "true"); + QueryResponse response = followerClient.query(params); // details/follower/timesIndexReplicated @@ -828,9 +781,6 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { assertEquals(nDocs, leaderQueryResult.getNumFound()); String urlKey = "leaderUrl"; - if (useLegacyParams) { - urlKey = "masterUrl"; - } // index fetch String leaderUrl = @@ -1670,48 +1620,6 @@ public void testEmptyBackups() throws Exception { } } - public void testGetBoolWithBackwardCompatibility() { - assertTrue(ReplicationHandler.getBoolWithBackwardCompatibility(params(), "foo", "bar", true)); - assertFalse(ReplicationHandler.getBoolWithBackwardCompatibility(params(), "foo", "bar", false)); - assertTrue( - ReplicationHandler.getBoolWithBackwardCompatibility( - params("foo", "true"), "foo", "bar", false)); - assertTrue( - ReplicationHandler.getBoolWithBackwardCompatibility( - params("bar", "true"), "foo", "bar", false)); - assertTrue( - ReplicationHandler.getBoolWithBackwardCompatibility( - params("foo", "true", "bar", "false"), "foo", "bar", false)); - } - - public void testGetObjectWithBackwardCompatibility() { - assertEquals( - "aaa", - ReplicationHandler.getObjectWithBackwardCompatibility(params(), "foo", "bar", "aaa")); - assertEquals( - "bbb", - ReplicationHandler.getObjectWithBackwardCompatibility( - params("foo", "bbb"), "foo", "bar", "aaa")); - assertEquals( - "bbb", - ReplicationHandler.getObjectWithBackwardCompatibility( - params("bar", "bbb"), "foo", "bar", "aaa")); - assertEquals( - "bbb", - ReplicationHandler.getObjectWithBackwardCompatibility( - params("foo", "bbb", "bar", "aaa"), "foo", "bar", 
"aaa")); - assertNull(ReplicationHandler.getObjectWithBackwardCompatibility(params(), "foo", "bar", null)); - } - - public void testGetObjectWithBackwardCompatibilityFromNL() { - NamedList nl = new NamedList<>(); - assertNull(ReplicationHandler.getObjectWithBackwardCompatibility(nl, "foo", "bar")); - nl.add("bar", "bbb"); - assertEquals("bbb", ReplicationHandler.getObjectWithBackwardCompatibility(nl, "foo", "bar")); - nl.add("foo", "aaa"); - assertEquals("aaa", ReplicationHandler.getObjectWithBackwardCompatibility(nl, "foo", "bar")); - } - private static class AddExtraDocs implements Runnable { SolrClient leaderClient; diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java index 67b5cc47d9c..7bd54f68c77 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java @@ -92,8 +92,6 @@ public void setUp() throws Exception { followerClient = ReplicationTestHelper.createNewSolrClient( TestReplicationHandler.buildUrl(followerJetty.getLocalPort()), DEFAULT_TEST_CORENAME); - - System.setProperty("solr.indexfetcher.sotimeout2", "45000"); } @Override @@ -118,7 +116,6 @@ public void tearDown() throws Exception { followerClient = null; } System.clearProperty(TEST_URL_ALLOW_LIST); - System.clearProperty("solr.indexfetcher.sotimeout"); IndexFetcher.usableDiskSpaceProvider = originalDiskSpaceprovider; IndexFetcher.testWait = originalTestWait; diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java index 541d2d845f3..970fa7bb48a 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java @@ -17,21 +17,16 @@ package org.apache.solr.handler.component; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Set; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.LBSolrClient; import org.apache.solr.client.solrj.request.QueryRequest; -import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; @@ -155,18 +150,6 @@ public void getShardsAllowList() { } } - @Test - public void testLiveNodesToHostUrl() { - Set liveNodes = - new HashSet<>(Arrays.asList("1.2.3.4:8983_solr", "1.2.3.4:9000_", "1.2.3.4:9001_solr-2")); - ClusterState cs = new ClusterState(liveNodes, new HashMap<>()); - Set hostSet = cs.getHostAllowList(); - assertThat(hostSet.size(), is(3)); - assertThat(hostSet, hasItem("1.2.3.4:8983")); - assertThat(hostSet, hasItem("1.2.3.4:9000")); - assertThat(hostSet, hasItem("1.2.3.4:9001")); - } - @Test public void testXML() { Path home = TEST_PATH(); diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java index 
bd452178f63..2cc881cecc2 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java @@ -183,8 +183,7 @@ private Runnable indexSchemaLoader(String configsetName, final ZkController zkCo try { SolrResourceLoader loader = new ZkSolrResourceLoader(loaderPath, configsetName, null, zkController); - SolrConfig solrConfig = - SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", true, null); + SolrConfig solrConfig = SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", null); ManagedIndexSchemaFactory factory = new ManagedIndexSchemaFactory(); factory.init(new NamedList<>()); diff --git a/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java b/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java index b32c2124c15..0a4f57ba5af 100644 --- a/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java @@ -24,11 +24,14 @@ import java.net.MalformedURLException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; +import org.apache.solr.common.cloud.ClusterState; import org.junit.Test; /** Tests {@link AllowListUrlChecker}. */ @@ -196,6 +199,51 @@ public void testHostParsingNoProtocol() throws Exception { equalTo(AllowListUrlChecker.parseHostPorts(urls("https://abc-1.com:8983/solr")))); } + @Test + public void testLiveNodesToHostUrlCache() throws Exception { + // Given some live nodes defined in the cluster state. + Set liveNodes = Set.of("1.2.3.4:8983_solr", "1.2.3.4:9000_", "1.2.3.4:9001_solr-2"); + ClusterState clusterState1 = new ClusterState(liveNodes, new HashMap<>()); + + // When we call the AllowListUrlChecker.checkAllowList method on both valid and invalid urls. + AtomicInteger callCount = new AtomicInteger(); + AllowListUrlChecker checker = + new AllowListUrlChecker(List.of()) { + @Override + Set buildLiveHostUrls(Set liveNodes) { + callCount.incrementAndGet(); + return super.buildLiveHostUrls(liveNodes); + } + }; + for (int i = 0; i < 3; i++) { + checker.checkAllowList( + List.of("1.2.3.4:8983", "1.2.3.4:9000", "1.2.3.4:9001"), clusterState1); + SolrException exception = + expectThrows( + SolrException.class, + () -> checker.checkAllowList(List.of("1.1.3.4:8983"), clusterState1)); + assertThat(exception.code(), equalTo(SolrException.ErrorCode.FORBIDDEN.code)); + } + // Then we verify that the AllowListUrlChecker caches the live host urls and only builds them + // once. + assertThat(callCount.get(), equalTo(1)); + + // And when the ClusterState live nodes change. + liveNodes = Set.of("2.3.4.5:8983_solr", "2.3.4.5:9000_", "2.3.4.5:9001_solr-2"); + ClusterState clusterState2 = new ClusterState(liveNodes, new HashMap<>()); + for (int i = 0; i < 3; i++) { + checker.checkAllowList( + List.of("2.3.4.5:8983", "2.3.4.5:9000", "2.3.4.5:9001"), clusterState2); + SolrException exception = + expectThrows( + SolrException.class, + () -> checker.checkAllowList(List.of("1.1.3.4:8983"), clusterState2)); + assertThat(exception.code(), equalTo(SolrException.ErrorCode.FORBIDDEN.code)); + } + // Then the AllowListUrlChecker rebuilds the cache of live host urls. 
+ assertThat(callCount.get(), equalTo(2)); + } + private static List urls(String... urls) { return Arrays.asList(urls); } diff --git a/solr/example/README.md b/solr/example/README.md index 09bb2dfb277..f0c8e46a4be 100644 --- a/solr/example/README.md +++ b/solr/example/README.md @@ -73,18 +73,6 @@ For a list of other tutorials and introductory articles. Notes About These Examples -------------------------- -### References to Jar Files Outside This Directory - -Various example SolrHome dirs contained in this directory may use "" -statements in the solrconfig.xml file to reference plugin jars outside of -this directory for loading modules via relative paths. - -If you make a copy of this example server and wish to use the -ExtractingRequestHandler (SolrCell), the clustering component, -or any other modules, you will need to -copy the required jars or update the paths to those jars in your -solrconfig.xml. - ### Logging By default, Jetty & Solr will log to the console and logs/solr.log. This can diff --git a/solr/modules/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java b/solr/modules/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java index 21921440cae..f4f1b9cc83c 100644 --- a/solr/modules/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java +++ b/solr/modules/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java @@ -111,8 +111,8 @@ private void initParams(SolrParams params) { overwrite = params.getBool(OVERWRITE, false); langAllowlist = new HashSet<>(); threshold = params.getDouble(THRESHOLD, DOCID_THRESHOLD_DEFAULT); - String legacyAllowList = params.get(LANG_WHITELIST, ""); - if (legacyAllowList.length() > 0) { + final String legacyAllowList = params.get(LANG_WHITELIST, "").trim(); + if (!legacyAllowList.isEmpty()) { // nowarn compile time string concatenation log.warn( LANG_WHITELIST @@ -120,11 +120,10 @@ private void initParams(SolrParams params) { + LANG_ALLOWLIST + " instead."); // nowarn } - if (params.get(LANG_ALLOWLIST, legacyAllowList).length() > 0) { - for (String lang : params.get(LANG_ALLOWLIST, "").split(",")) { - langAllowlist.add(lang); - } - } + Arrays.stream(params.get(LANG_ALLOWLIST, legacyAllowList).split(",")) + .map(String::trim) + .filter(lang -> !lang.isEmpty()) + .forEach(langAllowlist::add); // Mapping params (field centric) enableMapping = params.getBool(MAP_ENABLE, false); diff --git a/solr/modules/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java b/solr/modules/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java index 4d8d398a25c..15e62d11a50 100644 --- a/solr/modules/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java +++ b/solr/modules/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java @@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Set; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; @@ -464,6 +465,31 @@ public void testMapIndividual() throws Exception { assertTrue(mappedIndividual.containsKey("text2_ru")); } + @Test + public void testAllowlist() throws Exception { + ModifiableSolrParams parameters = new ModifiableSolrParams(); + parameters.add("langid.fl", "name,subject"); + 
parameters.add("langid.langField", "language_s"); + parameters.add("langid.allowlist", "no,en ,, ,sv, sv"); + liProcessor = createLangIdProcessor(parameters); + + // Make sure that empty language codes have been filtered out and others trimmed. + assertEquals(Set.of("no", "en", "sv"), liProcessor.langAllowlist); + } + + @Test + public void testAllowlistBackwardsCompatabilityWithLegacyAllowlist() throws Exception { + // The "legacy allowlist" is "langid.whitelist" + ModifiableSolrParams parameters = new ModifiableSolrParams(); + parameters.add("langid.fl", "name,subject"); + parameters.add("langid.langField", "language_s"); + parameters.add("langid.whitelist", "no,en ,, ,sv, sv"); + liProcessor = createLangIdProcessor(parameters); + + // Make sure that empty language codes have been filtered out and others trimmed. + assertEquals(Set.of("no", "en", "sv"), liProcessor.langAllowlist); + } + // Various utility methods private SolrInputDocument englishDoc() { diff --git a/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml b/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml index b863d61728c..c20ee2026f6 100644 --- a/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml +++ b/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml @@ -16,9 +16,6 @@ - - - diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc index 57707e94c6e..b44c8a82825 100644 --- a/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc +++ b/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc @@ -39,7 +39,7 @@ The Configsets API has some other operations as well, and likewise, so does the To upload a file to a configset already stored on ZooKeeper, you can use xref:deployment-guide:solr-control-script-reference.adoc#copy-between-local-files-and-zookeeper-znodes[`bin/solr zk cp`]. CAUTION: By default, ZooKeeper's file size limit is 1MB. -If your files are larger than this, you'll need to either xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase the ZooKeeper file size limit] or store them xref:libs.adoc#lib-directives-in-solrconfig[on the filesystem] of every node in a cluster. +If your files are larger than this, you'll need to either xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase the ZooKeeper file size limit] or store them xref:libs.adoc[on the filesystem] of every node in a cluster. === Forbidden File Types diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc index 45987271de6..1a5dc43fe48 100644 --- a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc +++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc @@ -97,8 +97,6 @@ Upon creation of a collection using an "untrusted" configset, the following func * The XSLT transformer (`tr` parameter) cannot be used at request processing time. * If specified in the configset, the ScriptUpdateProcessorFactory will not initialize. -* Collections won't initialize if directives are used in the configset. -(Note: Libraries added to Solr's classpath don't need the directive) If you use any of these parameters or features, you must have enabled security features in your Solr installation and you must upload the configset as an authenticated user. 
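For context on the "authenticated user" requirement above: a configset is treated as trusted only when it is uploaded through the Configsets API with authentication in place. A minimal sketch using HTTP Basic auth, where the host, credentials, and configset name are placeholders and the zip is assumed to contain the configset's conf files:

    # Upload a configset as an authenticated user so it is registered as "trusted"
    curl -u solradmin:password -X POST \
      --header "Content-Type:application/octet-stream" \
      --data-binary @myconfigset.zip \
      "http://localhost:8983/solr/admin/configs?action=UPLOAD&name=myConfigSet"
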
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
index 549753d5f3a..13c90737ffe 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
@@ -253,8 +253,7 @@ Note that specifying `sharedLib` will not remove `$SOLR_HOME/lib` from Solr's cl
 +
 Takes a list of bundled xref:solr-modules.adoc[] to enable on startup.
 This way of adding modules will add them to the shared class loader, making them
-available to every collection in Solr, unlike `<lib>` tag in `solrconfig.xml` which is only
-for that one collection. Example value: `extracting,ltr`. See the
+available to every collection in Solr. Example value: `extracting,ltr`. See the
 xref:solr-modules.adoc[Solr Modules] chapter for more details.
 
 `allowPaths`::
@@ -506,7 +505,7 @@ Optional parameter to provide a compression implementation for state.json over t
 |===
 +
 The class to use for logging.
-The corresponding JAR file must be available to Solr, perhaps through a `<lib/>` directive in `solrconfig.xml`.
+The corresponding JAR file must be available to Solr, perhaps through a `<sharedLib/>` directive.
 
 `enabled`::
 +
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
index 3680b928896..324a1fdd09e 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
@@ -49,7 +49,6 @@ The `solrconfig.xml` file is located in the `conf/` directory for each collectio
 Several well-commented example files can be found in the `server/solr/configsets/` directories demonstrating best practices for many different types of installations.
 
 Some `solrconfig.xml` aspects are documented in other sections.
-See xref:libs.adoc#lib-directives-in-solrconfig[lib directives in SolrConfig], which can be used for both Plugins and Resources.
 ****
 
 // This tags the below list so it can be used in the parent page section list
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
index 269e56f4169..2ad86d6355b 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
@@ -47,32 +47,3 @@ Certain plugins or add-ons to plugins require placement here, and they will have
 Solr incorporates Jetty for providing HTTP server functionality.
 Jetty has some directories that contain `.jar` files for itself and its own plugins / modules or JVM level plugins (e.g., loggers).
 Solr plugins won't work in these locations.
-
-== Lib Directives in SolrConfig
-
-_Both_ plugin and xref:resource-loading.adoc[resource] file paths are configurable via `<lib/>` directives in `solrconfig.xml`.
-When a directive matches a directory, then resources can be resolved from it.
-When a directive matches a `.jar` file, Solr plugins and their dependencies are resolved from it.
-Resources can be placed in a `.jar` too but that's unusual.
-It's erroneous to refer to any other type of file.
- -A `` directive must have one (not both) of these two attributes: - -* `path`: used to refer to a single directory (for resources) or file (for a plugin `.jar`) - -* `dir`: used to refer to _all_ direct descendants of the specified directory. Optionally supply a `regex` attribute to filter these to those matching the regular expression. - -All directories are resolved as relative to the Solr core's `instanceDir`. - -These examples show how to load modules into Solr: - -[source,xml] ----- - - - - - - - ----- diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc index bcdbe17c5ef..3825f362de0 100644 --- a/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc +++ b/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc @@ -37,10 +37,5 @@ Prefer to put resources here. == Resources in Other Places -Resources can also be placed in an arbitrary directory and xref:libs.adoc#lib-directives-in-solrconfig[referenced] from a `` directive in `solrconfig.xml`, provided the directive refers to a directory and not the actual resource file. -Example: `` -This choice may make sense if the resource is too large for a configset in ZooKeeper. -However it's up to you to somehow ensure that all nodes in your cluster have access to these resources. - Finally, and this is very unusual, resources can also be packaged inside `.jar` files from which they will be referenced. That might make sense for default resources wherein a plugin user can override it via placing the same-named file in a configSet. diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc index 93a5f0b4ff6..99740b32db1 100644 --- a/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc +++ b/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc @@ -45,10 +45,5 @@ You can also specify the modules to include when using the Solr CLI to start Sol bin/solr start -e techproducts -Dsolr.modules=scripting ---- -NOTE: If you only wish to enable a module for certain collections, you may add `` tags to `solrconfig.xml` in applicable configset(s). -as explained in xref:configuration-guide:libs.adoc[Lib Directories]. -Collection-level plugins will work if the module is enabled either per collection (``) or for the whole Solr node. -Node-level plugins such as those specified in `solr.xml` will not work when using the `` option in `solrconfig.xml` because configsets configure collections, not the node. They must be enabled for the entire Solr node, as described above. - Some modules may have been made available as packages for the xref:configuration-guide:package-manager.adoc[Package Manager], check by listing available packages. 
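With the per-core `<lib>` mechanism removed in the pages above, node-level configuration is the remaining route. The following `solr.xml` fragment is a minimal sketch, not part of this change; the module list and the `sharedLib` path are illustrative assumptions.

[source,xml]
----
<solr>
  <!-- Load bundled modules into the shared class loader for every core on this node. -->
  <str name="modules">extraction,ltr</str>
  <!-- Optionally add a directory of extra JARs shared by the whole node. -->
  <str name="sharedLib">/opt/solr/custom-lib</str>
</solr>
----

The same module list can also be supplied at startup via `-Dsolr.modules=...` or the `SOLR_MODULES` environment variable, as the pages above note.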
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc index 8ac9faf031f..3abebae9747 100644 --- a/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc +++ b/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc @@ -424,7 +424,7 @@ The {solr-javadocs}/modules/langid/index.html[`langid`] module provides:: The {solr-javadocs}/modules/analysis-extras/index.html[`analysis-extras`] module provides:: {solr-javadocs}/modules/analysis-extras/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.html[OpenNLPExtractNamedEntitiesUpdateProcessorFactory]::: Update document(s) to be indexed with named entities extracted using an OpenNLP NER model. -Note that in order to use model files larger than 1MB on SolrCloud, you must either xref:deployment-guide:zookeeper-ensemble#increasing-the-file-size-limit[configure both ZooKeeper server and clients] or xref:libs.adoc#lib-directives-in-solrconfig[store the model files on the filesystem] on each node hosting a collection replica. +Note that in order to use model files larger than 1MB on SolrCloud, you must xref:deployment-guide:zookeeper-ensemble#increasing-the-file-size-limit[configure both ZooKeeper server and clients]. === Update Processor Factories You Should _Not_ Modify or Remove diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/jwt-authentication-plugin.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/jwt-authentication-plugin.adoc index aec5f269d24..4a5094f6225 100644 --- a/solr/solr-ref-guide/modules/deployment-guide/pages/jwt-authentication-plugin.adoc +++ b/solr/solr-ref-guide/modules/deployment-guide/pages/jwt-authentication-plugin.adoc @@ -169,7 +169,7 @@ Let's comment on this config: <1> Plugin class <2> Make sure to block anyone without a valid token (this is also the default) <3> Fetch the user id from another claim than the default `sub` -<4> Require that the `roles` claim is one of "A" or "B" and that the `dept` claim is "IT" +<4> Require that the `foo` claim is one of "A" or "B" and that the `dept` claim is "IT" <5> Require one of the scopes `solr:read`, `solr:write` or `solr:admin` <6> Only accept RSA algorithms for signatures <7> Array of issuer configurations diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc index c6dde76583d..d756a3e4f7f 100644 --- a/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc +++ b/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc @@ -389,7 +389,6 @@ If edit permissions should only be applied to specific collections, a custom per Note that this allows schema read permissions for _all_ collections. If read permissions should only be applied to specific collections, a custom permission would need to be created. * *config-edit*: this permission is allowed to edit a collection's configuration using the xref:configuration-guide:config-api.adoc[], the xref:configuration-guide:request-parameters-api.adoc[], and other APIs which modify `configoverlay.json`. 
-Because configs xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[can add libraries/custom code] from various locations, loading any new code via a trusted SolrConfig is explicitly allowed for users with this permission. Note that this allows configuration edit permissions for _all_ collections. If edit permissions should only be applied to specific collections, a custom permission would need to be created. * *config-read*: this permission is allowed to read a collection's configuration using the xref:configuration-guide:config-api.adoc[], the xref:configuration-guide:request-parameters-api.adoc[], xref:configuration-guide:configsets-api.adoc#configsets-list[Configsets API], the Admin UI's xref:configuration-guide:configuration-files.adoc#files-screen[Files Screen], and other APIs accessing configuration. diff --git a/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc b/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc index b1344ed5519..b0cdb7eba30 100644 --- a/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc +++ b/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc @@ -421,13 +421,8 @@ Also see the section <> for an example. If you have started Solr with one of the supplied xref:configuration-guide:config-sets.adoc[example configsets], you may already have the `ExtractingRequestHandler` configured by default. -First, you must enable the xref:#module[Module]. -If `solrconfig.xml` is not already configured, you will need to modify it to find the `ExtractingRequestHandler` and its dependencies: - -[source,xml] ----- - ----- +First, the `extraction` xref:#module[module] must be enabled. +This can be done by specifying the environment variable `SOLR_MODULES=extraction` in your startup configuration. You can then configure the `ExtractingRequestHandler` in `solrconfig.xml`. The following is the default configuration found in Solr's `sample_techproducts_configs` configset, which you can modify as needed: diff --git a/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc b/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc index 1d3f575ba03..fcdf78e6315 100644 --- a/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc +++ b/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc @@ -111,10 +111,6 @@ In the form of JSON files your trained model or models (e.g., different models f |(custom) |(custom class extending {solr-javadocs}/modules/ltr/org/apache/solr/ltr/model/LTRScoringModel.html[LTRScoringModel]) |(not applicable) |=== -== Module - -This is provided via the `ltr` xref:configuration-guide:solr-modules.adoc[Solr Module] that needs to be enabled before use. - == Installation of LTR The ltr module requires the `modules/ltr/lib/solr-ltr-*.jar` JARs. @@ -125,13 +121,8 @@ Learning-To-Rank is a module and therefore its plugins must be configured in `so === Minimum Requirements -* Include the required module JARs. -Note that by default paths are relative to the Solr core, so they may need adjustments to your configuration, or an explicit specification of the `$solr.install.dir`. -+ -[source,xml] ----- - ----- +* Enable the `ltr` module to make the LTR classes available on Solr's classpath. +See xref:configuration-guide:solr-modules.adoc[Solr Module] for more details. * Declaration of the `ltr` query parser. 
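To make the shortened minimum-requirements list above concrete, the sketch below shows the kind of `solrconfig.xml` declarations the LTR documentation builds on once the `ltr` module is enabled; the cache name follows the usual convention, and nothing here is introduced by this patch.

[source,xml]
----
<!-- Declare the LTR query parser; the class ships with the ltr module. -->
<queryParser name="ltr" class="org.apache.solr.ltr.search.LTRQParserPlugin"/>

<!-- Declare the [features] transformer used to log feature values. -->
<transformer name="features"
             class="org.apache.solr.ltr.response.transform.LTRFeatureLoggerTransformerFactory">
  <str name="fvCacheName">QUERY_DOC_FV</str>
</transformer>
----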
 +
@@ -248,11 +239,11 @@ Assuming that you consider to use a large model placed at `/path/to/models/myMod
 }
 ----
 
-First, add the directory to Solr's resource paths with a xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[`<lib/>` directive]:
+First, add the directory to Solr's resource paths with a xref:configuration-guide:configuring-solr-xml.adoc#the-solr-element[solr.xml `` directive]:
 
 [source,xml]
 ----
-
+  /path/to/models"
 ----
 
 Then, configure `DefaultWrapperModel` to wrap `myModel.json`:
@@ -273,8 +264,6 @@ Then, configure `DefaultWrapperModel` to wrap `myModel.json`:
 
 NOTE: No `"features"` are configured in `myWrapperModel` because the features of the wrapped model (`myModel`) will be used; also note that the `"store"` configured for the wrapper model must match that of the wrapped model i.e., in this example the feature store called `largeModelsFeatureStore` is used.
 
-CAUTION: `<lib/>` doesn't work as expected in this case, because `SolrResourceLoader` considers given resources as JAR if `<lib/>` indicates files.
-
 As an alternative to the above-described `DefaultWrapperModel`, it is possible to xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase ZooKeeper's file size limit].
 
 === Applying Changes
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
index ba91a442edc..09beabda54a 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
@@ -41,10 +41,10 @@ Some key changes that you may run into are:
 * You may use `--zk-host` (or `-z`) in place of the `--solr-url` option to lookup the Solr host.
 * Basic Authentication support has been added to bin/solr via the `--credentials` (or `-u`) option.
 * Some short and single-letter options have been removed to avoid conflicts or in favor to other options.
-
+
 To learn about the updated options in each CLI tool, use the `--help` option or look up the tool in the documentation.
-Additionally, the `bin/solr delete` command no longer deletes a configset when you delete a collection. Previously if you deleted a collection, it would also delete it's associated configset if it was the only user of it.
+Additionally, the `bin/solr delete` command no longer deletes a configset when you delete a collection. Previously, if you deleted a collection, it would also delete its associated configset if it was the only collection using it.
 Now you have to explicitly provide a `--delete-config` option to delete the configsets.
 This decouples the lifecycle of a configset from that of a collection.
 
 === SolrJ
@@ -60,6 +60,12 @@ Users who previously relied on collection-specific URLs to avoid including the c
 The service installer now installs a `systemd` startup script instead of an `init.d` startup script.
 It is up to the user to uninstall any existing `init.d` script when upgrading.
 
+=== SolrCloud request routing
+
+HTTP requests to SolrCloud that are for a specific core must be delivered to the node hosting that core, or else an HTTP 404 Not Found response will occur.
+Previously, SolrCloud would scan the cluster state to locate the core and internally route or proxy the request to the node that has it.
+If a client can reach only one node, tools that must contact other nodes directly, such as the bin/solr export tool, will probably not work.
+
 === Deprecation removals
 
 * The `jaegertracer-configurator` module, which was deprecated in 9.2, is removed.
 Users should migrate to the `opentelemetry` module.
@@ -79,11 +85,17 @@ has been removed. Please use `-Dsolr.hiddenSysProps` or the envVar `SOLR_HIDDEN_
 * The legacy Circuit Breaker named `CircuitBreakerManager`, is removed. Please use individual Circuit Breaker plugins instead.
 
-* The `BlobRepository`, which was deprecated in 8x in favour of the `FileStore` approach is removed.
+* The `BlobRepository`, which was deprecated in 8x in favour of the `FileStore` approach, is removed.
 Users should migrate to the `FileStore` implementation (per node stored file) and the still existing `BlobHandler` (across the cluster storage backed by `.system` collection).
 Please note this also removes the ability to share resource intensive objects across multiple cores as this feature was tied to the `BlobRepository` implementation.
 
 * The language specific Response Writers, which were deprecated in 9.8 in favour of more widely used formats like JSON have been removed. The removed writer types (invoked as part of the `wt` parameter) include `python`, `ruby`, `php`, and `phps`.
 
+* The deprecated support for configuring replication using master/slave terminology is removed. Use leader/follower.
+
+* Support for the `<lib/>` directive, which historically could be used in solrconfig.xml to add JARs on a core-by-core basis, was deprecated in 9.8 and has now been removed.
+Users that need to vary JAR accessibility on a per-core basis can use Solr's xref:configuration-guide:package-manager.adoc[Package Manager].
+Users who don't need to vary JAR access on a per-core basis have several options, including the xref:configuration-guide:configuring-solr-xml.adoc[`<sharedLib/>` tag supported by solr.xml] or manipulation of Solr's classpath prior to JVM startup.
+
 * Kerberos based authentication has been removed.
 This results in changes to SolrJ, the Solr Admin app, and the removal of the `hadoop-auth` module.
 
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
index cf91beee4cb..fa08dc0d287 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
@@ -380,7 +380,7 @@ Other relevant placement strategies should be used instead, such as autoscaling
 ** The `solrj-deps` (SolrJ Dependencies) are no longer separated out from the other Server jars.
 ** Please refer to the SolrJ Maven artifact to see the exact dependencies you need to include from `server/solr-webapp/webapp/WEB-INF/lib/` and `server/lib/ext/` if you are loading in SolrJ manually. If you plan on using SolrJ as a JDBC driver, please refer to the xref:query-guide:sql-query.adoc#generic-clients[JDBC documentation]
-** More information can be found in the xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[Libs documentation].
+** More information can be found in the xref:configuration-guide:libs.adoc[Libs documentation].
 * SolrJ class `CloudSolrClient` now supports HTTP2. It has a new Builder. See `CloudLegacySolrClient` for the 8.x version of this class.
 * In Backup request responses, the `response` key now uses a map to return information instead of a list. This is only applicable for users returning information in JSON format, which is the default behavior.
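As a concrete companion to the upgrade note above on replication terminology, the sketch below contrasts the removed names with the required ones; host, core, and file names are illustrative assumptions.

[source,xml]
----
<!-- User-managed replication config using the required leader/follower terminology
     ("master"/"slave" and "masterUrl" are no longer accepted). -->
<requestHandler name="/replication" class="solr.ReplicationHandler">
  <!-- On the leader core: -->
  <lst name="leader">
    <str name="replicateAfter">commit</str>
    <str name="confFiles">schema.xml,stopwords.txt</str>
  </lst>
  <!-- On a follower core: -->
  <lst name="follower">
    <str name="leaderUrl">http://leader-host:8983/solr/techproducts/replication</str>
    <str name="pollInterval">00:00:60</str>
  </lst>
</requestHandler>
----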
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index 19bfc2565d6..22e1005ed8b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -34,7 +34,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Function; -import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; @@ -53,6 +52,8 @@ * Immutable state of the cloud. Normally you can get the state by using {@code * ZkStateReader#getClusterState()}. * + *
<p>
However, the {@link #setLiveNodes list of live nodes} is updated when nodes go up and down. + * * @lucene.experimental */ public class ClusterState implements MapWriter { @@ -63,8 +64,7 @@ public class ClusterState implements MapWriter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final Map collectionStates, immutableCollectionStates; - private Set liveNodes; - private Set hostAllowList; + private volatile Set liveNodes; /** Use this constr when ClusterState is meant for consumption. */ public ClusterState(Set liveNodes, Map collectionStates) { @@ -85,8 +85,7 @@ private static Map getRefMap(Map c * loaded (parameter order different from constructor above to have different erasures) */ public ClusterState(Map collectionStates, Set liveNodes) { - this.liveNodes = CollectionUtil.newHashSet(liveNodes.size()); - this.liveNodes.addAll(liveNodes); + setLiveNodes(liveNodes); this.collectionStates = new LinkedHashMap<>(collectionStates); this.immutableCollectionStates = Collections.unmodifiableMap(this.collectionStates); } @@ -189,7 +188,7 @@ public Map getCollectionsMap() { /** Get names of the currently live nodes. */ public Set getLiveNodes() { - return Collections.unmodifiableSet(liveNodes); + return liveNodes; } @Deprecated @@ -387,7 +386,7 @@ public boolean equals(Object obj) { /** Internal API used only by ZkStateReader */ void setLiveNodes(Set liveNodes) { - this.liveNodes = liveNodes; + this.liveNodes = Set.copyOf(liveNodes); } /** @@ -401,20 +400,6 @@ public Map getCollectionStates() { return immutableCollectionStates; } - /** - * Gets the set of allowed hosts (host:port) built from the set of live nodes. The set is cached - * to be reused. - */ - public Set getHostAllowList() { - if (hostAllowList == null) { - hostAllowList = - getLiveNodes().stream() - .map((liveNode) -> liveNode.substring(0, liveNode.indexOf('_'))) - .collect(Collectors.toSet()); - } - return hostAllowList; - } - /** * Streams the resolved {@link DocCollection}s, which will often fetch from ZooKeeper for each one * for a many-collection scenario. Use this sparingly; some users have thousands of collections! 
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java index 513596533ff..1ac793e852d 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java @@ -204,25 +204,13 @@ private void addAndQueryDocs(final String baseUrl, int docs) throws Exception { SolrQuery query = new SolrQuery("*:*"); - String collectionUrl = baseUrl + "/onenodecollection" + "core"; - try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollectioncore")) { - - // it might take a moment for the proxy node to see us in their cloud state - waitForNon403or404or503(client, collectionUrl); - + try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollection")) { // add a doc - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("id", docs); - client.add(doc); + client.add(sdoc("id", docs)); client.commit(); QueryResponse results = client.query(query); assertEquals(docs - 1, results.getResults().getNumFound()); - } - - try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollection")) { - QueryResponse results = client.query(query); - assertEquals(docs - 1, results.getResults().getNumFound()); SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", docs + 1);