Introduce deprecation categories (#68061)
Sort-of backport of #67443.

Closes #64824. Introduce the concept of categories to deprecation
logging. Every location where we log a deprecation message must now
include a deprecation category.
pugnascotia authored Jan 29, 2021
1 parent 6c8ed22 commit 1c5b89c
Showing 172 changed files with 610 additions and 311 deletions.
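
Every hunk below follows the same mechanical pattern: each call to DeprecationLogger.deprecate gains a leading DeprecationCategory argument (always ANALYSIS in these analysis-module files) ahead of the existing deprecation key and message, and each touched file adds the matching import. As a rough orientation for reading the diffs, here is a minimal, self-contained sketch of that call shape. It is not the real Elasticsearch implementation; the OTHER constant, the class bodies, and the log formatting are illustrative assumptions only.

// Sketch only -- not the actual Elasticsearch classes. DeprecationCategory is
// assumed to be a simple enum (only ANALYSIS appears in this diff; OTHER is
// hypothetical), and deprecate() is assumed to do nothing more than tag the
// message with the chosen category before logging it.
public class DeprecationCategorySketch {

    enum DeprecationCategory { ANALYSIS, OTHER }

    static final class DeprecationLogger {
        // New shape: the category comes first, then the deprecation key and message.
        void deprecate(DeprecationCategory category, String key, String message) {
            System.out.printf("[%s][%s] %s%n", category, key, message);
        }
    }

    public static void main(String[] args) {
        DeprecationLogger deprecationLogger = new DeprecationLogger();
        // Before this commit: deprecationLogger.deprecate("nGram_deprecation", "...");
        // After this commit every call site names a category explicitly:
        deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "nGram_deprecation",
            "The [nGram] token filter name is deprecated and will be removed in a future version. "
                + "Please change the filter name to [ngram] instead.");
    }
}

Tagging every deprecation with a category in this way presumably lets downstream tooling group and report warnings by functional area rather than by message text alone.
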
@@ -23,6 +23,7 @@
import org.apache.lucene.analysis.cjk.CJKBigramFilter;
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -102,7 +103,7 @@ public TokenFilterFactory getSynonymFilter() {
"] cannot be used to parse synonyms");
}
else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters", "Token filter [" + name()
+ "] will not be usable to parse synonyms after v7.0");
}
}
@@ -116,6 +116,7 @@
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
@@ -249,7 +250,7 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
- deprecationLogger.deprecate("edgeNGram_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead.");
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings);
@@ -274,7 +275,7 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
filters.put("ngram", NGramTokenFilterFactory::new);
filters.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
- deprecationLogger.deprecate("nGram_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "nGram_deprecation",
"The [nGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [ngram] instead.");
return new NGramTokenFilterFactory(indexSettings, environment, name, settings);
@@ -323,7 +324,7 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
- deprecationLogger.deprecate("nGram_tokenizer_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [ngram] instead.");
}
@@ -332,7 +333,7 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
- deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [edge_ngram] instead.");
}
@@ -413,7 +414,7 @@ public List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
filters.add(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new));
filters.add(PreConfiguredCharFilter.elasticsearchVersion("htmlStrip", false, (reader, version) -> {
if (version.onOrAfter(org.elasticsearch.Version.V_6_3_0)) {
- deprecationLogger.deprecate("htmlStrip_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "htmlStrip_deprecation",
"The [htmpStrip] char filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [html_strip] instead.");
}
@@ -444,7 +445,7 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
}
if (version.onOrAfter(Version.V_6_2_0)) {
- deprecationLogger.deprecate("analysis_delimited_payload_filter",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "analysis_delimited_payload_filter",
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
}
return new DelimitedPayloadTokenFilter(input,
@@ -464,7 +465,7 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead.");
} else {
- deprecationLogger.deprecate("edgeNGram_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead.");
}
@@ -491,7 +492,7 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
throw new IllegalArgumentException("The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead.");
} else {
- deprecationLogger.deprecate("nGram_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "nGram_deprecation",
"The [nGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [ngram] instead.");
}
@@ -569,15 +570,15 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
// Temporary shim for aliases. TODO deprecate after they are moved
tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("nGram", (version) -> {
if (version.onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
- deprecationLogger.deprecate("nGram_tokenizer_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [ngram] instead.");
}
return new NGramTokenizer();
}));
tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("edgeNGram", (version) -> {
if (version.onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
- deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [edge_ngram] instead.");
}
@@ -24,6 +24,7 @@
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -69,7 +70,7 @@ public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters", "Token filter [" + name()
+ "] will not be usable to parse synonyms after v7.0");
}

@@ -23,6 +23,7 @@
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -91,8 +92,8 @@ public TokenFilterFactory getSynonymFilter() {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
-     + "] will not be usable to parse synonyms after v7.0");
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters",
+     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
return this;
}
}
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.FingerprintFilter;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -58,7 +59,7 @@ public TokenFilterFactory getSynonymFilter() {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters", "Token filter [" + name()
+ "] will not be usable to parse synonyms after v7.0");
return this;
}
@@ -20,6 +20,7 @@
package org.elasticsearch.analysis.common;

import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -37,7 +38,7 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
}
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) {
- deprecationLogger.deprecate("analysis_legacy_delimited_payload_filter",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "analysis_legacy_delimited_payload_filter",
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
}
}
@@ -26,6 +26,7 @@
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -66,8 +67,8 @@ public TokenFilterFactory getSynonymFilter() {
}
else {
if (preserveOriginal) {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
-     + "] will not be usable to parse synonyms after v7.0");
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters",
+     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
return IDENTITY_FILTER;
}
throw new IllegalArgumentException("Token filter [" + name()
@@ -129,8 +130,8 @@ public TokenFilterFactory getSynonymFilter() {
}
else {
if (preserveOriginal) {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
-     + "] will not be usable to parse synonyms after v7.0");
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters",
+     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
return IDENTITY_FILTER;
}
throw new IllegalArgumentException("Token filter [" + name()
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -52,7 +53,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.");
} else {
- deprecationLogger.deprecate("ngram_big_difference",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "ngram_big_difference",
"Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
+ "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]");
}
@@ -71,8 +72,8 @@ public TokenFilterFactory getSynonymFilter() {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
-     + "] will not be usable to parse synonyms after v7.0");
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters",
+     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
return this;
}
}
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -118,7 +119,7 @@ public boolean isTokenChar(int c) {
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.");
} else {
- deprecationLogger.deprecate("ngram_big_difference",
+ deprecationLogger.deprecate(DeprecationCategory.ANALYSIS, "ngram_big_difference",
"Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
+ "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]");
}
@@ -21,6 +21,7 @@

import org.apache.lucene.analysis.CharArraySet;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -49,7 +50,7 @@ public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProv
throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " +
"use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
} else {
- DEPRECATION_LOGGER.deprecate("standard_html_strip_deprecation",
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "standard_html_strip_deprecation",
"Deprecated analyzer [standard_html_strip] used, " +
"replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
}
@@ -23,6 +23,7 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.synonym.SynonymFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -57,7 +58,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
this.settings = settings;

if (settings.get("ignore_case") != null) {
- DEPRECATION_LOGGER.deprecate("synonym_ignore_case_option",
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_ignore_case_option",
"The ignore_case option on the synonym_graph filter is deprecated. " +
"Instead, insert a lowercase filter in the filter chain before the synonym_graph filter.");
}
@@ -24,6 +24,7 @@
import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
import org.elasticsearch.Version;
+ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -112,8 +113,8 @@ public TokenFilterFactory getSynonymFilter() {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {
- DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
-     + "] will not be usable to parse synonyms after v7.0");
+ DEPRECATION_LOGGER.deprecate(DeprecationCategory.ANALYSIS, "synonym_tokenfilters",
+     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
return this;
}
}
(Diffs for the remaining changed files are not shown.)

0 comments on commit 1c5b89c
