diff --git a/.env.sample b/.env.sample
index b6f6b61..2a603dc 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1 +1,2 @@
-ES_VERSION=7.5.1
\ No newline at end of file
+ES_VERSION=8.7.0
+ELASTIC_PASSWORD=changeme
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
deleted file mode 100644
index 516903c..0000000
--- a/.github/FUNDING.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# These are supported funding model platforms
-
-github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
-patreon: # Replace with a single Patreon username
-open_collective: # Replace with a single Open Collective username
-ko_fi: # Replace with a single Ko-fi username
-tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
-community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
-liberapay: # Replace with a single Liberapay username
-issuehunt: # Replace with a single IssueHunt username
-otechie: # Replace with a single Otechie username
-custom: https://www.paypal.me/duydq
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3cbc0fc..7b0a70f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -8,7 +8,6 @@ jobs:
     strategy:
       matrix:
         entry:
-          - { version: 11, distribution: 'adopt' }
          - { version: 17, distribution: 'adopt' }
     steps:
       - name: Checkout analysis-vietnamese
@@ -33,4 +32,4 @@
       - name: Build and Test
        run: |
          export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-         mvn --batch-mode test
\ No newline at end of file
+         mvn --batch-mode test
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..c730e5d
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,44 @@
+ARG ES_VERSION
+FROM docker.elastic.co/elasticsearch/elasticsearch:$ES_VERSION as builder
+
+USER root
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update -y && apt-get install -y software-properties-common build-essential
+RUN gcc --version
+RUN apt-get update -y && apt-get install -y make cmake pkg-config wget git
+
+ENV JAVA_HOME=/usr/share/elasticsearch/jdk
+ENV PATH=$JAVA_HOME/bin:$PATH
+
+# Build coccoc-tokenizer
+RUN echo "Build coccoc-tokenizer..."
+WORKDIR /tmp
+RUN git clone https://github.com/duydo/coccoc-tokenizer.git
+RUN mkdir /tmp/coccoc-tokenizer/build
+WORKDIR /tmp/coccoc-tokenizer/build
+RUN cmake -DBUILD_JAVA=1 ..
+RUN make install
+
+# Build analysis-vietnamese
+RUN echo "analysis-vietnamese..."
+WORKDIR /tmp
+RUN wget https://dlcdn.apache.org/maven/maven-3/3.8.8/binaries/apache-maven-3.8.8-bin.tar.gz \
+    && tar xvf apache-maven-3.8.8-bin.tar.gz
+ENV MVN_HOME=/tmp/apache-maven-3.8.8
+ENV PATH=$MVN_HOME/bin:$PATH
+
+COPY . /tmp/elasticsearch-analysis-vietnamese
+WORKDIR /tmp/elasticsearch-analysis-vietnamese
+RUN mvn verify clean --fail-never
+RUN mvn --batch-mode -Dmaven.test.skip -e package
+
+FROM docker.elastic.co/elasticsearch/elasticsearch:$ES_VERSION
+ARG ES_VERSION
+ARG COCCOC_INSTALL_PATH=/usr/local
+ARG COCCOC_DICT_PATH=$COCCOC_INSTALL_PATH/share/tokenizer/dicts
+
+COPY --from=builder $COCCOC_INSTALL_PATH/lib/libcoccoc_tokenizer_jni.so /usr/lib
+COPY --from=builder $COCCOC_DICT_PATH $COCCOC_DICT_PATH
+COPY --from=builder /tmp/elasticsearch-analysis-vietnamese/target/releases/elasticsearch-analysis-vietnamese-$ES_VERSION.zip /
+RUN echo "Y" | /usr/share/elasticsearch/bin/elasticsearch-plugin install --batch file:///elasticsearch-analysis-vietnamese-$ES_VERSION.zip
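For reference, the multi-stage Dockerfile above can also be exercised without Compose; a minimal sketch (the image tag is illustrative, and ES_VERSION must match a published Elasticsearch image):

```sh
# Build the image, passing the Elasticsearch version the Dockerfile expects as a build arg
docker build --build-arg ES_VERSION=8.7.0 -t elasticsearch-analysis-vietnamese:8.7.0 .

# Run a throwaway single node, using the same settings docker-compose.yaml relies on
docker run -p 9200:9200 -e discovery.type=single-node -e ELASTIC_PASSWORD=changeme \
  elasticsearch-analysis-vietnamese:8.7.0
```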
diff --git a/README.md b/README.md
index 822736a..9aaf402 100644
--- a/README.md
+++ b/README.md
@@ -108,20 +108,52 @@ The above example produces the following terms:
 ```
 
+## Use Docker
+
+Make sure you have installed both Docker & docker-compose
+
+### Build the image with Docker Compose
+
+```sh
+# Copy, edit ES version and password for user elastic in file .env. Default password: changeme
+cp .env.sample .env
+docker compose build
+docker compose up
+```
+### Verify
+```sh
+curl -k http://elastic:changeme@localhost:9200/_analyze -H 'Content-Type: application/json' -d '
+{
+  "analyzer": "vi_analyzer",
+  "text": "Cộng hòa Xã hội chủ nghĩa Việt Nam"
+}'
+
+# Output
+{"tokens":[{"token":"cộng hòa","start_offset":0,"end_offset":8,"type":"","position":0},{"token":"xã hội","start_offset":9,"end_offset":15,"type":"","position":1},{"token":"chủ nghĩa","start_offset":16,"end_offset":25,"type":"","position":2},{"token":"việt nam","start_offset":26,"end_offset":34,"type":"","position":3}]}
+```
+
 ## Build from Source
 
 ### Step 1: Build C++ tokenizer for Vietnamese library
 ```sh
-git clone https://github.com/coccoc/coccoc-tokenizer.git
+git clone https://github.com/duydo/coccoc-tokenizer.git
 cd coccoc-tokenizer && mkdir build && cd build
 cmake -DBUILD_JAVA=1 ..
 make install
+# Link the coccoc shared lib to /usr/lib
+sudo ln -sf /usr/local/lib/libcoccoc_tokenizer_jni.* /usr/lib/
 ```
 By default, the `make install` installs:
-- the lib commands (`tokenizer`, `dict_compiler` and `vn_lang_tool`) under `/usr/local/bin`
-- the dynamic lib (`libcoccoc_tokenizer_jni.so`) under `/usr/local/lib/`. The plugin uses this lib directly.
-- the dictionary files under `/usr/local/share/tokenizer/dicts`. The plugin uses this path for `dict_path` by default.
+- The lib commands `tokenizer`, `dict_compiler` and `vn_lang_tool` under `/usr/local/bin`
+- The dynamic lib `libcoccoc_tokenizer_jni.so` under `/usr/local/lib/`. The plugin uses this lib directly.
+- The dictionary files under `/usr/local/share/tokenizer/dicts`. The plugin uses this path for `dict_path` by default.
+
+Verify
+```sh
+/usr/local/bin/tokenizer "Cộng hòa Xã hội chủ nghĩa Việt Nam"
+# cộng hòa xã hội chủ nghĩa việt nam
+```
 
-Refer [the repo](https://github.com/coccoc/coccoc-tokenizer) for more information to build the library.
+Refer [the repo](https://github.com/duydo/coccoc-tokenizer) for more information to build the library.
 
 ### Step 2: Build the plugin
 
@@ -136,7 +168,7 @@ Optionally, edit the `elasticsearch-analysis-vietnamese/pom.xml` to change the v
 ```xml
 ...
-7.17.1
+8.7.0
 ...
 ```
@@ -149,7 +181,7 @@ mvn package
 ### Step 3: Installation the plugin on Elasticsearch
 
 ```sh
-bin/elasticsearch-plugin install file://target/releases/elasticsearch-analysis-vietnamese-7.17.1.zip
+bin/elasticsearch-plugin install file://target/releases/elasticsearch-analysis-vietnamese-8.7.0.zip
 ```
 
 ## Compatible Versions
@@ -157,8 +189,11 @@ From v7.12.11, the plugin uses CocCoc C++ tokenizer instead of the VnTokenizer b
 I don't maintain the plugin with the VnTokenizer anymore, if you want to continue developing with it, refer [the branch vntokenizer](https://github.com/duydo/elasticsearch-analysis-vietnamese/tree/vntokenizer).
 
 | Vietnamese Analysis Plugin | Elasticsearch   |
-| -------------------------- |-----------------|
-| master                     | 7.16 ~ 7.17.1   |
+|----------------------------|-----------------|
+| master                     | 8.7.0           |
+| develop                    | 8.7.0           |
+| 8.7.0                      | 8.7.0           |
+| 7.16.1                     | 7.16 ~ 7.17.1   |
 | 7.12.1                     | 7.12.1 ~ 7.15.x |
 | 7.3.1                      | 7.3.1           |
 | 5.6.5                      | 5.6.5           |
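Beyond the `_analyze` check in the README, the `vi_analyzer` can be wired into a mapping; a small sketch that mirrors what the integration test later in this patch does (the index name is illustrative, credentials come from `.env`):

```sh
# Create an index whose "foo" field is analyzed with vi_analyzer
curl -k -X PUT "http://elastic:changeme@localhost:9200/test" -H 'Content-Type: application/json' -d '
{
  "mappings": {
    "properties": {
      "foo": { "type": "text", "analyzer": "vi_analyzer" }
    }
  }
}'

# Index a document, then run a match query against the analyzed field
curl -k -X POST "http://elastic:changeme@localhost:9200/test/_doc/1?refresh=true" -H 'Content-Type: application/json' -d '
{ "foo": "công nghệ thông tin Việt Nam" }'

curl -k "http://elastic:changeme@localhost:9200/test/_search" -H 'Content-Type: application/json' -d '
{ "query": { "match": { "foo": "công nghệ thông tin" } } }'
```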
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 17f5d27..98c2c52 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,15 +1,31 @@
 version: '3.4'
+
 services:
   elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:${ES_VERSION}
+    build:
+      context: .
+      args:
+        ES_VERSION: ${ES_VERSION}
     restart: on-failure
     ports:
       - "9200:9200"
-    volumes:
-      - ./target/releases/elasticsearch-analysis-vietnamese-${ES_VERSION}.zip:/usr/share/elasticsearch/plugin/elasticsearch-analysis-vietnamese-${ES_VERSION}.zip
-      - ./install-es-plugin.sh:/apps/install-es-plugin.sh
+    ulimits:
+      nofile:
+        soft: 65536
+        hard: 65536
+      memlock:
+        hard: -1
+        soft: -1
     environment:
-      - "ES_VERSION=${ES_VERSION}"
-      - "discovery.type=single-node"
-    entrypoint:
-      - /apps/install-es-plugin.sh
\ No newline at end of file
+      ES_JAVA_OPTS: "-Xmx2g -Xms2g"
+      ELASTIC_USERNAME: "elastic"
+      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD}
+      bootstrap.memory_lock: "true"
+      discovery.type: "single-node"
+      xpack.security.enabled: "true"
+    networks:
+      - elastic
+
+networks:
+  elastic:
+    driver: bridge
diff --git a/install-es-plugin.sh b/install-es-plugin.sh
deleted file mode 100755
index 29bed00..0000000
--- a/install-es-plugin.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-# setting up prerequisites
-
-cd /usr/share/elasticsearch
-
-yes | ./bin/elasticsearch-plugin install file:./plugin/elasticsearch-analysis-vietnamese-$ES_VERSION.zip
-
-/usr/local/bin/docker-entrypoint.sh eswrapper
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 0fe8781..e67d231 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.elasticsearch</groupId>
     <artifactId>elasticsearch-analysis-vietnamese</artifactId>
-    <version>7.17.8</version>
+    <version>8.7.0</version>
     <packaging>jar</packaging>
     <name>elasticsearch-analysis-vietnamese</name>
     <url>https://github.com/duydo/elasticsearch-analysis-vietnamese/</url>
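With the plugin version bumped to 8.7.0, a local (non-Docker) build and install follows the same steps the Dockerfile runs; a sketch, assuming the coccoc-tokenizer library and dictionaries are already installed under /usr/local:

```sh
# Package the plugin (tests need the JNI lib on the loader path, so they are skipped here,
# exactly as in the Dockerfile build stage)
mvn --batch-mode -Dmaven.test.skip package

# Install the resulting zip into a local Elasticsearch 8.7.0 (run from the project root)
bin/elasticsearch-plugin install file://$PWD/target/releases/elasticsearch-analysis-vietnamese-8.7.0.zip
```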
diff --git a/src/main/java/com/coccoc/Tokenizer.java b/src/main/java/com/coccoc/Tokenizer.java
index b029727..ec6fe7d 100644
--- a/src/main/java/com/coccoc/Tokenizer.java
+++ b/src/main/java/com/coccoc/Tokenizer.java
@@ -10,97 +10,102 @@
  * @author duydo, CocCoc team
  */
 public class Tokenizer {
-    public static final String TOKENIZER_SHARED_LIB_NAME = "coccoc_tokenizer_jni";
-    static {
-        System.loadLibrary(TOKENIZER_SHARED_LIB_NAME);
-    }
+  public static final String TOKENIZER_SHARED_LIB_NAME = "coccoc_tokenizer_jni";
+
+  static {
+    System.loadLibrary(TOKENIZER_SHARED_LIB_NAME);
+  }
 
-    public enum TokenizeOption {
-        NORMAL(0),
-        HOST(1),
-        URL(2);
+  public enum TokenizeOption {
+    NORMAL(0),
+    HOST(1),
+    URL(2);
 
-        private final int value;
+    private final int value;
 
-        TokenizeOption(int value) {
-            this.value = value;
-        }
+    TokenizeOption(int value) {
+      this.value = value;
+    }
 
-        public int value() {
-            return value;
-        }
+    public int value() {
+      return value;
     }
+  }
 
-    public static final String SPACE = " ";
-    public static final String UNDERSCORE = "_";
-    public static final String COMMA = ",";
-    public static final String DOT = ".";
+  public static final String SPACE = " ";
+  public static final String UNDERSCORE = "_";
+  public static final String COMMA = ",";
+  public static final String DOT = ".";
 
-    private static String dictPath = null;
+  private static String dictPath = null;
 
-    private static final class Loader {
-        private static final Tokenizer INSTANCE = get();
+  private static final class Loader {
 
-        private Loader() {
-        }
+    private static final Tokenizer INSTANCE = get();
 
-        private static Tokenizer get() {
-            return new Tokenizer(dictPath);
-        }
+    private Loader() { }
 
-    public static Tokenizer getInstance(String dictPath) {
-        Tokenizer.dictPath = dictPath;
-        return Loader.INSTANCE;
+    private static Tokenizer get() {
+      return new Tokenizer(dictPath);
     }
+  }
+
+  public static Tokenizer getInstance(String dictPath) {
+    Tokenizer.dictPath = dictPath;
+    return Loader.INSTANCE;
+  }
 
-    private Tokenizer(String dictPath) {
-        int status = initialize(dictPath);
-        if (0 > status) {
-            throw new RuntimeException(String.format("Cannot initialize Tokenizer: %s", dictPath));
-        }
+  private Tokenizer(String dictPath) {
+    int status = initialize(dictPath);
+    if (0 > status) {
+      throw new RuntimeException(String.format("Cannot initialize Tokenizer: %s", dictPath));
+    }
+  }
+
+  public List<Token> segment(String text, TokenizeOption option, boolean keepPunctuation) {
+    if (text == null) {
+      throw new IllegalArgumentException("text is null");
+    }
+    long resPointer = segmentPointer(text, false, option.value(), keepPunctuation);
+    if (resPointer < 0) {
+      throw new RuntimeException("Cannot segment the text");
     }
 
-    public List<Token> segment(String text, TokenizeOption option, boolean keepPunctuation) {
-        if (text == null) {
-            throw new IllegalArgumentException("text is null");
-        }
-        long resPointer = segmentPointer(text, false, option.value(), keepPunctuation);
-
-        final List<Token> tokens = new ArrayList<>();
-        // Positions from JNI implementation .cpp file
-        int rangesSize = (int) Unsafe.UNSAFE.getLong(resPointer + 8 * 2);
-        long rangesDataPointer = Unsafe.UNSAFE.getLong(resPointer + 8 * 3);
-        int tokenSize = 4 * 6;
-        for (int i = 0, spacePos = 0; i < rangesSize; ++i) {
-            // Positions of UNSAFE values are calculated from {struct Token} in tokenizer.hpp
-            int originalStartPos = Unsafe.UNSAFE.getInt(rangesDataPointer + i * tokenSize + 8);
-            int originalEndPos = Unsafe.UNSAFE.getInt(rangesDataPointer + i * tokenSize + 12);
-            int type = Unsafe.UNSAFE.getInt(rangesDataPointer + i * tokenSize + 16);
-            int segType = Unsafe.UNSAFE.getInt(rangesDataPointer + i * tokenSize + 20);
-
-            // Build substring from UNSAFE array of codepoints
-            final StringBuilder sb = new StringBuilder();
-            for (int j = originalStartPos; j < originalEndPos; ++j) {
-                sb.appendCodePoint(text.charAt(j));
-            }
-            tokens.add(new Token(segType == 1 ? sb.toString().replace(COMMA, DOT) : sb.toString(),
-                Token.Type.fromInt(type), Token.SegType.fromInt(segType), originalStartPos, originalEndPos));
-        }
-        freeMemory(resPointer);
-        return tokens;
+    final List<Token> tokens = new ArrayList<>();
+    // Positions from JNI implementation .cpp file
+    int rangesSize = Unsafe.getInt(resPointer + 8 * 2);
+    long rangesDataPointer = Unsafe.getLong(resPointer + 8 * 3);
+    int tokenSize = 4 * 6;
+    for (int i = 0; i < rangesSize; ++i) {
+      // Positions of UNSAFE values are calculated from {struct Token} in tokenizer.hpp
+      int originalStartPos = Unsafe.getInt(rangesDataPointer + i * tokenSize + 8);
+      int originalEndPos = Unsafe.getInt(rangesDataPointer + i * tokenSize + 12);
+      int type = Unsafe.getInt(rangesDataPointer + i * tokenSize + 16);
+      int segType = Unsafe.getInt(rangesDataPointer + i * tokenSize + 20);
+
+      // Build substring from UNSAFE array of codepoints
+      final StringBuilder sb = new StringBuilder();
+      for (int j = originalStartPos; j < originalEndPos; ++j) {
+        sb.appendCodePoint(text.charAt(j));
+      }
+      tokens.add(new Token(segType == 1 ? sb.toString().replace(COMMA, DOT) : sb.toString(),
+          Token.Type.fromInt(type), Token.SegType.fromInt(segType), originalStartPos, originalEndPos));
     }
+    freeMemory(resPointer);
+    return tokens;
+  }
 
-    //Calls CocCoc lib's segmentPointer function
-    public native long segmentPointer(String text, boolean forTransforming, int tokenizeOption, boolean keepPunctuation);
+  //Calls CocCoc lib's segmentPointer function
+  public native long segmentPointer(String text, boolean forTransforming, int tokenizeOption, boolean keepPunctuation);
 
-    //Calls CocCoc lib's freeMemory function
-    private native void freeMemory(long resPointer);
+  //Calls CocCoc lib's freeMemory function
+  private native void freeMemory(long resPointer);
 
-    //Calls CocCoc lib's initialize function
-    private native int initialize(String dictPath);
+  //Calls CocCoc lib's initialize function
+  private native int initialize(String dictPath);
 }
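Since the class above loads libcoccoc_tokenizer_jni in a static initializer and reads the compiled dictionaries from `dict_path`, both must be visible to the Elasticsearch JVM; a quick sanity check using the default install locations referenced elsewhere in this patch:

```sh
# Shared JNI library and dictionaries produced by the coccoc-tokenizer `make install`
ls /usr/local/lib/libcoccoc_tokenizer_jni.so
ls /usr/local/share/tokenizer/dicts

# When running the Maven tests outside Docker, expose the lib to the JVM as the CI workflow does
export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
mvn --batch-mode test
```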
diff --git a/src/main/java/com/coccoc/Unsafe.java b/src/main/java/com/coccoc/Unsafe.java
index 0ba7d86..12630d0 100644
--- a/src/main/java/com/coccoc/Unsafe.java
+++ b/src/main/java/com/coccoc/Unsafe.java
@@ -1,109 +1,29 @@
 package com.coccoc;
 
-import java.io.*;
 import java.lang.reflect.Field;
 
 public class Unsafe {
-    public static final sun.misc.Unsafe UNSAFE;
-    static {
-        sun.misc.Unsafe unsafe = null;
-        try {
-            Field field = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
-            field.setAccessible(true);
-            unsafe = (sun.misc.Unsafe) field.get(null);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        UNSAFE = unsafe;
-    }
+  public static final sun.misc.Unsafe UNSAFE;
 
-    private static final int BUFFER_SIZE = 1024 * 1024 * 16;
-    private static final String CHARSET = "iso-8859-1";
+  static {
+    sun.misc.Unsafe unsafe = null;
+    try {
+      Field field = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
+      field.setAccessible(true);
+      unsafe = (sun.misc.Unsafe) field.get(null);
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+    UNSAFE = unsafe;
+  }
 
-    public static void saveUnsafeMemory(OutputStream os, long memory, long size) throws IOException {
-        for (long i = memory; i < memory + size; i++) {
-            os.write(UNSAFE.getByte(i));
-        }
-    }
-    public static long readToUnsafeMemory(File file) throws IOException {
-        long len = file.length();
-        long memory = UNSAFE.allocateMemory(len);
-        try (BufferedInputStream fis = new BufferedInputStream(new FileInputStream(file), BUFFER_SIZE)) {
-            for (long i = 0; i < len; i++) {
-                UNSAFE.putByte(memory + i, (byte) fis.read());
-            }
-        }
-        return memory;
-    }
+  public static int getInt(long address) {
+    return UNSAFE.getInt(address);
+  }
 
-    public static float getFloat(byte[] buffer, int offset) {
-        return UNSAFE.getFloat(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static double getDouble(byte[] buffer, int offset) {
-        return UNSAFE.getDouble(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static long getLong(byte[] buffer, int offset) {
-        return UNSAFE.getLong(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static int getInt(byte[] buffer, int offset) {
-        return UNSAFE.getInt(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static short getShort(byte[] buffer, int offset) {
-        return UNSAFE.getShort(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static byte getByte(byte[] buffer, int offset) {
-        return UNSAFE.getByte(buffer, (long) (sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + offset));
-    }
-
-    public static void writeString(String str, OutputStream os, long unsafeBuffer)
-            throws IOException {
-        byte[] bytes = str.getBytes(CHARSET);
-        writeInt(bytes.length, os, unsafeBuffer);
-        os.write(bytes);
-    }
-
-    public static void writeInt(int value, OutputStream os, long unsafeBuffer) throws IOException {
-        Unsafe.UNSAFE.putInt(unsafeBuffer, value);
-        Unsafe.saveUnsafeMemory(os, unsafeBuffer, 4);
-    }
-
-    public static String readString(InputStream is) throws IOException {
-        int length = readInt(is);
-        byte[] bytes = new byte[length];
-        is.read(bytes);
-        return new String(bytes, CHARSET);
-    }
-
-    public static int readInt(InputStream is) throws IOException {
-        byte[] bytes = new byte[4];
-        is.read(bytes);
-        return Unsafe.getInt(bytes, 0);
-    }
-
-    public static void copy(byte[] values, int length, long pointer) {
-        UNSAFE.copyMemory(values, sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET, null, pointer, length);
-    }
-
-    public static void copy(byte[] values, int off, int length, long pointer) {
-        UNSAFE.copyMemory(values, sun.misc.Unsafe.ARRAY_BYTE_BASE_OFFSET + off, null, pointer, length);
-    }
-
-    public static void copy(short[] values, int length, long pointer) {
-        UNSAFE.copyMemory(values, sun.misc.Unsafe.ARRAY_SHORT_BASE_OFFSET, null, pointer, length * Short.BYTES);
-    }
-
-    public static void copy(int[] values, int length, long pointer) {
-        UNSAFE.copyMemory(values, sun.misc.Unsafe.ARRAY_INT_BASE_OFFSET, null, pointer, length * Integer.BYTES);
-    }
-
-    public static void copy(long[] values, int length, long pointer) {
-        UNSAFE.copyMemory(values, sun.misc.Unsafe.ARRAY_LONG_BASE_OFFSET, null, pointer, length * Long.BYTES);
-    }
+  public static long getLong(long address) {
+    return UNSAFE.getLong(address);
+  }
 }
diff --git a/src/main/java/org/elasticsearch/index/analysis/VietnameseAnalyzerProvider.java b/src/main/java/org/elasticsearch/index/analysis/VietnameseAnalyzerProvider.java
index b511a71..3b3b7d3 100644
--- a/src/main/java/org/elasticsearch/index/analysis/VietnameseAnalyzerProvider.java
+++ b/src/main/java/org/elasticsearch/index/analysis/VietnameseAnalyzerProvider.java
@@ -30,7 +30,7 @@ public class VietnameseAnalyzerProvider extends AbstractIndexAnalyzerProvider> nodePlugins() {
@@ -34,8 +38,8 @@ public void testPluginIsLoaded() throws Exception {
         NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().get();
         for (NodeInfo nodeInfo : response.getNodes()) {
             boolean pluginFound = false;
-            for (PluginDescriptor pluginInfo : nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos()) {
-                if (pluginInfo.getName().equals(AnalysisVietnamesePlugin.class.getName())) {
+            for (PluginRuntimeInfo pluginInfo : nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos()) {
+                if (pluginInfo.descriptor().getName().equals(AnalysisVietnamesePlugin.class.getName())) {
                     pluginFound = true;
                     break;
                 }
@@ -62,25 +66,25 @@ public void testVietnameseAnalyzerInMapping() throws ExecutionException, Interru
         ensureGreen("test");
         final XContentBuilder mapping = jsonBuilder()
                 .startObject()
-                    .startObject("_doc")
-                        .startObject("properties")
-                            .startObject("foo")
-                                .field("type", "text")
-                                .field("analyzer", "vi_analyzer")
-                            .endObject()
-                        .endObject()
-                    .endObject()
+                .startObject("_doc")
+                .startObject("properties")
+                .startObject("foo")
+                .field("type", "text")
+                .field("analyzer", "vi_analyzer")
+                .endObject()
+                .endObject()
+                .endObject()
                 .endObject();
-        client().admin().indices().preparePutMapping("test").setType("_doc").setSource(mapping).get();
+        client().admin().indices().preparePutMapping("test").setSource(mapping).get();
         final XContentBuilder source = jsonBuilder()
                 .startObject()
-                    .field("foo", "công nghệ thông tin Việt Nam")
+                .field("foo", "công nghệ thông tin Việt Nam")
                 .endObject();
-        index("test", "_doc", "1", source);
+        index("test", "1", source);
         refresh();
         SearchResponse response = client().prepareSearch("test").
                 setQuery(
-                        QueryBuilders.matchQuery("foo", "công nghệ thông tin")
+                        QueryBuilders.matchQuery("foo", "công nghệ thông tin")
                 ).execute().actionGet();
         assertThat(response.getHits().getTotalHits().toString(), is("1 hits"));
     }
diff --git a/src/test/java/org/elasticsearch/index/analysis/VietnameseAnalysisTests.java b/src/test/java/org/elasticsearch/index/analysis/VietnameseAnalysisTests.java
index 2e87426..ecfb836 100644
--- a/src/test/java/org/elasticsearch/index/analysis/VietnameseAnalysisTests.java
+++ b/src/test/java/org/elasticsearch/index/analysis/VietnameseAnalysisTests.java
@@ -10,19 +10,19 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.plugin.analysis.vi.AnalysisVietnamesePlugin;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.io.IOException;
 import java.io.StringReader;
 
-import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
+import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 
 /**
  * Created by duydo on 2/19/17.
  */
-public class VietnameseAnalysisTests extends ESTestCase {
+public class VietnameseAnalysisTests extends ESSingleNodeTestCase {
 
     public void testVietnameseAnalysis() throws IOException {
         TestAnalysis analysis = createTestAnalysis(Settings.EMPTY);