Optimize segment file directory detection: directories that already contain a version file need no initialization (#356)
weizijun authored Dec 11, 2023
1 parent 8118347 commit 71aa8d5
Showing 3 changed files with 61 additions and 16 deletions.
MetaDataSyncer.java
@@ -39,7 +39,6 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.havenask.cluster.ClusterChangedEvent;
@@ -70,6 +69,8 @@
import org.havenask.index.Index;
import org.havenask.threadpool.ThreadPool;

import com.carrotsearch.hppc.cursors.ObjectCursor;

public class MetaDataSyncer extends AbstractLifecycleComponent implements ClusterStateApplier {
private static final Logger LOGGER = LogManager.getLogger(MetaDataSyncer.class);
private static final int MAX_SYNC_TIMES = 30;
RuntimeSegmentGenerator.java
@@ -14,21 +14,26 @@

package org.havenask.engine.index.config.generator;

import org.havenask.common.Nullable;
import org.havenask.common.settings.Settings;
import org.havenask.engine.index.config.Schema;
import org.havenask.engine.util.RangeUtil;
import org.havenask.index.mapper.MapperService;
import org.havenask.index.shard.ShardId;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Locale;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.havenask.common.Nullable;
import org.havenask.common.settings.Settings;
import org.havenask.engine.index.config.Schema;
import org.havenask.engine.util.RangeUtil;
import org.havenask.engine.util.Utils;
import org.havenask.index.mapper.MapperService;
import org.havenask.index.shard.ShardId;

public class RuntimeSegmentGenerator {
private static final Logger LOGGER = LogManager.getLogger(RuntimeSegmentGenerator.class);

public static final String VERSION_FILE_NAME = "version.0";
public static final String VERSION_FILE_CONTENT = "{\n"
+ "\"description\":\n"
@@ -198,10 +203,14 @@ public void generate() throws IOException {
Files.createDirectories(dataPath);
}

if (Files.exists(dataPath.resolve(VERSION_FILE_NAME))) {
String versionFile = Utils.getIndexMaxVersion(dataPath);

if (versionFile != null && false == versionFile.isEmpty()) {
return;
}

LOGGER.info("generate runtime segment for index [{}], partition [{}]", indexName, partitionName);

Files.write(
dataPath.resolve(VERSION_FILE_NAME),
VERSION_FILE_CONTENT.getBytes(StandardCharsets.UTF_8),
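The new early return relies on Utils.getIndexMaxVersion, which is not shown in this diff. As a minimal, hypothetical sketch (class name, signature, and return contract are assumptions, not the project's actual code), a helper with this shape would satisfy the check above: scan the partition directory for version.N files and return the name of the highest-numbered one, or null when no version file exists.

// Hypothetical sketch only -- the real Utils.getIndexMaxVersion lives elsewhere in the repository.
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public final class VersionFileLookup {
    private static final String VERSION_PREFIX = "version.";

    // Returns the file name of the highest-numbered "version.N" file in dataPath,
    // or null if the directory contains no version file at all.
    public static String getIndexMaxVersion(Path dataPath) throws IOException {
        String maxName = null;
        long maxVersion = -1;
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dataPath, VERSION_PREFIX + "*")) {
            for (Path file : stream) {
                String name = file.getFileName().toString();
                try {
                    long version = Long.parseLong(name.substring(VERSION_PREFIX.length()));
                    if (version > maxVersion) {
                        maxVersion = version;
                        maxName = name;
                    }
                } catch (NumberFormatException ignored) {
                    // skip entries that match the glob but are not numbered
                }
            }
        }
        return maxName;
    }
}

Under that reading, any existing version file (version.0, version.1, ...) makes generate() return before writing the runtime files, whereas the old check only recognized version.0 (VERSION_FILE_NAME).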
RuntimeSegmentGeneratorTests.java
@@ -14,20 +14,22 @@

package org.havenask.engine.index.config.generator;

import org.havenask.common.settings.Settings;
import org.havenask.engine.HavenaskEnginePlugin;
import org.havenask.index.mapper.MapperService;
import org.havenask.index.mapper.MapperServiceTestCase;
import org.havenask.index.shard.ShardId;
import org.havenask.plugins.Plugin;
import static java.util.Collections.singletonList;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collection;
import java.util.Locale;

import static java.util.Collections.singletonList;
import org.havenask.common.settings.Settings;
import org.havenask.engine.HavenaskEnginePlugin;
import org.havenask.index.mapper.MapperService;
import org.havenask.index.mapper.MapperServiceTestCase;
import org.havenask.index.shard.ShardId;
import org.havenask.plugins.Plugin;

public class RuntimeSegmentGeneratorTests extends MapperServiceTestCase {
@Override
@@ -146,4 +148,37 @@ public void testBasic() throws IOException {
);
}

public void testExists() throws IOException {
String indexName = randomAlphaOfLength(5);
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword")));
Path runtimePath = createTempDir();
ShardId shardId = new ShardId(indexName, "_na_", 0);
RuntimeSegmentGenerator runtimeSegmentGenerator = new RuntimeSegmentGenerator(
shardId,
1,
Settings.EMPTY,
mapperService,
runtimePath
);

Path dataPath = runtimePath.resolve(indexName).resolve("generation_0").resolve("partition_0_65535");
Files.createDirectories(dataPath);

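// Pre-create a version file with a non-zero suffix so the partition directory already looks initialized.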
Files.write(
dataPath.resolve("version.1"),
RuntimeSegmentGenerator.VERSION_FILE_CONTENT.getBytes(StandardCharsets.UTF_8),
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING
);

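// With an existing version file, generate() is expected to return early and write none of the runtime config files.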
runtimeSegmentGenerator.generate();

assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.VERSION_FILE_NAME)));
assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.INDEX_FORMAT_VERSION_FILE_NAME)));
assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.INDEX_PARTITION_META_FILE_NAME)));
assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.SCHEMA_FILE_NAME)));
assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.DEPLOY_META_FILE_NAME)));
assertFalse(Files.exists(dataPath.resolve(RuntimeSegmentGenerator.ENTRY_TABLE_FILE_NAME)));
}

}
