Reformat some RepositoryData code (#67775)
Split this out from another branch that makes more meaningful changes,
to get the trivial stuff out of the way first.
DaveCTurner authored Jan 20, 2021
1 parent f32ab15 commit 86b600d
Showing 6 changed files with 151 additions and 58 deletions.
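
For orientation before the hunks below: the change is mechanical reformatting of long RepositoryData constructor calls and signatures to one argument per line, plus a few blank-line and javadoc tidy-ups. The before/after sketch here is lifted from the RepositoryData.EMPTY hunk further down; it is illustrative only and introduces no new API.

// Before: argument wrapping driven by line length.
public static final RepositoryData EMPTY = new RepositoryData(EMPTY_REPO_GEN, Collections.emptyMap(), Collections.emptyMap(),
    Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), ShardGenerations.EMPTY,
    IndexMetaDataGenerations.EMPTY);

// After: one argument per line.
public static final RepositoryData EMPTY = new RepositoryData(
    EMPTY_REPO_GEN,
    Collections.emptyMap(),
    Collections.emptyMap(),
    Collections.emptyMap(),
    Collections.emptyMap(),
    Collections.emptyMap(),
    ShardGenerations.EMPTY,
    IndexMetaDataGenerations.EMPTY);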
@@ -206,12 +206,14 @@ public void testHandlingMissingRootLevelSnapshotMetadata() throws Exception {
Files.delete(repo.resolve(String.format(Locale.ROOT, BlobStoreRepository.SNAPSHOT_NAME_FORMAT, snapshotToCorrupt.getUUID())));

logger.info("--> strip version information from index-N blob");
- final RepositoryData withoutVersions = new RepositoryData(repositoryData.getGenId(),
- repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(
- SnapshotId::getUUID, Function.identity())),
- repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(
- SnapshotId::getUUID, repositoryData::getSnapshotState)),
- Collections.emptyMap(), Collections.emptyMap(), ShardGenerations.EMPTY, IndexMetaDataGenerations.EMPTY);
+ final RepositoryData withoutVersions = new RepositoryData(
+ repositoryData.getGenId(),
+ repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())),
+ repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotState)),
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ ShardGenerations.EMPTY,
+ IndexMetaDataGenerations.EMPTY);

Files.write(repo.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + withoutVersions.getGenId()),
BytesReference.toBytes(BytesReference.bytes(
@@ -330,20 +332,18 @@ public void testRepairBrokenShardGenerations() throws Exception {
assertFileExists(initialShardMetaPath);
Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + randomIntBetween(1, 1000)));

- final RepositoryData repositoryData1 = getRepositoryData(repoName);
+ final RepositoryData repositoryData = getRepositoryData(repoName);
final Map<String, SnapshotId> snapshotIds =
- repositoryData1.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, Function.identity()));
+ repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, Function.identity()));
final RepositoryData brokenRepoData = new RepositoryData(
- repositoryData1.getGenId(), snapshotIds, snapshotIds.values().stream().collect(
- Collectors.toMap(SnapshotId::getUUID, repositoryData1::getSnapshotState)),
- snapshotIds.values().stream().collect(
- Collectors.toMap(SnapshotId::getUUID, repositoryData1::getVersion)),
- repositoryData1.getIndices().values().stream().collect(
- Collectors.toMap(Function.identity(), repositoryData1::getSnapshots)
- ), ShardGenerations.builder().putAll(repositoryData1.shardGenerations()).put(indexId, 0, "0").build(),
- repositoryData1.indexMetaDataGenerations()
- );
- Files.write(repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData1.getGenId()),
+ repositoryData.getGenId(),
+ snapshotIds,
+ snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotState)),
+ snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getVersion)),
+ repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)),
+ ShardGenerations.builder().putAll(repositoryData.shardGenerations()).put(indexId, 0, "0").build(),
+ repositoryData.indexMetaDataGenerations());
+ Files.write(repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()),
BytesReference.toBytes(BytesReference.bytes(
brokenRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), Version.CURRENT))),
StandardOpenOption.TRUNCATE_EXISTING);
[next changed file]
@@ -22,6 +22,7 @@
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
+ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.util.CollectionUtils;
@@ -70,8 +71,14 @@ public final class RepositoryData {
/**
* An instance initialized for an empty repository.
*/
- public static final RepositoryData EMPTY = new RepositoryData(EMPTY_REPO_GEN, Collections.emptyMap(), Collections.emptyMap(),
- Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), ShardGenerations.EMPTY,
+ public static final RepositoryData EMPTY = new RepositoryData(
+ EMPTY_REPO_GEN,
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ ShardGenerations.EMPTY,
IndexMetaDataGenerations.EMPTY);

/**
@@ -107,19 +114,34 @@ public final class RepositoryData {
*/
private final ShardGenerations shardGenerations;

- public RepositoryData(long genId, Map<String, SnapshotId> snapshotIds, Map<String, SnapshotState> snapshotStates,
- Map<String, Version> snapshotVersions, Map<IndexId, List<SnapshotId>> indexSnapshots,
- ShardGenerations shardGenerations, IndexMetaDataGenerations indexMetaDataGenerations) {
- this(genId, Collections.unmodifiableMap(snapshotIds), Collections.unmodifiableMap(snapshotStates),
+ public RepositoryData(
+ long genId,
+ Map<String, SnapshotId> snapshotIds,
+ Map<String, SnapshotState> snapshotStates,
+ Map<String, Version> snapshotVersions,
+ Map<IndexId, List<SnapshotId>> indexSnapshots,
+ ShardGenerations shardGenerations,
+ IndexMetaDataGenerations indexMetaDataGenerations) {
+ this(
+ genId,
+ Collections.unmodifiableMap(snapshotIds),
+ Collections.unmodifiableMap(snapshotStates),
Collections.unmodifiableMap(snapshotVersions),
indexSnapshots.keySet().stream().collect(Collectors.toUnmodifiableMap(IndexId::getName, Function.identity())),
- Collections.unmodifiableMap(indexSnapshots), shardGenerations, indexMetaDataGenerations);
+ Collections.unmodifiableMap(indexSnapshots),
+ shardGenerations,
+ indexMetaDataGenerations);
}

- private RepositoryData(long genId, Map<String, SnapshotId> snapshotIds, Map<String, SnapshotState> snapshotStates,
- Map<String, Version> snapshotVersions, Map<String, IndexId> indices,
- Map<IndexId, List<SnapshotId>> indexSnapshots, ShardGenerations shardGenerations,
- IndexMetaDataGenerations indexMetaDataGenerations) {
+ private RepositoryData(
+ long genId,
+ Map<String, SnapshotId> snapshotIds,
+ Map<String, SnapshotState> snapshotStates,
+ Map<String, Version> snapshotVersions,
+ Map<String, IndexId> indices,
+ Map<IndexId, List<SnapshotId>> indexSnapshots,
+ ShardGenerations shardGenerations,
+ IndexMetaDataGenerations indexMetaDataGenerations) {
this.genId = genId;
this.snapshotIds = snapshotIds;
this.snapshotStates = snapshotStates;
@@ -129,14 +151,21 @@ private RepositoryData(long genId, Map<String, SnapshotId> snapshotIds, Map<Stri
this.indexMetaDataGenerations = indexMetaDataGenerations;
this.snapshotVersions = snapshotVersions;
assert indices.values().containsAll(shardGenerations.indices()) : "ShardGenerations contained indices "
- + shardGenerations.indices() + " but snapshots only reference indices " + indices.values();
+ + shardGenerations.indices() + " but snapshots only reference indices " + indices.values();
assert indexSnapshots.values().stream().noneMatch(snapshotIdList -> Set.copyOf(snapshotIdList).size() != snapshotIdList.size()) :
"Found duplicate snapshot ids per index in [" + indexSnapshots + "]";
}

protected RepositoryData copy() {
return new RepositoryData(
- genId, snapshotIds, snapshotStates, snapshotVersions, indexSnapshots, shardGenerations, indexMetaDataGenerations);
+ genId,
+ snapshotIds,
+ snapshotStates,
+ snapshotVersions,
+ indices,
+ indexSnapshots,
+ shardGenerations,
+ indexMetaDataGenerations);
}

/**
@@ -151,7 +180,14 @@ public RepositoryData withVersions(Map<SnapshotId, Version> versions) {
final Map<String, Version> newVersions = new HashMap<>(snapshotVersions);
versions.forEach((id, version) -> newVersions.put(id.getUUID(), version));
return new RepositoryData(
- genId, snapshotIds, snapshotStates, newVersions, indexSnapshots, shardGenerations, indexMetaDataGenerations);
+ genId,
+ snapshotIds,
+ snapshotStates,
+ newVersions,
+ indices,
+ indexSnapshots,
+ shardGenerations,
+ indexMetaDataGenerations);
}

public ShardGenerations shardGenerations() {
@@ -221,8 +257,7 @@ public List<IndexId> indicesToUpdateAfterRemovingSnapshot(Collection<SnapshotId>

/**
* Returns a map of {@link IndexId} to a collection of {@link String} containing all the {@link IndexId} and the
- * {@link org.elasticsearch.cluster.metadata.IndexMetadata} blob name in it that can be removed after removing the given snapshot from
- * the repository.
+ * {@link IndexMetadata} blob name in it that can be removed after removing the given snapshot from the repository.
* NOTE: Does not return a mapping for {@link IndexId} values that will be removed completely from the repository.
*
* @param snapshotIds SnapshotIds to remove
@@ -255,7 +290,7 @@ public Map<IndexId, Collection<String>> indexMetaDataToRemoveAfterRemovingSnapsh
* generations indexed by the shard id they correspond to must be supplied.
* @param indexMetaBlobs Map of index metadata blob uuids
* @param newIdentifiers Map of new index metadata blob uuids keyed by the identifiers of the
- * {@link org.elasticsearch.cluster.metadata.IndexMetadata} in them
+ * {@link IndexMetadata} in them
*/
public RepositoryData addSnapshot(final SnapshotId snapshotId,
final SnapshotState snapshotState,
@@ -298,9 +333,14 @@ public RepositoryData addSnapshot(final SnapshotId snapshotId,
newIndexMetaGenerations = indexMetaDataGenerations.withAddedSnapshot(snapshotId, indexMetaBlobs, newIdentifiers);
}

- return new RepositoryData(genId, snapshots, newSnapshotStates, newSnapshotVersions, allIndexSnapshots,
- ShardGenerations.builder().putAll(this.shardGenerations).putAll(shardGenerations).build(),
- newIndexMetaGenerations);
+ return new RepositoryData(
+ genId,
+ snapshots,
+ newSnapshotStates,
+ newSnapshotVersions,
+ allIndexSnapshots,
+ ShardGenerations.builder().putAll(this.shardGenerations).putAll(shardGenerations).build(),
+ newIndexMetaGenerations);
}

/**
@@ -313,7 +353,14 @@ public RepositoryData withGenId(long newGeneration) {
if (newGeneration == genId) {
return this;
}
- return new RepositoryData(newGeneration, snapshotIds, snapshotStates, snapshotVersions, indices, indexSnapshots, shardGenerations,
+ return new RepositoryData(
+ newGeneration,
+ snapshotIds,
+ snapshotStates,
+ snapshotVersions,
+ indices,
+ indexSnapshots,
+ shardGenerations,
indexMetaDataGenerations);
}

@@ -354,10 +401,15 @@ public RepositoryData removeSnapshots(final Collection<SnapshotId> snapshots, fi
}
}

- return new RepositoryData(genId, newSnapshotIds, newSnapshotStates, newSnapshotVersions, indexSnapshots,
- ShardGenerations.builder().putAll(shardGenerations).putAll(updatedShardGenerations)
- .retainIndicesAndPruneDeletes(indexSnapshots.keySet()).build(),
- indexMetaDataGenerations.withRemovedSnapshots(snapshots)
+ return new RepositoryData(
+ genId,
+ newSnapshotIds,
+ newSnapshotStates,
+ newSnapshotVersions,
+ indexSnapshots,
+ ShardGenerations.builder().putAll(shardGenerations).putAll(updatedShardGenerations)
+ .retainIndicesAndPruneDeletes(indexSnapshots.keySet()).build(),
+ indexMetaDataGenerations.withRemovedSnapshots(snapshots)
);
}

@@ -455,6 +507,7 @@ public List<IndexId> resolveNewIndices(List<String> indicesToResolve, Map<String
public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final Version repoMetaVersion) throws IOException {
builder.startObject();
// write the snapshots list
+
builder.startArray(SNAPSHOTS);
final boolean shouldWriteIndexGens = SnapshotsService.useIndexGenerations(repoMetaVersion);
final boolean shouldWriteShardGens = SnapshotsService.useShardGenerations(repoMetaVersion);
@@ -482,6 +535,7 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final
builder.endObject();
}
builder.endArray();
+
// write the indices map
builder.startObject(INDICES);
for (final IndexId indexId : getIndices().values()) {
@@ -504,13 +558,15 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final
builder.endObject();
}
builder.endObject();
+
if (shouldWriteIndexGens) {
builder.field(MIN_VERSION, SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION.toString());
builder.field(INDEX_METADATA_IDENTIFIERS, indexMetaDataGenerations.identifiers);
} else if (shouldWriteShardGens) {
// Add min version field to make it impossible for older ES versions to deserialize this object
builder.field(MIN_VERSION, SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION.toString());
}
+
builder.endObject();
return builder;
}
@@ -563,7 +619,13 @@ public static RepositoryData snapshotsFromXContent(XContentParser parser, long g
// ensure we drained the stream completely
XContentParserUtils.ensureExpectedToken(null, parser.nextToken(), parser);

- return new RepositoryData(genId, snapshots, snapshotStates, snapshotVersions, indexSnapshots, shardGenerations.build(),
+ return new RepositoryData(
+ genId,
+ snapshots,
+ snapshotStates,
+ snapshotVersions,
+ indexSnapshots,
+ shardGenerations.build(),
buildIndexMetaGenerations(indexMetaLookup, indexLookup, indexMetaIdentifiers));
}

@@ -741,4 +803,5 @@ private static String parseLegacySnapshotUUID(XContentParser parser) throws IOEx
}
return uuid;
}
+
}
[next changed file]
@@ -140,12 +140,24 @@ public void testInitIndices() {
snapshotStates.put(snapshotId.getUUID(), randomFrom(SnapshotState.values()));
snapshotVersions.put(snapshotId.getUUID(), randomFrom(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion()));
}
- RepositoryData repositoryData = new RepositoryData(EMPTY_REPO_GEN, snapshotIds, Collections.emptyMap(), Collections.emptyMap(),
- Collections.emptyMap(), ShardGenerations.EMPTY, IndexMetaDataGenerations.EMPTY);
+ RepositoryData repositoryData = new RepositoryData(
+ EMPTY_REPO_GEN,
+ snapshotIds,
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ Collections.emptyMap(),
+ ShardGenerations.EMPTY,
+ IndexMetaDataGenerations.EMPTY);
// test that initializing indices works
Map<IndexId, List<SnapshotId>> indices = randomIndices(snapshotIds);
- RepositoryData newRepoData = new RepositoryData(repositoryData.getGenId(), snapshotIds, snapshotStates, snapshotVersions, indices,
- ShardGenerations.EMPTY, IndexMetaDataGenerations.EMPTY);
+ RepositoryData newRepoData = new RepositoryData(
+ repositoryData.getGenId(),
+ snapshotIds,
+ snapshotStates,
+ snapshotVersions,
+ indices,
+ ShardGenerations.EMPTY,
+ IndexMetaDataGenerations.EMPTY);
List<SnapshotId> expected = new ArrayList<>(repositoryData.getSnapshotIds());
Collections.sort(expected);
List<SnapshotId> actual = new ArrayList<>(newRepoData.getSnapshotIds());
@@ -228,8 +240,14 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException {
}
assertNotNull(corruptedIndexId);

- RepositoryData corruptedRepositoryData = new RepositoryData(parsedRepositoryData.getGenId(), snapshotIds, snapshotStates,
- snapshotVersions, indexSnapshots, shardGenBuilder.build(), IndexMetaDataGenerations.EMPTY);
+ RepositoryData corruptedRepositoryData = new RepositoryData(
+ parsedRepositoryData.getGenId(),
+ snapshotIds,
+ snapshotStates,
+ snapshotVersions,
+ indexSnapshots,
+ shardGenBuilder.build(),
+ IndexMetaDataGenerations.EMPTY);

final XContentBuilder corruptedBuilder = XContentBuilder.builder(xContent);
corruptedRepositoryData.snapshotsToXContent(corruptedBuilder, Version.CURRENT);
[next changed file]
@@ -200,13 +200,13 @@ public void testRepositoryDataConcurrentModificationNotAllowed() {
// write to index generational file
RepositoryData repositoryData = generateRandomRepoData();
final long startingGeneration = repositoryData.getGenId();
- final PlainActionFuture<RepositoryData> future1 = PlainActionFuture.newFuture();
- repository.writeIndexGen(repositoryData, startingGeneration, Version.CURRENT, Function.identity(),future1);
+ final PlainActionFuture<RepositoryData> future = PlainActionFuture.newFuture();
+ repository.writeIndexGen(repositoryData, startingGeneration, Version.CURRENT, Function.identity(), future);

// write repo data again to index generational file, errors because we already wrote to the
// N+1 generation from which this repository data instance was created
- expectThrows(RepositoryException.class,
- () -> writeIndexGen(repository, repositoryData.withGenId(startingGeneration + 1), repositoryData.getGenId()));
+ final RepositoryData fresherRepositoryData = repositoryData.withGenId(startingGeneration + 1);
+ expectThrows(RepositoryException.class, () -> writeIndexGen(repository, fresherRepositoryData, repositoryData.getGenId()));
}

public void testBadChunksize() throws Exception {
[remaining changed files not loaded]
