From 92a3b3420dff542979e154a969437b161b34d619 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 30 Nov 2019 12:57:59 +0200 Subject: [PATCH 001/142] Polished main --- .../plugin/repository-encrypted/build.gradle | 11 ++ .../encrypted/BufferOnMarkInputStream.java | 182 ++++++++++++++++++ .../encrypted/ChainPacketsInputStream.java | 146 ++++++++++++++ .../encrypted/CountingInputStream.java | 86 +++++++++ .../DecryptionPacketsInputStream.java | 122 ++++++++++++ .../encrypted/EncryptedRepository.java | 8 + .../encrypted/EncryptedRepositoryPlugin.java | 42 ++++ .../EncryptionPacketsInputStream.java | 127 ++++++++++++ .../encrypted/PrefixInputStream.java | 106 ++++++++++ .../plugin-metadata/plugin-security.policy | 8 + .../encrypted/EncryptedRepositoryTests.java | 14 ++ .../test/repository_encrypted/10_basic.yml | 16 ++ 12 files changed, 868 insertions(+) create mode 100644 x-pack/plugin/repository-encrypted/build.gradle create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/plugin-metadata/plugin-security.policy create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java create mode 100644 x-pack/plugin/repository-encrypted/src/test/resources/rest-api-spec/test/repository_encrypted/10_basic.yml diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle new file mode 100644 index 0000000000000..5a2f82946f711 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -0,0 +1,11 @@ +evaluationDependsOn(xpackModule('core')) + +apply plugin: 'elasticsearch.esplugin' +esplugin { + name 'repository-encrypted' + description 'Elasticsearch Expanded Pack Plugin - client-side encrypted repositories.' 
+ classname 'org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin' + extendedPlugins = ['x-pack-core'] +} + +integTest.enabled = false diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java new file mode 100644 index 0000000000000..d143234c031de --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -0,0 +1,182 @@ +package org.elasticsearch.repositories.encrypted; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + + +public final class BufferOnMarkInputStream extends FilterInputStream { + + private final int bufferSize; + private byte[] ringBuffer; + private int head; + private int tail; + private int position; + private boolean markCalled; + private boolean resetCalled; + private boolean closed; + + public BufferOnMarkInputStream(InputStream in, int bufferSize) { + super(Objects.requireNonNull(in)); + this.bufferSize = bufferSize; + this.ringBuffer = null; + this.head = this.tail = this.position = -1; + this.markCalled = this.resetCalled = false; + this.closed = false; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + ensureOpen(); + Objects.checkFromIndexSize(off, len, b.length); + if (len == 0) { + return 0; + } + if (resetCalled) { + int bytesRead = readFromBuffer(b, off, len); + if (bytesRead == 0) { + resetCalled = false; + } else { + return bytesRead; + } + } + int bytesRead = in.read(b, off, len); + if (bytesRead <= 0) { + return bytesRead; + } + if (markCalled) { + if (false == writeToBuffer(b, off, len)) { + // could not fully write to buffer, invalidate mark + markCalled = false; + } + } + return bytesRead; + } + + @Override + public int read() throws IOException { + ensureOpen(); + byte[] arr = new byte[1]; + int readResult = read(arr, 0, arr.length); + if (readResult == -1) { + return -1; + } + return arr[0]; + } + + @Override + public long skip(long n) throws IOException { + ensureOpen(); + if (n <= 0) { + return 0; + } + if (false == markCalled) { + return in.skip(n); + } + long remaining = n; + int size = (int)Math.min(2048, remaining); + byte[] skipBuffer = new byte[size]; + while (remaining > 0) { + int bytesRead = read(skipBuffer, 0, (int)Math.min(size, remaining)); + if (bytesRead < 0) { + break; + } + remaining -= bytesRead; + } + return n - remaining; + } + + @Override + public int available() throws IOException { + ensureOpen(); + int bytesAvailable = 0; + if (resetCalled) { + if (position < tail) { + bytesAvailable += tail - position; + } else { + bytesAvailable += ringBuffer.length - position + tail; + } + } + bytesAvailable += in.available(); + return bytesAvailable; + } + + @Override + public void mark(int readlimit) { + if (readlimit > bufferSize) { + throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + bufferSize + "]"); + } + markCalled = true; + if (ringBuffer == null) { + ringBuffer = new byte[bufferSize]; + head = tail = position = 0; + } else { + head = position; + } + } + + @Override + public void reset() throws IOException { + ensureOpen(); + if (false == markCalled) { + throw new IOException("Mark not called or has been invalidated"); + } + resetCalled = true; + } + + private int 
readFromBuffer(byte[] b, int off, int len) { + if (position == tail) { + return 0; + } + final int readLength; + if (position < tail) { + readLength = Math.min(len, tail - position); + } else { + readLength = Math.min(len, ringBuffer.length - position); + } + System.arraycopy(ringBuffer, position, b, off, readLength); + position += readLength; + if (position == ringBuffer.length) { + position = 0; + } + return readLength; + } + + private boolean writeToBuffer(byte[] b, int off, int len) { + while (len > 0 && head != tail) { + final int writeLength; + if (head < tail) { + writeLength = Math.min(len, ringBuffer.length - tail); + } else { + writeLength = Math.min(len, head - tail); + } + System.arraycopy(b, off, ringBuffer, tail, writeLength); + tail += writeLength; + off += writeLength; + len -= writeLength; + if (tail == ringBuffer.length) { + tail = 0; + } + } + if (len != 0) { + return false; + } + return true; + } + + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); + } + } + + @Override + public void close() throws IOException { + if (false == closed) { + closed = true; + in.close(); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java new file mode 100644 index 0000000000000..298a2da7e5cca --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java @@ -0,0 +1,146 @@ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.Nullable; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +public abstract class ChainPacketsInputStream extends InputStream { + + private InputStream packetIn; + private InputStream markIn; + private boolean closed; + + public ChainPacketsInputStream() { + this.packetIn = null; + this.markIn = null; + this.closed = false; + } + + abstract boolean hasNextPacket(@Nullable InputStream currentPacketIn); + + abstract InputStream nextPacket(@Nullable InputStream currentPacketIn) throws IOException; + + @Override + public int read() throws IOException { + ensureOpen(); + do { + int byteVal = packetIn == null ? -1 : packetIn.read(); + if (byteVal != -1) { + return byteVal; + } + if (false == hasNextPacket(packetIn)) { + return -1; + } + nextIn(); + } while (true); + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + ensureOpen(); + Objects.checkFromIndexSize(off, len, b.length); + if (len == 0) { + return 0; + } + do { + int bytesRead = packetIn == null ? -1 : packetIn.read(b, off, len); + if (bytesRead != -1) { + return bytesRead; + } + if (false == hasNextPacket(packetIn)) { + return -1; + } + nextIn(); + } while (true); + } + + @Override + public long skip(long n) throws IOException { + ensureOpen(); + if (n <= 0) { + return 0; + } + long bytesRemaining = n; + while (bytesRemaining > 0) { + long bytesSkipped = packetIn == null ? 0 : packetIn.skip(bytesRemaining); + if (bytesSkipped == 0) { + int byteRead = read(); + if (byteRead == -1) { + break; + } else { + bytesRemaining--; + } + } else { + bytesRemaining -= bytesSkipped; + } + } + return n - bytesRemaining; + } + + @Override + public int available() throws IOException { + ensureOpen(); + return packetIn == null ? 
0 : packetIn.available(); + } + + @Override + public boolean markSupported() { + return true; + } + + @Override + public void mark(int readlimit) { + if (markSupported()) { + markIn = packetIn; + if (markIn != null) { + markIn.mark(readlimit); + } + } + } + + @Override + public void reset() throws IOException { + if (false == markSupported()) { + throw new IOException("Mark/reset not supported"); + } + packetIn = markIn; + if (packetIn != null) { + packetIn.reset(); + } + } + + @Override + public void close() throws IOException { + if (false == closed) { + closed = true; + if (packetIn != null) { + packetIn.close(); + } + while (hasNextPacket(packetIn)) { + nextIn(); + } + } + } + + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream is closed"); + } + } + + private void nextIn() throws IOException { + if (packetIn != null) { + packetIn.close(); + } + packetIn = nextPacket(packetIn); + if (packetIn == null) { + throw new NullPointerException(); + } + if (markSupported() && false == packetIn.markSupported()) { + throw new IllegalStateException("Packet input stream must support mark"); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java new file mode 100644 index 0000000000000..fe1171e3cb95e --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -0,0 +1,86 @@ +package org.elasticsearch.repositories.encrypted; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +public final class CountingInputStream extends FilterInputStream { + + private long count; + private long mark; + private boolean closed; + private final boolean closeSource; + + /** + * Wraps another input stream, counting the number of bytes read. + * + * @param in the input stream to be wrapped + * @param closeSource if closing this stream will propagate to the wrapped stream + */ + public CountingInputStream(InputStream in, boolean closeSource) { + super(Objects.requireNonNull(in)); + this.count = 0L; + this.mark = -1L; + this.closed = false; + this.closeSource = closeSource; + } + + /** Returns the number of bytes read. 
*/ + public long getCount() { + return count; + } + + @Override + public int read() throws IOException { + int result = in.read(); + if (result != -1) { + count++; + } + return result; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int result = in.read(b, off, len); + if (result != -1) { + count += result; + } + return result; + } + + @Override + public long skip(long n) throws IOException { + long result = in.skip(n); + count += result; + return result; + } + + @Override + public synchronized void mark(int readlimit) { + in.mark(readlimit); + mark = count; + } + + @Override + public synchronized void reset() throws IOException { + if (false == in.markSupported()) { + throw new IOException("Mark not supported"); + } + if (mark == -1L) { + throw new IOException("Mark not set"); + } + count = mark; + in.reset(); + } + + @Override + public void close() throws IOException { + if (false == closed) { + closed = true; + if (closeSource) { + in.close(); + } + } + } +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java new file mode 100644 index 0000000000000..9c6b458ef37d0 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -0,0 +1,122 @@ +package org.elasticsearch.repositories.encrypted; + +import javax.crypto.BadPaddingException; +import javax.crypto.Cipher; +import javax.crypto.IllegalBlockSizeException; +import javax.crypto.NoSuchPaddingException; +import javax.crypto.SecretKey; +import javax.crypto.ShortBufferException; +import javax.crypto.spec.GCMParameterSpec; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.NoSuchElementException; +import java.util.Objects; + +public final class DecryptionPacketsInputStream extends ChainPacketsInputStream { + + private final InputStream source; + private final SecretKey secretKey; + private final int nonce; + private final int packetLength; + private final byte[] packet; + private final byte[] iv; + + private boolean hasNext; + private long counter; + + public static long getDecryptionSize(long size, int packetLength) { + long encryptedPacketLength = packetLength + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + EncryptedRepository.GCM_IV_SIZE_IN_BYTES; + long completePackets = size / encryptedPacketLength; + long decryptedSize = completePackets * packetLength; + if (size % encryptedPacketLength != 0) { + decryptedSize += (size % encryptedPacketLength) - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + } + return decryptedSize; + } + + public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int nonce, int packetLength) { + this.source = Objects.requireNonNull(source); + this.secretKey = Objects.requireNonNull(secretKey); + this.nonce = nonce; + this.packetLength = packetLength; + this.packet = new byte[packetLength + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES]; + this.iv = new byte[EncryptedRepository.GCM_IV_SIZE_IN_BYTES]; + this.hasNext = true; + this.counter = EncryptedRepository.PACKET_START_COUNTER; + } + + 
@Override + boolean hasNextPacket(InputStream currentPacketIn) { + return hasNext; + } + + @Override + InputStream nextPacket(InputStream currentPacketIn) throws IOException { + if (currentPacketIn != null && currentPacketIn.read() != -1) { + throw new IllegalStateException("Stream for previous packet has not been fully processed"); + } + if (false == hasNextPacket(currentPacketIn)) { + throw new NoSuchElementException(); + } + PrefixInputStream packetInputStream = new PrefixInputStream(source, + packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES, + false); + int currentPacketLength = decrypt(packetInputStream); + if (currentPacketLength != packetLength) { + hasNext = false; + } + return new ByteArrayInputStream(packet, 0, currentPacketLength); + } + + @Override + public boolean markSupported() { + return false; + } + + @Override + public void mark(int readlimit) { + } + + @Override + public void reset() throws IOException { + throw new IOException("Mark/reset not supported"); + } + + private int decrypt(PrefixInputStream packetInputStream) throws IOException { + if (packetInputStream.read(iv) != iv.length) { + throw new IOException("Error while reading the heading IV of the packet"); + } + ByteBuffer ivBuffer = ByteBuffer.wrap(iv); + if (ivBuffer.getInt(0) != nonce || ivBuffer.getLong(4) != counter++) { + throw new IOException("Invalid packet IV"); + } + int packetLength = packetInputStream.read(packet); + if (packetLength < EncryptedRepository.GCM_TAG_SIZE_IN_BYTES) { + throw new IOException("Error while reading the packet"); + } + Cipher packetCipher = getPacketDecryptionCipher(iv); + try { + // in-place decryption + return packetCipher.doFinal(packet, 0, packetLength, packet); + } catch (ShortBufferException | IllegalBlockSizeException | BadPaddingException e) { + throw new IOException(e); + } + } + + private Cipher getPacketDecryptionCipher(byte[] packetIv) throws IOException { + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, packetIv); + try { + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); + packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); + return packetCipher; + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { + throw new IOException(e); + } + } +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java new file mode 100644 index 0000000000000..c10bd5da44970 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -0,0 +1,8 @@ +package org.elasticsearch.repositories.encrypted; + +public class EncryptedRepository { + static final int GCM_TAG_SIZE_IN_BYTES = 16; + static final int GCM_IV_SIZE_IN_BYTES = 12; + static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; + static final long PACKET_START_COUNTER = Long.MIN_VALUE; +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java new file mode 100644 index 0000000000000..c61c444203e82 --- /dev/null +++ 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.Map; + +public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { + + public EncryptedRepositoryPlugin(final Settings settings) { + } + + @Override + public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry, + ThreadPool threadPool) { + return Map.of(); + } + + @Override + public List> getSettings() { + return List.of(); + } + + @Override + public void reload(Settings settings) { + // Secure settings should be readable inside this method. + } +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java new file mode 100644 index 0000000000000..5ce7463a1dbd9 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -0,0 +1,127 @@ +package org.elasticsearch.repositories.encrypted; + +import javax.crypto.Cipher; +import javax.crypto.CipherInputStream; +import javax.crypto.NoSuchPaddingException; +import javax.crypto.SecretKey; +import javax.crypto.spec.GCMParameterSpec; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.SequenceInputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.NoSuchElementException; +import java.util.Objects; + +public final class EncryptionPacketsInputStream extends ChainPacketsInputStream { + + private final InputStream source; + private final SecretKey secretKey; + private final ByteBuffer packetIv; + private final int packetLength; + private final int encryptedPacketLength; + + private long counter; + private Long markCounter; + private int markSourceOnNextPacket; + + public static long getEncryptionSize(long size, int packetLength) { + return size + (size / packetLength + 1) * (EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + EncryptedRepository.GCM_IV_SIZE_IN_BYTES); + } + + public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int nonce, int packetLength) { + this.source = Objects.requireNonNull(source); + this.secretKey = Objects.requireNonNull(secretKey); + this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + this.packetIv.putInt(0, nonce); + 
this.packetLength = packetLength; + this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + this.counter = EncryptedRepository.PACKET_START_COUNTER; + this.markCounter = null; + this.markSourceOnNextPacket = -1; + } + + @Override + boolean hasNextPacket(InputStream currentPacketIn) { + if (currentPacketIn != null && currentPacketIn instanceof CountingInputStream == false) { + throw new IllegalStateException(); + } + if (((CountingInputStream) currentPacketIn).getCount() > encryptedPacketLength) { + throw new IllegalStateException(); + } + return currentPacketIn == null || ((CountingInputStream) currentPacketIn).getCount() == encryptedPacketLength; + } + + @Override + InputStream nextPacket(InputStream currentPacketIn) throws IOException { + if (currentPacketIn != null && currentPacketIn.read() != -1) { + throw new IllegalStateException("Stream for previous packet has not been fully processed"); + } + if (false == hasNextPacket(currentPacketIn)) { + throw new NoSuchElementException(); + } + if (markSourceOnNextPacket != -1) { + markSourceOnNextPacket = -1; + source.mark(markSourceOnNextPacket); + } + InputStream encryptionInputStream = new PrefixInputStream(source, packetLength, false); + packetIv.putLong(4, counter++); + if (counter == EncryptedRepository.PACKET_START_COUNTER) { + // counter wrap around + throw new IOException("Maximum packet count limit exceeded"); + } + Cipher packetCipher = getPacketEncryptionCipher(secretKey, packetIv.array()); + encryptionInputStream = new CipherInputStream(encryptionInputStream, packetCipher); + encryptionInputStream = new SequenceInputStream(new ByteArrayInputStream(packetIv.array()), encryptionInputStream); + encryptionInputStream = new BufferOnMarkInputStream(encryptionInputStream, packetLength); + return new CountingInputStream(encryptionInputStream, false); + } + + @Override + public boolean markSupported() { + return source.markSupported(); + } + + @Override + public void mark(int readlimit) { + if (markSupported()) { + if (readlimit <= 0) { + throw new IllegalArgumentException("Mark readlimit must be a positive integer"); + } + super.mark(encryptedPacketLength); + markCounter = counter; + markSourceOnNextPacket = readlimit; + } + } + + @Override + public void reset() throws IOException { + if (false == markSupported()) { + throw new IOException("Mark/reset not supported"); + } + if (markCounter == null) { + throw new IOException("Mark no set"); + } + super.reset(); + counter = markCounter; + if (markSourceOnNextPacket == -1) { + source.reset(); + } + } + + private static Cipher getPacketEncryptionCipher(SecretKey secretKey, byte[] packetIv) throws IOException { + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, packetIv); + try { + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); + packetCipher.init(Cipher.ENCRYPT_MODE, secretKey, gcmParameterSpec); + return packetCipher; + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { + throw new IOException(e); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java new file mode 100644 index 0000000000000..8cf280126d915 --- /dev/null +++ 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -0,0 +1,106 @@ +package org.elasticsearch.repositories.encrypted; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +public final class PrefixInputStream extends FilterInputStream { + + private final int length; + private int position; + private boolean closeSource; + private boolean closed; + + public PrefixInputStream(InputStream in, int length, boolean closeSource) { + super(Objects.requireNonNull(in)); + this.length = length; + this.position = 0; + this.closeSource = closeSource; + this.closed = false; + } + + @Override + public int read() throws IOException { + ensureOpen(); + if (position >= length) { + return -1; + } + int byteVal = in.read(); + if (byteVal == -1) { + return -1; + } + position++; + return byteVal; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + ensureOpen(); + Objects.checkFromIndexSize(off, len, b.length); + if (len == 0) { + return 0; + } + if (position >= length) { + return -1; + } + int readSize = Math.min(len, length - position); + int bytesRead = in.read(b, off, readSize); + if (bytesRead == -1) { + return -1; + } + position += bytesRead; + return bytesRead; + } + + @Override + public long skip(long n) throws IOException { + ensureOpen(); + if (n <= 0 || position >= length) { + return 0; + } + long bytesToSkip = Math.min(n, length - position); + assert bytesToSkip > 0; + long bytesSkipped = in.skip(bytesToSkip); + position += bytesSkipped; + return bytesSkipped; + } + + @Override + public int available() throws IOException { + ensureOpen(); + return Math.min(length - position, super.available()); + } + + @Override + public void mark(int readlimit) { + } + + @Override + public void reset() throws IOException { + throw new IOException("mark/reset not supported"); + } + + @Override + public boolean markSupported() { + return false; + } + + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); + } + } + + @Override + public void close() throws IOException { + if (closed) { + return; + } + closed = true; + if (closeSource) { + in.close(); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/repository-encrypted/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..7f75e2af67c6e --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +grant { +}; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java new file mode 100644 index 0000000000000..e65120d749fcd --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.test.ESTestCase; + +public class EncryptedRepositoryTests extends ESTestCase { + public void testThatDoesNothing() { + } +} diff --git a/x-pack/plugin/repository-encrypted/src/test/resources/rest-api-spec/test/repository_encrypted/10_basic.yml b/x-pack/plugin/repository-encrypted/src/test/resources/rest-api-spec/test/repository_encrypted/10_basic.yml new file mode 100644 index 0000000000000..858ba3e21e3ae --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/resources/rest-api-spec/test/repository_encrypted/10_basic.yml @@ -0,0 +1,16 @@ +# Integration tests for repository-encrypted +# +"Plugin repository-encrypted is loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - contains: { nodes.$master.plugins: { name: repository-encrypted } } From d07c05f38ad50fc0f78a99883f0e1c40af5caebb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 1 Dec 2019 13:25:39 +0200 Subject: [PATCH 002/142] First successful tests --- .../encrypted/BufferOnMarkInputStream.java | 22 +++- .../encrypted/ChainPacketsInputStream.java | 8 +- .../encrypted/CountingInputStream.java | 5 + .../DecryptionPacketsInputStream.java | 8 +- .../encrypted/EncryptedRepository.java | 6 + .../EncryptionPacketsInputStream.java | 17 ++- .../encrypted/PrefixInputStream.java | 6 + .../EncryptionPacketsInputStreamTests.java | 106 ++++++++++++++++++ 8 files changed, 166 insertions(+), 12 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index d143234c031de..fde60dd0b0e12 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.repositories.encrypted; import java.io.FilterInputStream; @@ -116,6 +121,11 @@ public void mark(int readlimit) { } } + @Override + public boolean markSupported() { + return true; + } + @Override public void reset() throws IOException { ensureOpen(); @@ -165,12 +175,6 @@ private boolean writeToBuffer(byte[] b, int off, int len) { return true; } - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream has been closed"); - } - } - @Override public void close() throws IOException { if (false == closed) { @@ -179,4 +183,10 @@ public void close() throws IOException { } } + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); + } + } + } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java index 298a2da7e5cca..2bbd26f6aa712 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java @@ -1,10 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.repositories.encrypted; -import org.elasticsearch.common.Nullable; - import java.io.IOException; import java.io.InputStream; import java.util.Objects; +import org.elasticsearch.common.Nullable; public abstract class ChainPacketsInputStream extends InputStream { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index fe1171e3cb95e..7fdff89830b28 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.repositories.encrypted; import java.io.FilterInputStream; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 9c6b458ef37d0..9a44289109185 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.repositories.encrypted; import javax.crypto.BadPaddingException; @@ -11,6 +16,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; @@ -92,7 +98,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { if (packetInputStream.read(iv) != iv.length) { throw new IOException("Error while reading the heading IV of the packet"); } - ByteBuffer ivBuffer = ByteBuffer.wrap(iv); + ByteBuffer ivBuffer = ByteBuffer.wrap(iv).order(ByteOrder.LITTLE_ENDIAN); if (ivBuffer.getInt(0) != nonce || ivBuffer.getLong(4) != counter++) { throw new IOException("Invalid packet IV"); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index c10bd5da44970..c0f229f7eefae 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + package org.elasticsearch.repositories.encrypted; public class EncryptedRepository { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 5ce7463a1dbd9..a139aff6c1032 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + package org.elasticsearch.repositories.encrypted; import javax.crypto.Cipher; @@ -19,10 +25,12 @@ public final class EncryptionPacketsInputStream extends ChainPacketsInputStream { + private static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; + private final InputStream source; private final SecretKey secretKey; - private final ByteBuffer packetIv; private final int packetLength; + private final ByteBuffer packetIv; private final int encryptedPacketLength; private long counter; @@ -36,9 +44,12 @@ public static long getEncryptionSize(long size, int packetLength) { public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int nonce, int packetLength) { this.source = Objects.requireNonNull(source); this.secretKey = Objects.requireNonNull(secretKey); + if (packetLength <= 0 || packetLength >= MAX_PACKET_LENGTH_IN_BYTES) { + throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); + } + this.packetLength = packetLength; this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); this.packetIv.putInt(0, nonce); - this.packetLength = packetLength; this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; this.counter = EncryptedRepository.PACKET_START_COUNTER; this.markCounter = null; @@ -50,7 +61,7 @@ boolean hasNextPacket(InputStream currentPacketIn) { if (currentPacketIn != null && currentPacketIn instanceof CountingInputStream == false) { throw new IllegalStateException(); } - if (((CountingInputStream) currentPacketIn).getCount() > encryptedPacketLength) { + if (currentPacketIn != null && ((CountingInputStream) currentPacketIn).getCount() > encryptedPacketLength) { throw new IllegalStateException(); } return currentPacketIn == null || ((CountingInputStream) currentPacketIn).getCount() == encryptedPacketLength; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 8cf280126d915..de46f3386dc12 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + package org.elasticsearch.repositories.encrypted; import java.io.FilterInputStream; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java new file mode 100644 index 0000000000000..940ce978a2790 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.junit.BeforeClass; + +import javax.crypto.Cipher; +import javax.crypto.CipherInputStream; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.GCMParameterSpec; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.security.SecureRandom; +import java.util.Arrays; +import java.util.Objects; + +public class EncryptionPacketsInputStreamTests extends ESTestCase { + + // test odd packet lengths (multiple of AES_BLOCK_SIZE or not) + // test non AES Key + // test read strategies (mark/reset) + + private static int TEST_ARRAY_SIZE = 1 << 20; + private static byte[] testPlaintextArray; + private static SecretKey secretKey; + + @BeforeClass + static void createSecretKeyAndTestArray() throws Exception { + try { + KeyGenerator keyGen = KeyGenerator.getInstance("AES"); + keyGen.init(256, new SecureRandom()); + secretKey = keyGen.generateKey(); + } catch (Exception e) { + throw new RuntimeException(e); + } + testPlaintextArray = new byte[TEST_ARRAY_SIZE]; + Randomness.get().nextBytes(testPlaintextArray); + } + + public void testShorterThanPacket() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(2044); + int size = 1 + Randomness.get().nextInt(packetSize - 1); + testEncryptPacketWise(size, packetSize, new DefaultBufferedReadAllStrategy()); + } + + private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { + int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size); + int nonce = Randomness.get().nextInt(); + long counter = EncryptedRepository.PACKET_START_COUNTER; + try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, + plaintextOffset, size), secretKey, nonce, packetSize)) { + byte[] ciphertextArray = readStrategy.readAll(encryptionInputStream); + assertThat((long)ciphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(size, packetSize))); + for (int ciphertextOffset = 0; ciphertextOffset < ciphertextArray.length; ciphertextOffset += encryptedPacketSize) { + ByteBuffer ivBuffer = ByteBuffer.wrap(ciphertextArray, ciphertextOffset, + EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + assertThat(ivBuffer.getInt(0), Matchers.is(nonce)); + assertThat(ivBuffer.getLong(4), Matchers.is(counter++)); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, + Arrays.copyOfRange(ciphertextArray, ciphertextOffset, + ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES)); + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); + packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); + try (InputStream packetDecryptionInputStream = new CipherInputStream(new ByteArrayInputStream(ciphertextArray, + ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES, + packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES), packetCipher)) { + byte[] decryptedCiphertext = packetDecryptionInputStream.readAllBytes(); + assertThat(decryptedCiphertext.length, 
Matchers.is(size)); + assertSubArray(decryptedCiphertext, 0, testPlaintextArray, plaintextOffset, size); + } + } + } + } + + private void assertSubArray(byte[] arr1, int offset1, byte[] arr2, int offset2, int length) { + Objects.checkFromIndexSize(offset1, length, arr1.length); + Objects.checkFromIndexSize(offset2, length, arr2.length); + for (int i = 0; i < length; i++) { + assertThat(arr1[offset1 + i], Matchers.is(arr2[offset2 + i])); + } + } + + interface ReadStrategy { + byte[] readAll(InputStream inputStream) throws IOException; + } + + static class DefaultBufferedReadAllStrategy implements ReadStrategy { + @Override + public byte[] readAll(InputStream inputStream) throws IOException { + return inputStream.readAllBytes(); + } + } + +} From de603a73a9a6951afe4555ab24755d95b03b00e5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 1 Dec 2019 15:08:36 +0200 Subject: [PATCH 003/142] More tests --- .../EncryptionPacketsInputStreamTests.java | 29 ++++++++++++++----- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 940ce978a2790..c055b337942fc 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -48,15 +48,27 @@ static void createSecretKeyAndTestArray() throws Exception { Randomness.get().nextBytes(testPlaintextArray); } - public void testShorterThanPacket() throws Exception { - int packetSize = 3 + Randomness.get().nextInt(2044); + public void testEmpty() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(2048); + testEncryptPacketWise(0, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testSingleByteSize() throws Exception { + testEncryptPacketWise(1, 1, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(1, 2, new DefaultBufferedReadAllStrategy()); + int packetSize = 3 + Randomness.get().nextInt(2046); + testEncryptPacketWise(1, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testSizeSmallerThanPacket() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(2045); int size = 1 + Randomness.get().nextInt(packetSize - 1); testEncryptPacketWise(size, packetSize, new DefaultBufferedReadAllStrategy()); } private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; - int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size); + int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); int nonce = Randomness.get().nextInt(); long counter = EncryptedRepository.PACKET_START_COUNTER; try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, @@ -66,8 +78,8 @@ private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readSt for (int ciphertextOffset = 0; ciphertextOffset < ciphertextArray.length; ciphertextOffset += encryptedPacketSize) { ByteBuffer ivBuffer = ByteBuffer.wrap(ciphertextArray, ciphertextOffset, 
EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); - assertThat(ivBuffer.getInt(0), Matchers.is(nonce)); - assertThat(ivBuffer.getLong(4), Matchers.is(counter++)); + assertThat(ivBuffer.getInt(), Matchers.is(nonce)); + assertThat(ivBuffer.getLong(), Matchers.is(counter++)); GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, Arrays.copyOfRange(ciphertextArray, ciphertextOffset, ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES)); @@ -77,8 +89,11 @@ private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readSt ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES, packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES), packetCipher)) { byte[] decryptedCiphertext = packetDecryptionInputStream.readAllBytes(); - assertThat(decryptedCiphertext.length, Matchers.is(size)); - assertSubArray(decryptedCiphertext, 0, testPlaintextArray, plaintextOffset, size); + int decryptedPacketSize = size <= packetSize ? size : packetSize; + assertThat(decryptedCiphertext.length, Matchers.is(decryptedPacketSize)); + assertSubArray(decryptedCiphertext, 0, testPlaintextArray, plaintextOffset, decryptedPacketSize); + size -= decryptedPacketSize; + plaintextOffset += decryptedPacketSize; } } } From 7cc62e0a80d004dc95e00a5a42657ee5b4076d6d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 1 Dec 2019 16:00:23 +0200 Subject: [PATCH 004/142] More tests --- .../encrypted/EncryptedRepository.java | 1 + .../EncryptionPacketsInputStreamTests.java | 50 ++++++++++++++++++- 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index c0f229f7eefae..7e1131224118b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -9,6 +9,7 @@ public class EncryptedRepository { static final int GCM_TAG_SIZE_IN_BYTES = 16; static final int GCM_IV_SIZE_IN_BYTES = 12; + static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index c055b337942fc..1e66ece9d13d6 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -56,16 +56,62 @@ public void testEmpty() throws Exception { public void testSingleByteSize() throws Exception { testEncryptPacketWise(1, 1, new DefaultBufferedReadAllStrategy()); testEncryptPacketWise(1, 2, new DefaultBufferedReadAllStrategy()); - int packetSize = 3 + Randomness.get().nextInt(2046); + testEncryptPacketWise(1, 3, new DefaultBufferedReadAllStrategy()); + int packetSize = 4 + Randomness.get().nextInt(2046); testEncryptPacketWise(1, packetSize, new 
DefaultBufferedReadAllStrategy()); } - public void testSizeSmallerThanPacket() throws Exception { + public void testSizeSmallerThanPacketSize() throws Exception { int packetSize = 3 + Randomness.get().nextInt(2045); + testEncryptPacketWise(packetSize - 1, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetSize - 2, packetSize, new DefaultBufferedReadAllStrategy()); int size = 1 + Randomness.get().nextInt(packetSize - 1); testEncryptPacketWise(size, packetSize, new DefaultBufferedReadAllStrategy()); } + public void testSizeEqualToPacketSize() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(2048); + testEncryptPacketWise(packetSize, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testSizeLargerThanPacketSize() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(2048); + testEncryptPacketWise(packetSize + 1, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetSize + 2, packetSize, new DefaultBufferedReadAllStrategy()); + int size = packetSize + 3 + Randomness.get().nextInt(packetSize); + testEncryptPacketWise(size, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testSizeMultipleOfPacketSize() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(512); + testEncryptPacketWise(2 * packetSize, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(3 * packetSize, packetSize, new DefaultBufferedReadAllStrategy()); + int packetCount = 4 + Randomness.get().nextInt(12); + testEncryptPacketWise(packetCount * packetSize, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testSizeAlmostMultipleOfPacketSize() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(510); + int packetCount = 2 + Randomness.get().nextInt(15); + testEncryptPacketWise(packetCount * packetSize - 1, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetCount * packetSize - 2, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetCount * packetSize + 1, packetSize, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetCount * packetSize + 2, packetSize, new DefaultBufferedReadAllStrategy()); + } + + public void testShortPacketSizes() throws Exception { + int packetCount = 2 + Randomness.get().nextInt(15); + testEncryptPacketWise(2 + Randomness.get().nextInt(15), 1, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(4 + Randomness.get().nextInt(30), 2, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(6 + Randomness.get().nextInt(45), 3, new DefaultBufferedReadAllStrategy()); + } + + public void testPacketSizeMultipleOfAESBlockSize() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(8); + testEncryptPacketWise(1 + Randomness.get().nextInt(8192), packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, + new DefaultBufferedReadAllStrategy()); + } + private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); From cbd3c507e0dd133949f52581633fe7a06b4af61a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 1 Dec 2019 22:59:37 +0200 Subject: [PATCH 005/142] BufferOnMarkInputStreamBug --- .../encrypted/BufferOnMarkInputStream.java | 43 +++++++++------ 
.../EncryptionPacketsInputStream.java | 2 +- .../EncryptionPacketsInputStreamTests.java | 54 ++++++++++++++++--- 3 files changed, 76 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index fde60dd0b0e12..1e0e9886441d6 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -97,7 +97,7 @@ public int available() throws IOException { ensureOpen(); int bytesAvailable = 0; if (resetCalled) { - if (position < tail) { + if (position <= tail) { bytesAvailable += tail - position; } else { bytesAvailable += ringBuffer.length - position + tail; @@ -114,7 +114,8 @@ public void mark(int readlimit) { } markCalled = true; if (ringBuffer == null) { - ringBuffer = new byte[bufferSize]; + // "+ 1" for the full-buffer sentinel free element + ringBuffer = new byte[bufferSize + 1]; head = tail = position = 0; } else { head = position; @@ -135,12 +136,20 @@ public void reset() throws IOException { resetCalled = true; } + @Override + public void close() throws IOException { + if (false == closed) { + closed = true; + in.close(); + } + } + private int readFromBuffer(byte[] b, int off, int len) { if (position == tail) { return 0; } final int readLength; - if (position < tail) { + if (position <= tail) { readLength = Math.min(len, tail - position); } else { readLength = Math.min(len, ringBuffer.length - position); @@ -153,10 +162,23 @@ private int readFromBuffer(byte[] b, int off, int len) { return readLength; } + private int getRemainingBufferCapacity() { + if (head == tail) { + return ringBuffer.length - 1; + } else if (head < tail) { + return ringBuffer.length - tail + head - 1; + } else { + return head - tail - 1; + } + } + private boolean writeToBuffer(byte[] b, int off, int len) { - while (len > 0 && head != tail) { + if (len > getRemainingBufferCapacity()) { + return false; + } + while (len > 0) { final int writeLength; - if (head < tail) { + if (head <= tail) { writeLength = Math.min(len, ringBuffer.length - tail); } else { writeLength = Math.min(len, head - tail); @@ -169,20 +191,9 @@ private boolean writeToBuffer(byte[] b, int off, int len) { tail = 0; } } - if (len != 0) { - return false; - } return true; } - @Override - public void close() throws IOException { - if (false == closed) { - closed = true; - in.close(); - } - } - private void ensureOpen() throws IOException { if (closed) { throw new IOException("Stream has been closed"); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index a139aff6c1032..e65ce5d1c3307 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -88,7 +88,7 @@ InputStream nextPacket(InputStream currentPacketIn) throws IOException { Cipher packetCipher = getPacketEncryptionCipher(secretKey, packetIv.array()); encryptionInputStream 
= new CipherInputStream(encryptionInputStream, packetCipher); encryptionInputStream = new SequenceInputStream(new ByteArrayInputStream(packetIv.array()), encryptionInputStream); - encryptionInputStream = new BufferOnMarkInputStream(encryptionInputStream, packetLength); + encryptionInputStream = new BufferOnMarkInputStream(encryptionInputStream, encryptedPacketLength); return new CountingInputStream(encryptionInputStream, false); } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 1e66ece9d13d6..8821ebb292db9 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -27,10 +27,6 @@ public class EncryptionPacketsInputStreamTests extends ESTestCase { - // test odd packet lengths (multiple of AES_BLOCK_SIZE or not) - // test non AES Key - // test read strategies (mark/reset) - private static int TEST_ARRAY_SIZE = 1 << 20; private static byte[] testPlaintextArray; private static SecretKey secretKey; @@ -108,8 +104,54 @@ public void testShortPacketSizes() throws Exception { public void testPacketSizeMultipleOfAESBlockSize() throws Exception { int packetSize = 1 + Randomness.get().nextInt(8); - testEncryptPacketWise(1 + Randomness.get().nextInt(8192), packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, - new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(1 + Randomness.get().nextInt(packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES), + packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES + Randomness.get().nextInt(8192), + packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, new DefaultBufferedReadAllStrategy()); + } + + public void testMarkAtPacketBoundary() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(512); + int size = 3 * packetSize + Randomness.get().nextInt(512); + int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); + int nonce = Randomness.get().nextInt(); + final byte[] referenceCiphertextArray; + try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, + plaintextOffset, size), secretKey, nonce, packetSize)) { + referenceCiphertextArray = encryptionInputStream.readAllBytes(); + } + int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, + plaintextOffset, size), secretKey, nonce, packetSize)) { + // mark at the beginning + encryptionInputStream.mark(encryptedPacketSize - 1); + byte[] test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); + // reset at the beginning + encryptionInputStream.reset(); + test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); + // reset at the beginning + encryptionInputStream.reset(); + test = 
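The switch from packetLength to encryptedPacketLength above matters because a mark placed on the encrypting stream must be able to buffer a whole packet as it appears on the wire, and that is larger than the plaintext packet: each packet is emitted as IV, then ciphertext, then the GCM authentication tag, which is also how encryptedPacketSize is computed in testMarkAtPacketBoundary. A small sketch of that arithmetic, assuming the conventional 12-byte GCM IV and 16-byte tag; the real values live in EncryptedRepository constants that are not shown in this hunk.

// Illustrative only: size of one encrypted packet relative to its plaintext packet.
public class EncryptedPacketSize {
    static final int ASSUMED_GCM_IV_SIZE_IN_BYTES = 12;  // assumption; the plugin keeps its own constant
    static final int ASSUMED_GCM_TAG_SIZE_IN_BYTES = 16; // assumption; the plugin keeps its own constant

    // AES-GCM ciphertext is as long as the plaintext, so the per-packet overhead is
    // just the prepended IV plus the appended authentication tag.
    static long encryptedPacketLength(long plaintextPacketLength) {
        return ASSUMED_GCM_IV_SIZE_IN_BYTES + plaintextPacketLength + ASSUMED_GCM_TAG_SIZE_IN_BYTES;
    }

    public static void main(String[] args) {
        long packetLength = 64 * 1024; // any plaintext packet size
        System.out.println(packetLength + " plaintext bytes -> " + encryptedPacketLength(packetLength) + " bytes on the wire");
    }
}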
encryptionInputStream.readNBytes(encryptedPacketSize); + assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); + // mark at the second packet boundary + encryptionInputStream.mark(encryptedPacketSize + 1); + test = encryptionInputStream.readNBytes(encryptedPacketSize + 1); + assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); + // reset at the second packet boundary + encryptionInputStream.reset(); + test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); + // mark just before third packet boundary + encryptionInputStream.mark(1); + test = encryptionInputStream.readNBytes(1); + assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize - 1, test, 0, test.length); + // reset before packet boundary + encryptionInputStream.reset(); + test = encryptionInputStream.readNBytes(2); + assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize - 1, test, 0, test.length); + } } private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { From 9eb9bcfa23db7cada3c6a01411e8666f593674e6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 2 Dec 2019 12:30:48 +0200 Subject: [PATCH 006/142] A few bugs... --- .../encrypted/BufferOnMarkInputStream.java | 22 ++++++++----- .../EncryptionPacketsInputStream.java | 2 +- .../EncryptionPacketsInputStreamTests.java | 31 ++++++++++--------- 3 files changed, 32 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 1e0e9886441d6..f303fb4f2ca1c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -51,9 +51,12 @@ public int read(byte[] b, int off, int len) throws IOException { return bytesRead; } if (markCalled) { - if (false == writeToBuffer(b, off, len)) { + if (bytesRead > getRemainingBufferCapacity()) { // could not fully write to buffer, invalidate mark markCalled = false; + head = tail = position = 0; + } else { + writeToBuffer(b, off, bytesRead); } } return bytesRead; @@ -172,16 +175,16 @@ private int getRemainingBufferCapacity() { } } - private boolean writeToBuffer(byte[] b, int off, int len) { - if (len > getRemainingBufferCapacity()) { - return false; - } + private void writeToBuffer(byte[] b, int off, int len) { while (len > 0) { final int writeLength; if (head <= tail) { - writeLength = Math.min(len, ringBuffer.length - tail); + writeLength = Math.min(len, ringBuffer.length - tail - (head == 0 ? 
1 : 0)); } else { - writeLength = Math.min(len, head - tail); + writeLength = Math.min(len, head - tail - 1); + } + if (writeLength == 0) { + throw new IllegalStateException(); } System.arraycopy(b, off, ringBuffer, tail, writeLength); tail += writeLength; @@ -189,9 +192,12 @@ private boolean writeToBuffer(byte[] b, int off, int len) { len -= writeLength; if (tail == ringBuffer.length) { tail = 0; + // tail wrap-around overwrites head + if (head == 0) { + throw new IllegalStateException(); + } } } - return true; } private void ensureOpen() throws IOException { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index e65ce5d1c3307..fde2887a0a257 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -76,8 +76,8 @@ InputStream nextPacket(InputStream currentPacketIn) throws IOException { throw new NoSuchElementException(); } if (markSourceOnNextPacket != -1) { - markSourceOnNextPacket = -1; source.mark(markSourceOnNextPacket); + markSourceOnNextPacket = -1; } InputStream encryptionInputStream = new PrefixInputStream(source, packetLength, false); packetIv.putLong(4, counter++); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 8821ebb292db9..3e2e40cb8e573 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -112,7 +112,7 @@ public void testPacketSizeMultipleOfAESBlockSize() throws Exception { public void testMarkAtPacketBoundary() throws Exception { int packetSize = 3 + Randomness.get().nextInt(512); - int size = 3 * packetSize + Randomness.get().nextInt(512); + int size = 4 * packetSize + Randomness.get().nextInt(512); int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); int nonce = Randomness.get().nextInt(); final byte[] referenceCiphertextArray; @@ -125,32 +125,35 @@ public void testMarkAtPacketBoundary() throws Exception { plaintextOffset, size), secretKey, nonce, packetSize)) { // mark at the beginning encryptionInputStream.mark(encryptedPacketSize - 1); - byte[] test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + byte[] test = encryptionInputStream.readNBytes(1 + Randomness.get().nextInt(encryptedPacketSize)); assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // reset at the beginning encryptionInputStream.reset(); - test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + test = encryptionInputStream.readNBytes(1 + Randomness.get().nextInt(encryptedPacketSize)); assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // reset at the beginning encryptionInputStream.reset(); test = encryptionInputStream.readNBytes(encryptedPacketSize); assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // mark at the second packet 
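The (head == 0 ? 1 : 0) term above is the subtle part of the write path: when head sits at index 0, the sentinel free slot is the last cell of the array, so a contiguous write has to stop one byte short of the array end instead of wrapping onto head. A tiny runnable illustration of that arithmetic; the method only restates the writeLength expression for the head <= tail branch.

// Worked example of the wrap-around guard in writeToBuffer above.
public class WriteLengthExample {
    static int writeLength(int ringLength, int head, int tail, int len) {
        // same expression as the head <= tail branch in the patch
        return Math.min(len, ringLength - tail - (head == 0 ? 1 : 0));
    }

    public static void main(String[] args) {
        int ringLength = 8; // a bufferSize of 7 plus the sentinel slot
        // head != 0: the chunk may run to the end of the array and wrap on the next pass
        System.out.println(writeLength(ringLength, 2, 5, 100)); // prints 3
        // head == 0: index 7 must stay free, otherwise tail would wrap onto head and a
        // full buffer would become indistinguishable from an empty one
        System.out.println(writeLength(ringLength, 0, 5, 100)); // prints 2
    }
}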
boundary - encryptionInputStream.mark(encryptedPacketSize + 1); - test = encryptionInputStream.readNBytes(encryptedPacketSize + 1); + encryptionInputStream.mark(2 * encryptedPacketSize + 1); + test = encryptionInputStream.readNBytes(encryptedPacketSize + 1 + Randomness.get().nextInt(encryptedPacketSize)); assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); // reset at the second packet boundary encryptionInputStream.reset(); - test = encryptionInputStream.readNBytes(encryptedPacketSize - 1); + int middlePacketOffset = Randomness.get().nextInt(encryptedPacketSize); + test = encryptionInputStream.readNBytes(middlePacketOffset); assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); - // mark just before third packet boundary - encryptionInputStream.mark(1); - test = encryptionInputStream.readNBytes(1); - assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize - 1, test, 0, test.length); - // reset before packet boundary + // mark before third packet boundary + encryptionInputStream.mark(encryptedPacketSize - middlePacketOffset); + // read up to the third packet boundary + test = encryptionInputStream.readNBytes(encryptedPacketSize - middlePacketOffset); + assertSubArray(referenceCiphertextArray, encryptedPacketSize + middlePacketOffset, test, 0, test.length); + // reset before the third packet boundary encryptionInputStream.reset(); - test = encryptionInputStream.readNBytes(2); - assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize - 1, test, 0, test.length); + test = encryptionInputStream.readNBytes( + encryptedPacketSize - middlePacketOffset + 1 + Randomness.get().nextInt(encryptedPacketSize)); + assertSubArray(referenceCiphertextArray, encryptedPacketSize - middlePacketOffset, test, 0, test.length); } } @@ -191,7 +194,7 @@ private void assertSubArray(byte[] arr1, int offset1, byte[] arr2, int offset2, Objects.checkFromIndexSize(offset1, length, arr1.length); Objects.checkFromIndexSize(offset2, length, arr2.length); for (int i = 0; i < length; i++) { - assertThat(arr1[offset1 + i], Matchers.is(arr2[offset2 + i])); + assertThat("Mismatch at index [" + i + "]", arr1[offset1 + i], Matchers.is(arr2[offset2 + i])); } } From 5919a1190fdeb0167d23b0ace7702e953b9b89e2 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 2 Dec 2019 16:58:33 +0200 Subject: [PATCH 007/142] BufferOnMark bug --- .../encrypted/BufferOnMarkInputStream.java | 1 + .../BufferOnMarkInputStreamTests.java | 108 ++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index f303fb4f2ca1c..5f0b3c7a7fe48 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -137,6 +137,7 @@ public void reset() throws IOException { throw new IOException("Mark not called or has been invalidated"); } resetCalled = true; + position = head; } @Override diff --git 
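The one-line position = head added to reset() above is what actually rewinds the replay cursor; without it, the bytes buffered since mark() were never served again, which is what the new test class below exercises. For reference, this is the plain JDK contract the class is meant to follow, shown here against BufferedInputStream so it can be run on its own.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;

// The mark/reset contract the fix restores: bytes read after mark() must be
// replayed, byte for byte, after reset().
public class MarkResetContract {
    public static void main(String[] args) throws IOException {
        byte[] data = new byte[64];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }
        BufferedInputStream in = new BufferedInputStream(new ByteArrayInputStream(data));
        in.mark(16);                       // remember the current position
        byte[] first = in.readNBytes(16);  // consume 16 bytes past the mark
        in.reset();                        // rewind to the marked position
        byte[] second = in.readNBytes(16); // must replay exactly the same 16 bytes
        if (false == Arrays.equals(first, second)) {
            throw new AssertionError("mark/reset did not replay the same bytes");
        }
        System.out.println("replayed " + second.length + " bytes after reset");
    }
}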
a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java new file mode 100644 index 0000000000000..bac1a4fa7e260 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -0,0 +1,108 @@ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.BeforeClass; + +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.security.SecureRandom; +import java.util.Arrays; + +public class BufferOnMarkInputStreamTests extends ESTestCase { + + private static int TEST_ARRAY_SIZE = 128; + private static byte[] testArray; + + @BeforeClass + static void createTestArray() throws Exception { + testArray = new byte[TEST_ARRAY_SIZE]; + for (int i = 0; i < testArray.length; i++) { + testArray[i] = (byte) i; + } + } + + public void testSimpleMarkResetAtBeginning() throws Exception { + for (int length = 1; length <= 8; length++) { + for (int mark = 1; mark <= length; mark++) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { + in.mark(mark); + byte[] test1 = in.readNBytes(mark); + assertArray(0, test1); + in.reset(); + byte[] test2 = in.readNBytes(mark); + assertArray(0, test2); + } + } + } + } + + public void testMarkResetAtBeginning() throws Exception { + for (int length = 1; length <= 8; length++) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { + in.mark(length); + for (int readLen = 1; readLen <= length; readLen++) { + byte[] test1 = in.readNBytes(readLen); + assertArray(0, test1); + in.reset(); + } + } + } + } + + public void testSimpleMarkResetEverywhere() throws Exception { + for (int length = 1; length <= 8; length++) { + for (int offset = 0; offset < length; offset++) { + for (int mark = 1; mark <= length - offset; mark++) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { + // skip first offset bytes + in.readNBytes(offset); + in.mark(mark); + byte[] test1 = in.readNBytes(mark); + assertArray(offset, test1); + in.reset(); + byte[] test2 = in.readNBytes(mark); + assertArray(offset, test2); + } + } + } + } + } + + private void assertArray(int offset, byte[] test) { + for (int i = 0; i < test.length; i++) { + Assert.assertThat(test[i], Matchers.is(testArray[offset + i])); + } + } + + static class NoMarkByteArrayInputStream extends ByteArrayInputStream { + + public NoMarkByteArrayInputStream(byte[] buf) { + super(buf); + } + + public NoMarkByteArrayInputStream(byte[] buf, int offset, int length) { + super(buf, offset, length); + } + + @Override + public void mark(int readlimit) { + } + + @Override + public boolean markSupported() { + return false; + } + + @Override + public void reset() { + throw new IllegalStateException("Mark not called or has been invalidated"); + } + } + + +} From 47f6aeac3e402f01b835f1c193e1b48b6a8bbd57 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 2 Dec 2019 19:06:50 +0200 Subject: [PATCH 008/142] More more more 
bugs! --- .../encrypted/BufferOnMarkInputStream.java | 9 ++- .../BufferOnMarkInputStreamTests.java | 68 +++++++++++++++++++ 2 files changed, 76 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 5f0b3c7a7fe48..88980f63f8349 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -121,7 +121,14 @@ public void mark(int readlimit) { ringBuffer = new byte[bufferSize + 1]; head = tail = position = 0; } else { - head = position; + if (resetCalled) { + // mark after reset + head = position; + } else { + // discard buffer leftovers + tail = head; + position = head; + } } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index bac1a4fa7e260..8fbb367611bef 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -51,6 +51,14 @@ public void testMarkResetAtBeginning() throws Exception { in.reset(); } } + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { + in.mark(length); + for (int readLen = length; readLen >= 1; readLen--) { + byte[] test1 = in.readNBytes(readLen); + assertArray(0, test1); + in.reset(); + } + } } } @@ -73,6 +81,66 @@ public void testSimpleMarkResetEverywhere() throws Exception { } } + public void testMarkResetEverywhere() throws Exception { + for (int length = 1; length <= 8; length++) { + for (int offset = 0; offset < length; offset++) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), + length)) { + // skip first offset bytes + in.readNBytes(offset); + in.mark(length); + for (int readLen = 1; readLen <= length - offset; readLen++) { + byte[] test = in.readNBytes(readLen); + assertArray(offset, test); + in.reset(); + } + } + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), + length)) { + // skip first offset bytes + in.readNBytes(offset); + in.mark(length); + for (int readLen = length - offset; readLen >= 1; readLen--) { + byte[] test = in.readNBytes(readLen); + assertArray(offset, test); + in.reset(); + } + } + } + } + } + + public void testDoubleMarkEverywhere() throws Exception { + for (int length = 1; length <= 16; length++) { + for (int offset = 0; offset < length; offset++) { + for (int readLen = 1; readLen <= length - offset; readLen++) { + for (int markLen = 1; markLen <= length - offset; markLen++) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), + length)) { + in.readNBytes(offset); + // first mark + in.mark(length - offset); + byte[] test = in.readNBytes(readLen); + assertArray(offset, test); + // reset to first + in.reset(); + // advance 
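The two branches added to mark() above cover the awkward case of marking while a previous reset is still being replayed: the new mark point then lies inside the buffer, so only head moves and the not-yet-replayed bytes stay reachable; otherwise the buffered bytes are stale and can be dropped. The scenario, spelled out against the JDK's BufferedInputStream, which the new testDoubleMarkEverywhere holds this class to as well.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;

// A second mark() issued mid-replay must anchor inside the already-buffered bytes,
// so a later reset() rewinds to the second mark rather than the first one.
public class DoubleMarkScenario {
    public static void main(String[] args) throws IOException {
        byte[] data = {0, 1, 2, 3, 4, 5, 6, 7};
        BufferedInputStream in = new BufferedInputStream(new ByteArrayInputStream(data));
        in.mark(8);
        in.readNBytes(4);      // consume 0..3, all of them buffered for replay
        in.reset();            // rewind to offset 0
        in.readNBytes(2);      // replay 0 and 1 from the buffer
        in.mark(8);            // second mark, anchored at offset 2, mid-replay
        int next = in.read();  // returns 2, still served from the buffer
        in.reset();            // rewinds to the second mark, i.e. offset 2 again
        if (in.read() != next) {
            throw new AssertionError("second mark was not honoured");
        }
        System.out.println("second mark anchored at offset 2 as expected");
    }
}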
before/after the first read length + test = in.readNBytes(markLen); + assertArray(offset, test); + // second mark + in.mark(length - offset - markLen); + for (int readLen2 = 1; readLen2 <= length - offset - markLen; readLen2++) { + byte[] test2 = in.readNBytes(readLen2); + assertArray(offset + markLen, test2); + in.reset(); + } + } + } + } + } + } + } + private void assertArray(int offset, byte[] test) { for (int i = 0; i < test.length; i++) { Assert.assertThat(test[i], Matchers.is(testArray[offset + i])); From 8263062d8b021a6b8139349621d965145c569be9 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 3 Dec 2019 12:38:10 +0200 Subject: [PATCH 009/142] Mad tests --- .../encrypted/BufferOnMarkInputStream.java | 23 +++++--- .../BufferOnMarkInputStreamTests.java | 53 +++++++++++++++++-- 2 files changed, 63 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 88980f63f8349..fb152dff1ca83 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -8,19 +8,21 @@ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.util.Arrays; import java.util.Objects; public final class BufferOnMarkInputStream extends FilterInputStream { - private final int bufferSize; - private byte[] ringBuffer; - private int head; - private int tail; - private int position; - private boolean markCalled; - private boolean resetCalled; - private boolean closed; + // protected for tests + protected final int bufferSize; + protected byte[] ringBuffer; + protected int head; + protected int tail; + protected int position; + protected boolean markCalled; + protected boolean resetCalled; + protected boolean closed; public BufferOnMarkInputStream(InputStream in, int bufferSize) { super(Objects.requireNonNull(in)); @@ -214,4 +216,9 @@ private void ensureOpen() throws IOException { } } + // only for tests + protected InputStream getWrapped() { + return in; + } + } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 8fbb367611bef..de674626fce6d 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -27,7 +27,7 @@ static void createTestArray() throws Exception { } public void testSimpleMarkResetAtBeginning() throws Exception { - for (int length = 1; length <= 8; length++) { + for (int length = 1; length <= 16; length++) { for (int mark = 1; mark <= length; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { in.mark(mark); @@ -42,7 +42,7 @@ public void testSimpleMarkResetAtBeginning() throws Exception { } public void testMarkResetAtBeginning() throws Exception { - for (int length = 1; length <= 8; length++) { + for (int length = 1; length <= 16; 
length++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.mark(length); for (int readLen = 1; readLen <= length; readLen++) { @@ -63,7 +63,7 @@ public void testMarkResetAtBeginning() throws Exception { } public void testSimpleMarkResetEverywhere() throws Exception { - for (int length = 1; length <= 8; length++) { + for (int length = 1; length <= 16; length++) { for (int offset = 0; offset < length; offset++) { for (int mark = 1; mark <= length - offset; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { @@ -82,13 +82,14 @@ public void testSimpleMarkResetEverywhere() throws Exception { } public void testMarkResetEverywhere() throws Exception { - for (int length = 1; length <= 8; length++) { + for (int length = 1; length <= 16; length++) { for (int offset = 0; offset < length; offset++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { // skip first offset bytes in.readNBytes(offset); in.mark(length); + // increasing read lengths for (int readLen = 1; readLen <= length - offset; readLen++) { byte[] test = in.readNBytes(readLen); assertArray(offset, test); @@ -100,6 +101,7 @@ public void testMarkResetEverywhere() throws Exception { // skip first offset bytes in.readNBytes(offset); in.mark(length); + // decreasing read lengths for (int readLen = length - offset; readLen >= 1; readLen--) { byte[] test = in.readNBytes(readLen); assertArray(offset, test); @@ -141,6 +143,44 @@ public void testDoubleMarkEverywhere() throws Exception { } } + public void testThreeMarkResetMarkSteps() throws Exception { + int length = 16; + int stepLen = 8; + BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), stepLen); + testMarkResetMarkStep(in, 0, length, stepLen, 2); + } + + private void testMarkResetMarkStep(BufferOnMarkInputStream stream, int offset, int length, int stepLen, int step) throws Exception { + stream.mark(stepLen); + for (int readLen = 1; readLen <= Math.min(stepLen, length - offset); readLen++) { + for (int markLen = 1; markLen <= Math.min(stepLen, length - offset); markLen++) { + byte[] test = stream.readNBytes(readLen); + assertArray(offset, test); + stream.reset(); + test = stream.readNBytes(markLen); + assertArray(offset, test); + if (step > 0) { + int nextStepOffset = ((NoMarkByteArrayInputStream) stream.getWrapped()).getPos(); + BufferOnMarkInputStream cloneStream = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, + nextStepOffset, length - nextStepOffset), stepLen); + if (stream.ringBuffer != null) { + cloneStream.ringBuffer = Arrays.copyOf(stream.ringBuffer, stream.ringBuffer.length); + } else { + cloneStream.ringBuffer = null; + } + cloneStream.head = stream.head; + cloneStream.tail = stream.tail; + cloneStream.position = stream.position; + cloneStream.markCalled = stream.markCalled; + cloneStream.resetCalled = stream.resetCalled; + cloneStream.closed = stream.closed; + testMarkResetMarkStep(cloneStream, offset + markLen, length, stepLen, step - 1); + } + stream.reset(); + } + } + } + private void assertArray(int offset, byte[] test) { for (int i = 0; i < test.length; i++) { Assert.assertThat(test[i], Matchers.is(testArray[offset + i])); @@ -157,6 +197,10 @@ public NoMarkByteArrayInputStream(byte[] buf, int offset, int length) { super(buf, offset, length); } + int 
getPos() { + return pos; + } + @Override public void mark(int readlimit) { } @@ -172,5 +216,4 @@ public void reset() { } } - } From cf97ba2d20b8fcb98bd6977471bd071127841320 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 3 Dec 2019 22:18:57 +0200 Subject: [PATCH 010/142] Manic testing --- .../encrypted/BufferOnMarkInputStream.java | 35 ++++++- .../EncryptionPacketsInputStream.java | 2 +- .../BufferOnMarkInputStreamTests.java | 96 ++++++++++++++++++- .../EncryptionPacketsInputStreamTests.java | 2 +- 4 files changed, 126 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index fb152dff1ca83..51e5992066114 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -128,8 +128,7 @@ public void mark(int readlimit) { head = position; } else { // discard buffer leftovers - tail = head; - position = head; + head = tail = position = 0; } } } @@ -175,7 +174,11 @@ private int readFromBuffer(byte[] b, int off, int len) { return readLength; } - private int getRemainingBufferCapacity() { + // protected for tests + protected int getRemainingBufferCapacity() { + if (ringBuffer == null) { + return 0; + } if (head == tail) { return ringBuffer.length - 1; } else if (head < tail) { @@ -185,6 +188,32 @@ private int getRemainingBufferCapacity() { } } + //protected for tests + protected int getRemainingBufferToRead() { + if (ringBuffer == null) { + return 0; + } + if (head <= tail) { + return tail - position; + } else if (position >= head) { + return ringBuffer.length - position + tail; + } else { + return tail - position; + } + } + + // protected for tests + protected int getCurrentBufferCount() { + if (ringBuffer == null) { + return 0; + } + if (head <= tail) { + return tail - head; + } else { + return ringBuffer.length - head + tail; + } + } + private void writeToBuffer(byte[] b, int off, int len) { while (len > 0) { final int writeLength; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index fde2887a0a257..15bbc4d4e4a45 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -83,7 +83,7 @@ InputStream nextPacket(InputStream currentPacketIn) throws IOException { packetIv.putLong(4, counter++); if (counter == EncryptedRepository.PACKET_START_COUNTER) { // counter wrap around - throw new IOException("Maximum packet count limit exceeded"); + throw new Error("Maximum packet count limit exceeded"); } Cipher packetCipher = getPacketEncryptionCipher(secretKey, packetIv.array()); encryptionInputStream = new CipherInputStream(encryptionInputStream, packetCipher); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java 
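The three introspection helpers added above (getCurrentBufferCount, getRemainingBufferToRead, getRemainingBufferCapacity) are tied together by a simple accounting invariant that the assertions in the tests below keep probing: once the first mark() has allocated the ring buffer, buffered bytes plus free space always equals bufferSize, and the replayable portion can never exceed what is buffered. A test-side restatement of that invariant, as a hypothetical helper that is not part of the patch.

// Could sit next to the tests as a sanity check of the buffer accounting.
public final class BufferAccounting {
    static void assertBufferAccounting(int bufferSize, int bufferCount, int remainingCapacity, int remainingToRead) {
        if (bufferCount + remainingCapacity != bufferSize) {
            throw new AssertionError("buffered [" + bufferCount + "] + free [" + remainingCapacity
                    + "] does not add up to the buffer size [" + bufferSize + "]");
        }
        if (remainingToRead < 0 || remainingToRead > bufferCount) {
            throw new AssertionError("replayable [" + remainingToRead + "] must lie within the buffered count ["
                    + bufferCount + "]");
        }
    }
}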
b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index de674626fce6d..894f1b35f171d 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -15,12 +15,11 @@ public class BufferOnMarkInputStreamTests extends ESTestCase { - private static int TEST_ARRAY_SIZE = 128; private static byte[] testArray; @BeforeClass static void createTestArray() throws Exception { - testArray = new byte[TEST_ARRAY_SIZE]; + testArray = new byte[128]; for (int i = 0; i < testArray.length; i++) { testArray[i] = (byte) i; } @@ -31,11 +30,19 @@ public void testSimpleMarkResetAtBeginning() throws Exception { for (int mark = 1; mark <= length; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { in.mark(mark); + assertThat(in.getCurrentBufferCount(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(mark)); byte[] test1 = in.readNBytes(mark); assertArray(0, test1); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); in.reset(); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); byte[] test2 = in.readNBytes(mark); assertArray(0, test2); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); } } } @@ -45,6 +52,7 @@ public void testMarkResetAtBeginning() throws Exception { for (int length = 1; length <= 16; length++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.mark(length); + // increasing length read/reset for (int readLen = 1; readLen <= length; readLen++) { byte[] test1 = in.readNBytes(readLen); assertArray(0, test1); @@ -53,6 +61,7 @@ public void testMarkResetAtBeginning() throws Exception { } try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.mark(length); + // decreasing length read/reset for (int readLen = length; readLen >= 1; readLen--) { byte[] test1 = in.readNBytes(readLen); assertArray(0, test1); @@ -70,10 +79,18 @@ public void testSimpleMarkResetEverywhere() throws Exception { // skip first offset bytes in.readNBytes(offset); in.mark(mark); + assertThat(in.getCurrentBufferCount(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(mark)); byte[] test1 = in.readNBytes(mark); assertArray(offset, test1); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); in.reset(); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); byte[] test2 = in.readNBytes(mark); + assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); assertArray(offset, test2); } } @@ -120,21 +137,69 @@ public void testDoubleMarkEverywhere() throws Exception { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.readNBytes(offset); + assertThat(in.getCurrentBufferCount(), 
Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); // first mark in.mark(length - offset); + assertThat(in.getCurrentBufferCount(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); byte[] test = in.readNBytes(readLen); assertArray(offset, test); + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); // reset to first in.reset(); + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); // advance before/after the first read length test = in.readNBytes(markLen); + assertThat(in.getCurrentBufferCount(), Matchers.is(Math.max(readLen, markLen))); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - Math.max(readLen, markLen))); + if (markLen <= readLen) { + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + } else { + assertThat(in.resetCalled, Matchers.is(false)); + } assertArray(offset, test); // second mark in.mark(length - offset - markLen); + if (markLen <= readLen) { + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen + markLen)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + } else { + assertThat(in.getCurrentBufferCount(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + } for (int readLen2 = 1; readLen2 <= length - offset - markLen; readLen2++) { byte[] test2 = in.readNBytes(readLen2); + if (markLen + readLen2 <= readLen) { + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen + markLen)); + assertThat(in.resetCalled, Matchers.is(true)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen - readLen2)); + } else { + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen2)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen2)); + assertThat(in.resetCalled, Matchers.is(false)); + } assertArray(offset + markLen, test2); in.reset(); + assertThat(in.resetCalled, Matchers.is(true)); + if (markLen + readLen2 <= readLen) { + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen + markLen)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + } else { + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen2)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen2)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen2)); + } } } } @@ -143,9 +208,28 @@ public void testDoubleMarkEverywhere() throws Exception { } } + public void testMarkWithoutReset() throws Exception { + int maxMark = 8; + BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, testArray.length), maxMark); + int offset = 0; + while (offset < testArray.length) { + int readLen = Math.min(1 + Randomness.get().nextInt(maxMark), 
testArray.length - offset); + in.mark(Randomness.get().nextInt(readLen)); + assertThat(in.getCurrentBufferCount(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(maxMark)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + byte[] test = in.readNBytes(readLen); + assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(maxMark - readLen)); + assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); + assertArray(offset, test); + offset += readLen; + } + } + public void testThreeMarkResetMarkSteps() throws Exception { - int length = 16; - int stepLen = 8; + int length = 8 + Randomness.get().nextInt(8); + int stepLen = 4 + Randomness.get().nextInt(4); BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), stepLen); testMarkResetMarkStep(in, 0, length, stepLen, 2); } @@ -154,9 +238,12 @@ private void testMarkResetMarkStep(BufferOnMarkInputStream stream, int offset, i stream.mark(stepLen); for (int readLen = 1; readLen <= Math.min(stepLen, length - offset); readLen++) { for (int markLen = 1; markLen <= Math.min(stepLen, length - offset); markLen++) { + // read ahead byte[] test = stream.readNBytes(readLen); assertArray(offset, test); + // reset back stream.reset(); + // read ahead different length test = stream.readNBytes(markLen); assertArray(offset, test); if (step > 0) { @@ -176,6 +263,7 @@ private void testMarkResetMarkStep(BufferOnMarkInputStream stream, int offset, i cloneStream.closed = stream.closed; testMarkResetMarkStep(cloneStream, offset + markLen, length, stepLen, step - 1); } + // reset back stream.reset(); } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 3e2e40cb8e573..f180900a16cee 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -153,7 +153,7 @@ public void testMarkAtPacketBoundary() throws Exception { encryptionInputStream.reset(); test = encryptionInputStream.readNBytes( encryptedPacketSize - middlePacketOffset + 1 + Randomness.get().nextInt(encryptedPacketSize)); - assertSubArray(referenceCiphertextArray, encryptedPacketSize - middlePacketOffset, test, 0, test.length); + assertSubArray(referenceCiphertextArray, encryptedPacketSize + middlePacketOffset, test, 0, test.length); } } From 3c82ba91190759945122dba5516036963ed9992e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 4 Dec 2019 20:44:13 +0200 Subject: [PATCH 011/142] BufferOnMarkInputStreamTests completed --- .../encrypted/BufferOnMarkInputStream.java | 69 +- .../EncryptionPacketsInputStream.java | 2 +- .../BufferOnMarkInputStreamTests.java | 615 +++++++++++++++++- 3 files changed, 627 insertions(+), 59 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 51e5992066114..1f7cdd79ff356 100644 --- 
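Stepping back, the reason all of this mark/reset machinery needs its own class (and this much testing) is that the stream chain built in EncryptionPacketsInputStream bottoms out in a CipherInputStream, and CipherInputStream does not support mark/reset at all, so the capability has to be layered on top with a bounded in-memory buffer. A tiny self-contained illustration of that gap, using only JDK types; the key, transformation and all-zero IV here are placeholders for the demo, not values taken from the plugin.

import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.GCMParameterSpec;
import java.io.ByteArrayInputStream;
import java.io.InputStream;

public class CipherStreamMarkGap {
    public static void main(String[] args) throws Exception {
        SecretKey key = KeyGenerator.getInstance("AES").generateKey();
        Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
        // all-zero IV and 128-bit tag purely for the demo; never reuse an IV with a real key
        cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(128, new byte[12]));
        InputStream encrypting = new CipherInputStream(new ByteArrayInputStream(new byte[32]), cipher);
        // prints "false": mark/reset has to be provided by a wrapper such as BufferOnMarkInputStream
        System.out.println("CipherInputStream markSupported: " + encrypting.markSupported());
    }
}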
a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -26,7 +26,11 @@ public final class BufferOnMarkInputStream extends FilterInputStream { public BufferOnMarkInputStream(InputStream in, int bufferSize) { super(Objects.requireNonNull(in)); + if (bufferSize <= 0) { + throw new IllegalArgumentException("The buffersize constructor argument must be a strictly positive value"); + } this.bufferSize = bufferSize; + // the ring buffer is lazily allocated upon the first mark call this.ringBuffer = null; this.head = this.tail = this.position = -1; this.markCalled = this.resetCalled = false; @@ -40,9 +44,11 @@ public int read(byte[] b, int off, int len) throws IOException { if (len == 0) { return 0; } + // firstly try reading any buffered bytes in case this read call is part of rewind following a reset call if (resetCalled) { int bytesRead = readFromBuffer(b, off, len); if (bytesRead == 0) { + // rewinding is complete, no more bytes to replay resetCalled = false; } else { return bytesRead; @@ -52,6 +58,7 @@ public int read(byte[] b, int off, int len) throws IOException { if (bytesRead <= 0) { return bytesRead; } + // if mark has been previously called, buffer all the read bytes if (markCalled) { if (bytesRead > getRemainingBufferCapacity()) { // could not fully write to buffer, invalidate mark @@ -82,6 +89,9 @@ public long skip(long n) throws IOException { return 0; } if (false == markCalled) { + if (resetCalled) { + throw new IllegalStateException("Reset cannot be called without a preceding mark invocation"); + } return in.skip(n); } long remaining = n; @@ -114,10 +124,15 @@ public int available() throws IOException { @Override public void mark(int readlimit) { + // readlimit is otherwise ignored but this defensively fails if the caller is expecting to be able to mark/reset more than this + // stream can accommodate if (readlimit > bufferSize) { throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + bufferSize + "]"); + } else if (readlimit < 0) { + throw new IllegalArgumentException("Readlimit value [" + readlimit + "] cannot be negative"); } markCalled = true; + // lazily allocate the mark ring buffer if (ringBuffer == null) { // "+ 1" for the full-buffer sentinel free element ringBuffer = new byte[bufferSize + 1]; @@ -127,7 +142,7 @@ public void mark(int readlimit) { // mark after reset head = position; } else { - // discard buffer leftovers + // discard any leftovers in buffer head = tail = position = 0; } } @@ -174,10 +189,35 @@ private int readFromBuffer(byte[] b, int off, int len) { return readLength; } + private void writeToBuffer(byte[] b, int off, int len) { + while (len > 0) { + final int writeLength; + if (head <= tail) { + writeLength = Math.min(len, ringBuffer.length - tail - (head == 0 ? 
1 : 0)); + } else { + writeLength = Math.min(len, head - tail - 1); + } + if (writeLength <= 0) { + throw new IllegalStateException("No space left in the mark buffer"); + } + System.arraycopy(b, off, ringBuffer, tail, writeLength); + tail += writeLength; + off += writeLength; + len -= writeLength; + if (tail == ringBuffer.length) { + tail = 0; + // tail wrap-around overwrites head + if (head == 0) { + throw new IllegalStateException("Possible overflow of the mark buffer"); + } + } + } + } + // protected for tests protected int getRemainingBufferCapacity() { if (ringBuffer == null) { - return 0; + return bufferSize; } if (head == tail) { return ringBuffer.length - 1; @@ -214,31 +254,6 @@ protected int getCurrentBufferCount() { } } - private void writeToBuffer(byte[] b, int off, int len) { - while (len > 0) { - final int writeLength; - if (head <= tail) { - writeLength = Math.min(len, ringBuffer.length - tail - (head == 0 ? 1 : 0)); - } else { - writeLength = Math.min(len, head - tail - 1); - } - if (writeLength == 0) { - throw new IllegalStateException(); - } - System.arraycopy(b, off, ringBuffer, tail, writeLength); - tail += writeLength; - off += writeLength; - len -= writeLength; - if (tail == ringBuffer.length) { - tail = 0; - // tail wrap-around overwrites head - if (head == 0) { - throw new IllegalStateException(); - } - } - } - } - private void ensureOpen() throws IOException { if (closed) { throw new IOException("Stream has been closed"); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 15bbc4d4e4a45..fde2887a0a257 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -83,7 +83,7 @@ InputStream nextPacket(InputStream currentPacketIn) throws IOException { packetIv.putLong(4, counter++); if (counter == EncryptedRepository.PACKET_START_COUNTER) { // counter wrap around - throw new Error("Maximum packet count limit exceeded"); + throw new IOException("Maximum packet count limit exceeded"); } Cipher packetCipher = getPacketEncryptionCipher(secretKey, packetIv.array()); encryptionInputStream = new CipherInputStream(encryptionInputStream, packetCipher); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 894f1b35f171d..1798b23e5c520 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -1,17 +1,29 @@ package org.elasticsearch.repositories.encrypted; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.BeforeClass; +import org.mockito.Mockito; +import org.mockito.stubbing.Answer; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; +import java.io.BufferedInputStream; import 
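The hunk above also puts the counter wrap-around failure back to an IOException: the per-packet IV is derived from a fixed nonce plus a monotonically increasing packet counter (packetIv.putLong(4, counter++) earlier in this method), and once the counter would wrap, continuing would mean reusing an IV under the same key, which GCM cannot tolerate. A sketch of the implied IV layout, with the assumed details clearly marked: the 12-byte size, the nonce offset and the start value of the counter are guesses consistent with the putLong(4, ...) call, not constants quoted from the plugin.

import java.nio.ByteBuffer;
import java.util.Arrays;

// Hypothetical layout: 12-byte GCM IV = 4-byte nonce || 8-byte packet counter.
public class PacketIvLayoutSketch {
    public static void main(String[] args) {
        int nonce = 0x01020304;              // hypothetical per-blob nonce
        long counter = Long.MIN_VALUE;       // hypothetical start value of the packet counter
        ByteBuffer packetIv = ByteBuffer.allocate(12);
        packetIv.putInt(0, nonce);           // assumed: nonce occupies bytes 0..3
        for (int packet = 0; packet < 3; packet++) {
            packetIv.putLong(4, counter++);  // counter occupies bytes 4..11, unique per packet
            System.out.println("packet " + packet + " IV: " + Arrays.toString(packetIv.array()));
        }
        // if counter ever returned to its start value, the next IV would repeat an earlier
        // one; the stream above refuses to do that and fails with an IOException instead
    }
}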
java.io.ByteArrayInputStream; +import java.io.EOFException; import java.io.IOException; +import java.io.InputStream; import java.security.SecureRandom; import java.util.Arrays; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class BufferOnMarkInputStreamTests extends ESTestCase { @@ -25,8 +37,488 @@ static void createTestArray() throws Exception { } } - public void testSimpleMarkResetAtBeginning() throws Exception { - for (int length = 1; length <= 16; length++) { + public void testCloseRejectsSuccessiveCalls() throws Exception { + int bufferSize = 3 + Randomness.get().nextInt(32); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.close(); + int bytesReadBefore = bytesRead.get(); + IOException e = expectThrows(IOException.class, () -> { + test.read(); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + byte[] b = new byte[1 + Randomness.get().nextInt(32)]; + test.read(b, 0, 1 + Randomness.get().nextInt(b.length)); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + test.skip(1 + Randomness.get().nextInt(32)); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + test.available(); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + test.reset(); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + int bytesReadAfter = bytesRead.get(); + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + } + + public void testBufferingUponMark() throws Exception { + int bufferSize = 3 + Randomness.get().nextInt(32); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + // read without mark + assertThat(test.read(), Matchers.not(-1)); + int readLen = 1 + Randomness.get().nextInt(8); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + assertThat(readLen, Matchers.not(0)); + // assert no buffering + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read one byte + int bytesReadBefore = bytesRead.get(); + assertThat(test.read(), Matchers.not(-1)); + int bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, 
Matchers.is(1)); + // assert byte is buffered + assertThat(test.getCurrentBufferCount(), Matchers.is(1)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - 1)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(1)); + assertThat(test.resetCalled, Matchers.is(false)); + // read more bytes, up to buffer size bytes + bytesReadBefore = bytesRead.get(); + readLen = 1 + Randomness.get().nextInt(bufferSize - 1); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert byte is buffered + assertThat(test.getCurrentBufferCount(), Matchers.is(1 + readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - 1 - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(1 + readLen)); + assertThat(test.resetCalled, Matchers.is(false)); + } + + public void testInvalidateMark() throws Exception { + int bufferSize = 3 + Randomness.get().nextInt(32); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read all bytes to fill the mark buffer + int bytesReadBefore = bytesRead.get(); + int readLen = bufferSize; + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + int bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert byte is buffered + assertThat(test.getCurrentBufferCount(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(bufferSize)); + assertThat(test.resetCalled, Matchers.is(false)); + // read another one byte + bytesReadBefore = bytesRead.get(); + assertThat(test.read(), Matchers.not(-1)); + bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); + // assert mark is invalidated + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.markCalled, Matchers.is(false)); + // read more bytes + bytesReadBefore = bytesRead.get(); + readLen = 1 + Randomness.get().nextInt(2 * bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert byte is NOT buffered + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.markCalled, Matchers.is(false)); + // assert reset does not work any more + IOException e = 
expectThrows(IOException.class, () -> { + test.reset(); + }); + assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); + } + + public void testResetWithoutMarkFails() throws Exception { + Tuple mockSourceTuple = getMockInfiniteInputStream(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), 1 + Randomness.get().nextInt(1024)); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + IOException e = expectThrows(IOException.class, () -> { + test.reset(); + }); + assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); + } + + public void testMarkAndBufferReadLimitsCheck() throws Exception { + Tuple mockSourceTuple = getMockInfiniteInputStream(); + int bufferSize = 1 + Randomness.get().nextInt(1024); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + int wrongReadLimit = bufferSize + 1 + Randomness.get().nextInt(8); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + test.mark(wrongReadLimit); + }); + assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" + bufferSize + "]")); + e = expectThrows(IllegalArgumentException.class, () -> { + test.mark(-1 - Randomness.get().nextInt(2)); + }); + assertThat(e.getMessage(), Matchers.containsString("cannot be negative")); + e = expectThrows(IllegalArgumentException.class, () -> { + new BufferOnMarkInputStream(mock(InputStream.class), 0 - Randomness.get().nextInt(2)); + }); + assertThat(e.getMessage(), Matchers.is("The buffersize constructor argument must be a strictly positive value")); + } + + public void testConsumeBufferUponReset() throws Exception { + int bufferSize = 3 + Randomness.get().nextInt(128); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read less than bufferSize bytes + int bytesReadBefore = bytesRead.get(); + int readLen = 1 + Randomness.get().nextInt(bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + int bytesReadAfter = bytesRead.get(); + // assert bytes are "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert buffer is populated + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.markCalled, Matchers.is(true)); + // reset + test.reset(); + assertThat(test.resetCalled, Matchers.is(true)); + // read again, from buffer this time + bytesReadBefore = bytesRead.get(); + int readLen2 = 1 + Randomness.get().nextInt(readLen); + if (randomBoolean()) { + test.readNBytes(readLen2); + } else { + skipNBytes(test, readLen2); + } + bytesReadAfter = bytesRead.get(); + // assert bytes are replayed from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); + // assert buffer is consumed + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + 
assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.resetCalled, Matchers.is(true)); + } + + public void testInvalidateMarkAfterReset() throws Exception { + int bufferSize = 3 + Randomness.get().nextInt(128); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read less than bufferSize bytes + int bytesReadBefore = bytesRead.get(); + int readLen = 1 + Randomness.get().nextInt(bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + int bytesReadAfter = bytesRead.get(); + // assert bytes are "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert buffer is populated + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.markCalled, Matchers.is(true)); + // reset + test.reset(); + assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + // read again, from buffer this time + bytesReadBefore = bytesRead.get(); + int readLen2 = readLen; + if (randomBoolean()) { + test.readNBytes(readLen2); + } else { + skipNBytes(test, readLen2); + } + bytesReadAfter = bytesRead.get(); + // assert bytes are replayed from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); + // assert buffer is consumed + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.resetCalled, Matchers.is(true)); + // read on, from the stream, until the mark buffer is full + bytesReadBefore = bytesRead.get(); + int readLen3 = bufferSize - readLen; + if (randomBoolean()) { + test.readNBytes(readLen3); + } else { + skipNBytes(test, readLen3); + } + bytesReadAfter = bytesRead.get(); + // assert bytes are "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3)); + assertThat(test.getCurrentBufferCount(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.resetCalled, Matchers.is(false)); + // read more bytes + bytesReadBefore = bytesRead.get(); + int readLen4 = 1 + Randomness.get().nextInt(2 * bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen4); + } else { + skipNBytes(test, readLen4); + } + bytesReadAfter = bytesRead.get(); + // assert byte is "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen4)); + // assert mark reset + assertThat(test.getCurrentBufferCount(), Matchers.is(0)); + 
assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.markCalled, Matchers.is(false)); + // assert reset does not work any more + IOException e = expectThrows(IOException.class, () -> { + test.reset(); + }); + assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); + } + + public void testMarkAfterResetWhileReplayingBuffer() throws Exception { + int bufferSize = 8 + Randomness.get().nextInt(8); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read less than bufferSize bytes + int bytesReadBefore = bytesRead.get(); + int readLen = 1 + Randomness.get().nextInt(bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + int bytesReadAfter = bytesRead.get(); + // assert bytes are "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert buffer is populated + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.resetCalled, Matchers.is(false)); + // reset + test.reset(); + assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + for (int readLen2 = 1; readLen2 <= readLen; readLen2++) { + Tuple mockSourceTuple2 = getMockInfiniteInputStream(); + BufferOnMarkInputStream cloneTest = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + cloneBufferOnMarkStream(cloneTest, test); + AtomicInteger bytesRead2 = mockSourceTuple2.v1(); + // read again, from buffer this time, less than before + bytesReadBefore = bytesRead2.get(); + if (randomBoolean()) { + cloneTest.readNBytes(readLen2); + } else { + skipNBytes(cloneTest, readLen2); + } + bytesReadAfter = bytesRead2.get(); + // assert bytes are replayed from the buffer, and not read from the stream + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); + // assert buffer is consumed + assertThat(cloneTest.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(cloneTest.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(cloneTest.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest.markCalled, Matchers.is(true)); + assertThat(cloneTest.resetCalled, Matchers.is(true)); + // mark + cloneTest.mark(1 + Randomness.get().nextInt(bufferSize)); + assertThat(cloneTest.getCurrentBufferCount(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen + readLen2)); + assertThat(cloneTest.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest.markCalled, Matchers.is(true)); + assertThat(cloneTest.resetCalled, Matchers.is(true)); + // read until the buffer is filled + for (int readLen3 = 1; readLen3 <= readLen - 
readLen2; readLen3++) { + Tuple mockSourceTuple3 = getMockInfiniteInputStream(); + BufferOnMarkInputStream cloneTest3 = new BufferOnMarkInputStream(mockSourceTuple3.v2(), bufferSize); + cloneBufferOnMarkStream(cloneTest3, cloneTest); + AtomicInteger bytesRead3 = mockSourceTuple3.v1(); + // read again from buffer, after the mark inside the buffer + bytesReadBefore = bytesRead3.get(); + if (randomBoolean()) { + cloneTest3.readNBytes(readLen3); + } else { + skipNBytes(cloneTest3, readLen3); + } + bytesReadAfter = bytesRead3.get(); + // assert bytes are replayed from the buffer, and not read from the stream + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); + // assert buffer is consumed completely + assertThat(cloneTest3.getCurrentBufferCount(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest3.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen + readLen2)); + assertThat(cloneTest3.getRemainingBufferToRead(), Matchers.is(readLen - readLen2 - readLen3)); + assertThat(cloneTest3.markCalled, Matchers.is(true)); + assertThat(cloneTest3.resetCalled, Matchers.is(true)); + } + // read beyond the buffer can supply, but not more than it can accommodate + for (int readLen3 = readLen - readLen2 + 1; readLen3 <= bufferSize - readLen2; readLen3++) { + Tuple mockSourceTuple3 = getMockInfiniteInputStream(); + BufferOnMarkInputStream cloneTest3 = new BufferOnMarkInputStream(mockSourceTuple3.v2(), bufferSize); + cloneBufferOnMarkStream(cloneTest3, cloneTest); + AtomicInteger bytesRead3 = mockSourceTuple3.v1(); + // read again from buffer, after the mark inside the buffer + bytesReadBefore = bytesRead3.get(); + if (randomBoolean()) { + cloneTest3.readNBytes(readLen3); + } else { + skipNBytes(cloneTest3, readLen3); + } + bytesReadAfter = bytesRead3.get(); + // assert bytes are PARTLY replayed, PARTLY read from the stream + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3 + readLen2 - readLen)); + // assert buffer is appended and fully replayed + assertThat(cloneTest3.getCurrentBufferCount(), Matchers.is(readLen3)); + assertThat(cloneTest3.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen3)); + assertThat(cloneTest3.markCalled, Matchers.is(true)); + assertThat(cloneTest3.resetCalled, Matchers.is(false)); + } + } + } + + public void testMarkAfterResetAfterReplayingBuffer() throws Exception { + int bufferSize = 8 + Randomness.get().nextInt(8); + Tuple mockSourceTuple = getMockInfiniteInputStream(); + AtomicInteger bytesRead = mockSourceTuple.v1(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + // mark + test.mark(1 + Randomness.get().nextInt(bufferSize)); + // read less than bufferSize bytes + int bytesReadBefore = bytesRead.get(); + int readLen = 1 + Randomness.get().nextInt(bufferSize); + if (randomBoolean()) { + test.readNBytes(readLen); + } else { + skipNBytes(test, readLen); + } + int bytesReadAfter = bytesRead.get(); + // assert bytes are "read" and not returned from the buffer + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); + // assert buffer is populated + assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.markCalled, Matchers.is(true)); + // reset + test.reset(); + 
assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); + assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); + assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.resetCalled, Matchers.is(true)); + for (int readLen2 = readLen + 1; readLen2 <= bufferSize; readLen2++) { + Tuple mockSourceTuple2 = getMockInfiniteInputStream(); + BufferOnMarkInputStream test2 = new BufferOnMarkInputStream(mockSourceTuple2.v2(), bufferSize); + cloneBufferOnMarkStream(test2, test); + AtomicInteger bytesRead2 = mockSourceTuple2.v1(); + // read again, more than before + bytesReadBefore = bytesRead2.get(); + byte[] read2 = test2.readNBytes(readLen2); + bytesReadAfter = bytesRead2.get(); + // assert bytes are PARTLY replayed, PARTLY read from the stream + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(read2.length - readLen)); + // assert buffer is appended and fully replayed + assertThat(test2.getCurrentBufferCount(), Matchers.is(read2.length)); + assertThat(test2.getRemainingBufferCapacity(), Matchers.is(bufferSize - read2.length)); + assertThat(test2.markCalled, Matchers.is(true)); + assertThat(test2.resetCalled, Matchers.is(false)); + // mark + test2.mark(1 + Randomness.get().nextInt(bufferSize)); + assertThat(test2.getCurrentBufferCount(), Matchers.is(0)); + assertThat(test2.getRemainingBufferCapacity(), Matchers.is(bufferSize)); + assertThat(test2.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test2.markCalled, Matchers.is(true)); + assertThat(test2.resetCalled, Matchers.is(false)); + } + } + + public void testNoMockSimpleMarkResetAtBeginning() throws Exception { + for (int length = 1; length <= 8; length++) { for (int mark = 1; mark <= length; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { in.mark(mark); @@ -48,8 +540,8 @@ public void testSimpleMarkResetAtBeginning() throws Exception { } } - public void testMarkResetAtBeginning() throws Exception { - for (int length = 1; length <= 16; length++) { + public void testNoMockMarkResetAtBeginning() throws Exception { + for (int length = 1; length <= 8; length++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.mark(length); // increasing length read/reset @@ -71,8 +563,8 @@ public void testMarkResetAtBeginning() throws Exception { } } - public void testSimpleMarkResetEverywhere() throws Exception { - for (int length = 1; length <= 16; length++) { + public void testNoMockSimpleMarkResetEverywhere() throws Exception { + for (int length = 1; length <= 10; length++) { for (int offset = 0; offset < length; offset++) { for (int mark = 1; mark <= length - offset; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { @@ -98,8 +590,8 @@ public void testSimpleMarkResetEverywhere() throws Exception { } } - public void testMarkResetEverywhere() throws Exception { - for (int length = 1; length <= 16; length++) { + public void testNoMockMarkResetEverywhere() throws Exception { + for (int length = 1; length <= 8; length++) { for (int offset = 0; offset < length; offset++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { @@ -129,7 +621,7 @@ public void testMarkResetEverywhere() throws Exception { } } - public void 
testDoubleMarkEverywhere() throws Exception { + public void testNoMockDoubleMarkEverywhere() throws Exception { for (int length = 1; length <= 16; length++) { for (int offset = 0; offset < length; offset++) { for (int readLen = 1; readLen <= length - offset; readLen++) { @@ -138,7 +630,7 @@ public void testDoubleMarkEverywhere() throws Exception { length)) { in.readNBytes(offset); assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); // first mark in.mark(length - offset); @@ -208,7 +700,7 @@ public void testDoubleMarkEverywhere() throws Exception { } } - public void testMarkWithoutReset() throws Exception { + public void testNoMockMarkWithoutReset() throws Exception { int maxMark = 8; BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, testArray.length), maxMark); int offset = 0; @@ -227,7 +719,7 @@ public void testMarkWithoutReset() throws Exception { } } - public void testThreeMarkResetMarkSteps() throws Exception { + public void testNoMockThreeMarkResetMarkSteps() throws Exception { int length = 8 + Randomness.get().nextInt(8); int stepLen = 4 + Randomness.get().nextInt(4); BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), stepLen); @@ -238,43 +730,100 @@ private void testMarkResetMarkStep(BufferOnMarkInputStream stream, int offset, i stream.mark(stepLen); for (int readLen = 1; readLen <= Math.min(stepLen, length - offset); readLen++) { for (int markLen = 1; markLen <= Math.min(stepLen, length - offset); markLen++) { + BufferOnMarkInputStream cloneStream = cloneBufferOnMarkStream(stream) ; // read ahead - byte[] test = stream.readNBytes(readLen); + byte[] test = cloneStream.readNBytes(readLen); assertArray(offset, test); // reset back - stream.reset(); + cloneStream.reset(); // read ahead different length - test = stream.readNBytes(markLen); + test = cloneStream.readNBytes(markLen); assertArray(offset, test); if (step > 0) { - int nextStepOffset = ((NoMarkByteArrayInputStream) stream.getWrapped()).getPos(); - BufferOnMarkInputStream cloneStream = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, - nextStepOffset, length - nextStepOffset), stepLen); - if (stream.ringBuffer != null) { - cloneStream.ringBuffer = Arrays.copyOf(stream.ringBuffer, stream.ringBuffer.length); - } else { - cloneStream.ringBuffer = null; - } - cloneStream.head = stream.head; - cloneStream.tail = stream.tail; - cloneStream.position = stream.position; - cloneStream.markCalled = stream.markCalled; - cloneStream.resetCalled = stream.resetCalled; - cloneStream.closed = stream.closed; testMarkResetMarkStep(cloneStream, offset + markLen, length, stepLen, step - 1); } - // reset back - stream.reset(); } } } + private BufferOnMarkInputStream cloneBufferOnMarkStream(BufferOnMarkInputStream orig) { + int origOffset = ((NoMarkByteArrayInputStream) orig.getWrapped()).getPos(); + int origLen = ((NoMarkByteArrayInputStream) orig.getWrapped()).getCount(); + BufferOnMarkInputStream cloneStream = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, + origOffset, origLen - origOffset), orig.bufferSize); + if (orig.ringBuffer != null) { + cloneStream.ringBuffer = Arrays.copyOf(orig.ringBuffer, orig.ringBuffer.length); + } else { + cloneStream.ringBuffer = null; + } + cloneStream.head = orig.head; + 
cloneStream.tail = orig.tail; + cloneStream.position = orig.position; + cloneStream.markCalled = orig.markCalled; + cloneStream.resetCalled = orig.resetCalled; + cloneStream.closed = orig.closed; + return cloneStream; + } + + private void cloneBufferOnMarkStream(BufferOnMarkInputStream clone, BufferOnMarkInputStream orig) { + if (orig.ringBuffer != null) { + clone.ringBuffer = Arrays.copyOf(orig.ringBuffer, orig.ringBuffer.length); + } else { + clone.ringBuffer = null; + } + clone.head = orig.head; + clone.tail = orig.tail; + clone.position = orig.position; + clone.markCalled = orig.markCalled; + clone.resetCalled = orig.resetCalled; + clone.closed = orig.closed; + } + private void assertArray(int offset, byte[] test) { for (int i = 0; i < test.length; i++) { Assert.assertThat(test[i], Matchers.is(testArray[offset + i])); } } + private Tuple getMockInfiniteInputStream() throws IOException { + InputStream mockSource = mock(InputStream.class); + AtomicInteger bytesRead = new AtomicInteger(0); + when(mockSource.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). + thenAnswer(invocationOnMock -> { + final byte[] b = (byte[]) invocationOnMock.getArguments()[0]; + final int off = (int) invocationOnMock.getArguments()[1]; + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + int bytesCount = 1 + Randomness.get().nextInt(len); + bytesRead.addAndGet(bytesCount); + return bytesCount; + } + }); + return new Tuple<>(bytesRead, mockSource); + } + + private static void skipNBytes(InputStream in, long n) throws IOException { + if (n > 0) { + long ns = in.skip(n); + if (ns >= 0 && ns < n) { // skipped too few bytes + // adjust number to skip + n -= ns; + // read until requested number skipped or EOS reached + while (n > 0 && in.read() != -1) { + n--; + } + // if not enough skipped, then EOFE + if (n != 0) { + throw new EOFException(); + } + } else if (ns != n) { // skipped negative or too many bytes + throw new IOException("Unable to skip exactly"); + } + } + } + static class NoMarkByteArrayInputStream extends ByteArrayInputStream { public NoMarkByteArrayInputStream(byte[] buf) { @@ -289,6 +838,10 @@ int getPos() { return pos; } + int getCount() { + return count; + } + @Override public void mark(int readlimit) { } From 76d82712580139ffff16160494fced31b9119b55 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 4 Dec 2019 20:49:36 +0200 Subject: [PATCH 012/142] Checkstyle --- .../encrypted/BufferOnMarkInputStream.java | 1 - .../BufferOnMarkInputStreamTests.java | 24 +++++++++---------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 1f7cdd79ff356..7f5a0c52c74ef 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -8,7 +8,6 @@ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; -import java.util.Arrays; import java.util.Objects; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java 
b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 1798b23e5c520..1ef8b5c67ef59 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + package org.elasticsearch.repositories.encrypted; import org.elasticsearch.common.Randomness; @@ -6,22 +12,14 @@ import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.BeforeClass; -import org.mockito.Mockito; -import org.mockito.stubbing.Answer; -import javax.crypto.KeyGenerator; -import javax.crypto.SecretKey; -import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; -import java.security.SecureRandom; import java.util.Arrays; -import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; -import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -207,7 +205,8 @@ public void testMarkAndBufferReadLimitsCheck() throws Exception { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { test.mark(wrongReadLimit); }); - assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" + bufferSize + "]")); + assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" + + bufferSize + "]")); e = expectThrows(IllegalArgumentException.class, () -> { test.mark(-1 - Randomness.get().nextInt(2)); }); @@ -567,7 +566,8 @@ public void testNoMockSimpleMarkResetEverywhere() throws Exception { for (int length = 1; length <= 10; length++) { for (int offset = 0; offset < length; offset++) { for (int mark = 1; mark <= length - offset; mark++) { - try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { + try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new + NoMarkByteArrayInputStream(testArray, 0, length), mark)) { // skip first offset bytes in.readNBytes(offset); in.mark(mark); @@ -826,11 +826,11 @@ private static void skipNBytes(InputStream in, long n) throws IOException { static class NoMarkByteArrayInputStream extends ByteArrayInputStream { - public NoMarkByteArrayInputStream(byte[] buf) { + NoMarkByteArrayInputStream(byte[] buf) { super(buf); } - public NoMarkByteArrayInputStream(byte[] buf, int offset, int length) { + NoMarkByteArrayInputStream(byte[] buf, int offset, int length) { super(buf, offset, length); } From 4e9778e6614c591190b25e18617117c55f1f8cbc Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 15:09:14 +0200 Subject: [PATCH 013/142] BufferOnMarkInputStream javadocs --- .../encrypted/BufferOnMarkInputStream.java | 188 +++++++++++++++++- .../BufferOnMarkInputStreamTests.java | 67 ++++--- 2 files changed, 212 insertions(+), 43 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 7f5a0c52c74ef..388a9af004910 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -10,19 +10,69 @@ import java.io.InputStream; import java.util.Objects; - +/** + * A {@code BufferOnMarkInputStream} adds the mark and reset functionality to another input stream. + * All the bytes read or skipped following a {@link #mark(int)} call are also stored in a fixed-size internal array + * so they can be replayed following a {@link #reset()} call. The size of the internal buffer is specified at construction + * time. It is an error (throws {@code IllegalArgumentException}) to specify a larger {@code readlimit} value as an argument + * to a mark call. + *
<p>
+ * Unlike the {@link java.io.BufferedInputStream}, this only buffers upon a {@link #mark(int)} call, + * i.e. if {@code mark} is never called this is equivalent to a bare pass-through {@link FilterInputStream}. + * Moreover, this does not buffer in advance, so the number of bytes read from this input stream, at any time, is equal to the number + * read from the underlying stream (provided that reset has not been called, in which case bytes are replayed from the internal buffer + * and no bytes are read from the underlying stream). + *
<p>
+ * Close will also close the underlying stream, and any subsequent {@code read}, {@code skip}, {@code available} and + * {@code reset} calls will throw {@code IOException}s. + *
<p>
+ * This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. + */ public final class BufferOnMarkInputStream extends FilterInputStream { - // protected for tests + // all protected for tests protected final int bufferSize; + /** + * The array used to store the bytes to be replayed upon a reset call. + * The buffer portion that stores valid bytes, which must be returned by the read calls after a reset call, + * is demarcated by a {@code head} (inclusive) and a {@code tail} offset (exclusive). The offsets wrap around, + * i.e. if the {@code tail} offset is smaller than the {@code head} offset, then the portion of valid bytes + * is that from the {@code head} offset until the end of the buffer array and from the start of the array + * until the {@code tail} offset. The buffer is empty when both the {@code head} and the {@code tail} offsets + * are equal. The buffer is full if it stores {@code bufferSize} elements. + * To avoid mixing up the two states, the actual allocated size of the array is {@code bufferSize + 1}. + */ protected byte[] ringBuffer; + /** + * The inclusive start offset of the bytes that must be replayed after a reset call. + */ protected int head; + /** + * The exclusive end offset of the bytes that must be replayed after a reset call. + */ protected int tail; + /** + * The current offset of the next byte to be returned from the buffer for the reads following a reset. + * This is defined only when {@code resetCalled} is {@code true}. + */ protected int position; + /** + * {@code true} when the result of a read or skip from the underlying stream must also be stored in the buffer + */ protected boolean markCalled; + /** + * {@code true} when the returned bytes must come from the buffer and not from the underlying stream + */ protected boolean resetCalled; protected boolean closed; + /** + * Creates a {@code BufferOnMarkInputStream} that buffers a maximum of {@code bufferSize} elements. + * The {@code bufferSize} is the maximum value for the mark readlimit. + * + * @param in the underlying input buffer + * @param bufferSize the number of bytes that can be stored after a call to mark + */ public BufferOnMarkInputStream(InputStream in, int bufferSize) { super(Objects.requireNonNull(in)); if (bufferSize <= 0) { @@ -36,6 +86,31 @@ public BufferOnMarkInputStream(InputStream in, int bufferSize) { this.closed = false; } + /** + * Reads up to {@code len} bytes of data into an array of bytes from this + * input stream. If {@code len} is zero, then no bytes are read and {@cpde 0} + * is returned; otherwise, there is an attempt to read at least one byte. + * The read will return buffered bytes, which have been returned in a previous + * call as well, if the contents of the stream must be replayed following a + * reset call; otherwise it forwards the call to the underlying stream. + * If no byte is available because there are no more bytes to replay following + * a reset (if a reset was called) and the underlying stream is exhausted, the + * value {@code -1} is returned; otherwise, at least one byte is read and stored + * into {@code b}, starting at offset {@code off}. + * + * @param b the buffer into which the data is read. + * @param off the start offset in the destination array {@code b} + * @param len the maximum number of bytes read. + * @return the total number of bytes read into the buffer, or + * {@code -1} if there is no more data because the end of + * the stream has been reached. + * @throws NullPointerException If {@code b} is {@code null}. 
+ * @throws IndexOutOfBoundsException If {@code off} is negative, + * {@code len} is negative, or {@code len} is greater than + * {@code b.length - off} + * @throws IOException if this stream has been closed or an I/O error occurs on the underlying stream. + * @see java.io.InputStream#read(byte[], int, int) + */ @Override public int read(byte[] b, int off, int len) throws IOException { ensureOpen(); @@ -43,7 +118,7 @@ public int read(byte[] b, int off, int len) throws IOException { if (len == 0) { return 0; } - // firstly try reading any buffered bytes in case this read call is part of rewind following a reset call + // firstly try reading any buffered bytes in case this read call is part of a rewind following a reset call if (resetCalled) { int bytesRead = readFromBuffer(b, off, len); if (bytesRead == 0) { @@ -70,6 +145,22 @@ public int read(byte[] b, int off, int len) throws IOException { return bytesRead; } + /** + * Reads the next byte of data from this input stream. The value + * byte is returned as an {@code int} in the range + * {@code 0} to {@code 255}. If no byte is available + * because the end of the stream has been reached, the value + * {@code -1} is returned. The end of the stream is reached if the + * end of the underlying stream is reached, and reset has not been + * called or there are no more bytes to replay following a reset. + * This method blocks until input data is available, the end of + * the stream is detected, or an exception is thrown. + * + * @return the next byte of data, or {@code -1} if the end of the + * stream is reached. + * @exception IOException if this stream has been closed or an I/O error occurs on the underlying stream. + * @see BufferOnMarkInputStream#read(byte[], int, int) + */ @Override public int read() throws IOException { ensureOpen(); @@ -81,6 +172,18 @@ public int read() throws IOException { return arr[0]; } + /** + * Skips over and discards {@code n} bytes of data from the + * input stream. The {@code skip} method may, for a variety of + * reasons, end up skipping over some smaller number of bytes, + * possibly {@code 0}. The actual number of bytes skipped is + * returned. + * + * @param n the number of bytes to be skipped. + * @return the actual number of bytes skipped. + * @throws IOException if this stream is closed, or if {@code in.skip(n)} throws an IOException or, + * in the case that {@code mark} is called, if BufferOnMarkInputStream#read(byte[], int, int) throws an IOException + */ @Override public long skip(long n) throws IOException { ensureOpen(); @@ -106,6 +209,17 @@ public long skip(long n) throws IOException { return n - remaining; } + /** + * Returns an estimate of the number of bytes that can be read (or + * skipped over) from this input stream without blocking by the next + * caller of a method for this input stream. The next caller might be + * the same thread or another thread. A single read or skip of this + * many bytes will not block, but may read or skip fewer bytes. + * + * @return an estimate of the number of bytes that can be read (or skipped + * over) from this input stream without blocking. + * @exception IOException if this stream is closed or if {@code in.available()} throws an IOException + */ @Override public int available() throws IOException { ensureOpen(); @@ -121,15 +235,39 @@ public int available() throws IOException { return bytesAvailable; } + /** + * Marks the current position in this input stream. 
A subsequent call to + * the {@code reset} method repositions this stream at the last marked + * position so that subsequent reads re-read the same bytes. + *
<p>
+ * The {@code readlimit} argument tells this input stream to + * allow that many bytes to be read before the mark position can be + * invalidated. The {@code readlimit} argument value must not be larger than + * the {@code bufferSize} constructor argument value, as returned by + * {@link #getMaxMarkReadlimit()}. + *
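+ * A minimal, illustrative sketch of the intended mark/reset round trip ({@code source} stands in
+ * for any wrapped input stream and is not part of this class):
+ * <pre>
+ * BufferOnMarkInputStream in = new BufferOnMarkInputStream(source, 32);
+ * in.mark(32);                       // readlimit may be at most the buffer size
+ * byte[] first = in.readNBytes(16);  // read from source and also copied into the ring buffer
+ * in.reset();                        // rewind to the marked position
+ * byte[] again = in.readNBytes(16);  // replayed from the ring buffer, same bytes as first
+ * </pre>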
<p>
+ * The invalidation of the mark position when the read count exceeds the read + * limit is not currently enforced. A mark position is invalidated when the + * read count exceeds the maximum read limit, as returned by + * {@link #getMaxMarkReadlimit()}. + * + * @param readlimit the maximum limit of bytes that can be read before + * the mark position can be invalidated. + * @see BufferOnMarkInputStream#reset() + * @see java.io.InputStream#mark(int) + */ @Override public void mark(int readlimit) { // readlimit is otherwise ignored but this defensively fails if the caller is expecting to be able to mark/reset more than this - // stream can accommodate + // instance can accommodate in the ring mark buffer if (readlimit > bufferSize) { throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + bufferSize + "]"); } else if (readlimit < 0) { throw new IllegalArgumentException("Readlimit value [" + readlimit + "] cannot be negative"); } + if (closed) { + return; + } markCalled = true; // lazily allocate the mark ring buffer if (ringBuffer == null) { @@ -147,11 +285,27 @@ public void mark(int readlimit) { } } + /** + * Tests if this input stream supports the {@code mark} and + * {@code reset} methods. This always returns {@code true}. + */ @Override public boolean markSupported() { return true; } + /** + * Repositions this stream to the position at the time the + * {@code mark} method was last called on this input stream. + * Subsequent read calls will return the same bytes in the same + * order since the point of the {@code mark} call. Naturally, + * {@code mark} can be invoked at any moment, even after a + * {@code reset}. + * + * @throws IOException if the stream has been closed or the number of bytes + * read since the last mark call exceeded {@link #getMaxMarkReadlimit()} + * @see java.io.InputStream#mark(int) + */ @Override public void reset() throws IOException { ensureOpen(); @@ -162,14 +316,28 @@ public void reset() throws IOException { position = head; } + /** + * Closes this input stream as well as the underlying stream. + * + * @exception IOException if an I/O error occurs while closing the underlying stream. + */ @Override public void close() throws IOException { if (false == closed) { closed = true; + ringBuffer = null; in.close(); } } + /** + * Returns the maximum value for the {@code readlimit} argument of the {@link #mark(int)} method. + * This is the same as the {@code bufferSize} constructor argument. 
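+ * For illustration only ({@code source} stands in for any wrapped input stream), reading more than
+ * this many bytes after a mark invalidates that mark:
+ * <pre>
+ * BufferOnMarkInputStream in = new BufferOnMarkInputStream(source, 8);
+ * in.mark(8);
+ * in.readNBytes(9);  // exceeds the maximum readlimit, so the mark is invalidated
+ * in.reset();        // throws IOException
+ * </pre>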
+ */ + public int getMaxMarkReadlimit() { + return bufferSize; + } + private int readFromBuffer(byte[] b, int off, int len) { if (position == tail) { return 0; @@ -213,6 +381,12 @@ private void writeToBuffer(byte[] b, int off, int len) { } } + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); + } + } + // protected for tests protected int getRemainingBufferCapacity() { if (ringBuffer == null) { @@ -253,12 +427,6 @@ protected int getCurrentBufferCount() { } } - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream has been closed"); - } - } - // only for tests protected InputStream getWrapped() { return in; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 1ef8b5c67ef59..396879417ab37 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -35,6 +35,40 @@ static void createTestArray() throws Exception { } } + public void testResetWithoutMarkFails() throws Exception { + Tuple mockSourceTuple = getMockInfiniteInputStream(); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), 1 + Randomness.get().nextInt(1024)); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + IOException e = expectThrows(IOException.class, () -> { + test.reset(); + }); + assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); + } + + public void testMarkAndBufferReadLimitsCheck() throws Exception { + Tuple mockSourceTuple = getMockInfiniteInputStream(); + int bufferSize = 1 + Randomness.get().nextInt(1024); + BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); + assertThat(test.getMaxMarkReadlimit(), Matchers.is(bufferSize)); + // maybe read some bytes + test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + int wrongReadLimit = bufferSize + 1 + Randomness.get().nextInt(8); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + test.mark(wrongReadLimit); + }); + assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" + + bufferSize + "]")); + e = expectThrows(IllegalArgumentException.class, () -> { + test.mark(-1 - Randomness.get().nextInt(2)); + }); + assertThat(e.getMessage(), Matchers.containsString("cannot be negative")); + e = expectThrows(IllegalArgumentException.class, () -> { + new BufferOnMarkInputStream(mock(InputStream.class), 0 - Randomness.get().nextInt(2)); + }); + assertThat(e.getMessage(), Matchers.is("The buffersize constructor argument must be a strictly positive value")); + } + public void testCloseRejectsSuccessiveCalls() throws Exception { int bufferSize = 3 + Randomness.get().nextInt(32); Tuple mockSourceTuple = getMockInfiniteInputStream(); @@ -184,39 +218,6 @@ public void testInvalidateMark() throws Exception { assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); } - public void testResetWithoutMarkFails() throws Exception { - Tuple mockSourceTuple = getMockInfiniteInputStream(); - BufferOnMarkInputStream 
test = new BufferOnMarkInputStream(mockSourceTuple.v2(), 1 + Randomness.get().nextInt(1024)); - // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); - IOException e = expectThrows(IOException.class, () -> { - test.reset(); - }); - assertThat(e.getMessage(), Matchers.is("Mark not called or has been invalidated")); - } - - public void testMarkAndBufferReadLimitsCheck() throws Exception { - Tuple mockSourceTuple = getMockInfiniteInputStream(); - int bufferSize = 1 + Randomness.get().nextInt(1024); - BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); - // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); - int wrongReadLimit = bufferSize + 1 + Randomness.get().nextInt(8); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - test.mark(wrongReadLimit); - }); - assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" - + bufferSize + "]")); - e = expectThrows(IllegalArgumentException.class, () -> { - test.mark(-1 - Randomness.get().nextInt(2)); - }); - assertThat(e.getMessage(), Matchers.containsString("cannot be negative")); - e = expectThrows(IllegalArgumentException.class, () -> { - new BufferOnMarkInputStream(mock(InputStream.class), 0 - Randomness.get().nextInt(2)); - }); - assertThat(e.getMessage(), Matchers.is("The buffersize constructor argument must be a strictly positive value")); - } - public void testConsumeBufferUponReset() throws Exception { int bufferSize = 3 + Randomness.get().nextInt(128); Tuple mockSourceTuple = getMockInfiniteInputStream(); From 24d6d2751b5cc00f36e381d3502a40c36ecce679 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 15:37:11 +0200 Subject: [PATCH 014/142] merge fallout --- .../encrypted/EncryptedRepositoryPlugin.java | 6 ------ .../repositories/encrypted/PrefixInputStream.java | 10 ++++++++++ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index c61c444203e82..e642bc748a3a5 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -24,12 +24,6 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor public EncryptedRepositoryPlugin(final Settings settings) { } - @Override - public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry, - ThreadPool threadPool) { - return Map.of(); - } - @Override public List> getSettings() { return List.of(); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index de46f3386dc12..86bb25080ca54 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -11,6 +11,16 @@ import java.io.InputStream; import java.util.Objects; +/** 
+ * A {@code PrefixInputStream} wraps another input stream and exposes + * only the first {@code length} bytes of it. Reading from the wrapping + * {@code PrefixInputStream} consumes the underlying stream. The stream + * is exhausted when {@code length} bytes have been read or the underlying + * stream is exhausted. + *
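+ * A minimal, illustrative sketch ({@code source} stands in for any wrapped input stream):
+ * <pre>
+ * PrefixInputStream prefix = new PrefixInputStream(source, 16, true);
+ * byte[] head = prefix.readAllBytes(); // at most the first 16 bytes of source
+ * prefix.close();                      // also closes source, because closeSource is true
+ * </pre>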
<p>
+ * Closing this stream may or may not close the underlying stream, see + * {@code closeSource}. + */ public final class PrefixInputStream extends FilterInputStream { private final int length; From 3cd79bd66ce8dc473b30c64494d7757b536f9f6f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 19:18:42 +0200 Subject: [PATCH 015/142] PrefixInputStream tests --- .../encrypted/BufferOnMarkInputStream.java | 5 +- .../encrypted/EncryptedRepositoryPlugin.java | 5 - .../encrypted/PrefixInputStream.java | 22 +- .../BufferOnMarkInputStreamTests.java | 15 ++ .../encrypted/PrefixInputStreamTests.java | 231 ++++++++++++++++++ 5 files changed, 262 insertions(+), 16 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PrefixInputStreamTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 388a9af004910..d7f508ceaed10 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -67,7 +67,8 @@ public final class BufferOnMarkInputStream extends FilterInputStream { protected boolean closed; /** - * Creates a {@code BufferOnMarkInputStream} that buffers a maximum of {@code bufferSize} elements. + * Creates a {@code BufferOnMarkInputStream} that buffers a maximum of {@code bufferSize} elements + * from the wrapped input stream {@code in} in order to support {@code mark} and {@code reset}. * The {@code bufferSize} is the maximum value for the mark readlimit. * * @param in the underlying input buffer @@ -88,7 +89,7 @@ public BufferOnMarkInputStream(InputStream in, int bufferSize) { /** * Reads up to {@code len} bytes of data into an array of bytes from this - * input stream. If {@code len} is zero, then no bytes are read and {@cpde 0} + * input stream. If {@code len} is zero, then no bytes are read and {@code 0} * is returned; otherwise, there is an attempt to read at least one byte. 
* The read will return buffered bytes, which have been returned in a previous * call as well, if the contents of the stream must be replayed following a diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index e642bc748a3a5..93e511bb14383 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -8,16 +8,11 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; -import org.elasticsearch.repositories.Repository; -import org.elasticsearch.threadpool.ThreadPool; import java.util.List; -import java.util.Map; public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 86bb25080ca54..65a28ac8d1dde 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -18,8 +18,9 @@ * is exhausted when {@code length} bytes have been read or the underlying * stream is exhausted. *
<p>
- * Closing this stream may or may not close the underlying stream, see - * {@code closeSource}. + * Iff {@code closeSource} constructor argument is {@code true}, closing this + * stream will close the underlying input stream. Any subsequent {@code read}, + * {@code skip} and {@code available} calls will throw {@code IOException}s. */ public final class PrefixInputStream extends FilterInputStream { @@ -30,6 +31,9 @@ public final class PrefixInputStream extends FilterInputStream { public PrefixInputStream(InputStream in, int length, boolean closeSource) { super(Objects.requireNonNull(in)); + if (length < 0) { + throw new IllegalArgumentException("The length constructor argument must be a positive value"); + } this.length = length; this.position = 0; this.closeSource = closeSource; @@ -85,7 +89,7 @@ public long skip(long n) throws IOException { @Override public int available() throws IOException { ensureOpen(); - return Math.min(length - position, super.available()); + return Math.min(length - position, in.available()); } @Override @@ -102,12 +106,6 @@ public boolean markSupported() { return false; } - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream has been closed"); - } - } - @Override public void close() throws IOException { if (closed) { @@ -119,4 +117,10 @@ public void close() throws IOException { } } + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); + } + } + } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 396879417ab37..02e4f1a89730e 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -802,6 +802,21 @@ private Tuple getMockInfiniteInputStream() throws IO return bytesCount; } }); + when(mockSource.read()).thenAnswer(invocationOnMock -> { + bytesRead.incrementAndGet(); + return Randomness.get().nextInt(256); + }); + when(mockSource.skip(org.mockito.Matchers.anyLong())).thenAnswer(invocationOnMock -> { + final long n = (long) invocationOnMock.getArguments()[0]; + if (n <= 0) { + return 0; + } + int bytesSkipped = 1 + Randomness.get().nextInt(Math.toIntExact(n)); + bytesRead.addAndGet(bytesSkipped); + return bytesSkipped; + }); + when(mockSource.available()).thenReturn(1 + Randomness.get().nextInt(32)); + when(mockSource.markSupported()).thenReturn(false); return new Tuple<>(bytesRead, mockSource); } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PrefixInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PrefixInputStreamTests.java new file mode 100644 index 0000000000000..5673a6376dbb6 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PrefixInputStreamTests.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class PrefixInputStreamTests extends ESTestCase { + + public void testZeroLength() throws Exception { + Tuple mockTuple = getMockBoundedInputStream(0); + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), 1 + Randomness.get().nextInt(32), randomBoolean()); + assertThat(test.available(), Matchers.is(0)); + assertThat(test.read(), Matchers.is(-1)); + assertThat(test.skip(1 + Randomness.get().nextInt(32)), Matchers.is(0L)); + } + + public void testClose() throws Exception { + int boundedLength = 1 + Randomness.get().nextInt(256); + Tuple mockTuple = getMockBoundedInputStream(boundedLength); + int prefixLength = Randomness.get().nextInt(boundedLength); + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), prefixLength, randomBoolean()); + test.close(); + int byteCountBefore = mockTuple.v1().get(); + IOException e = expectThrows(IOException.class, () -> { + test.read(); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + byte[] b = new byte[1 + Randomness.get().nextInt(32)]; + test.read(b, 0, 1 + Randomness.get().nextInt(b.length)); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + test.skip(1 + Randomness.get().nextInt(32)); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + e = expectThrows(IOException.class, () -> { + test.available(); + }); + assertThat(e.getMessage(), Matchers.is("Stream has been closed")); + int byteCountAfter = mockTuple.v1().get(); + assertThat(byteCountBefore - byteCountAfter, Matchers.is(0)); + // test closeSource parameter + AtomicBoolean isClosed = new AtomicBoolean(false); + InputStream mockIn = mock(InputStream.class); + doAnswer(new Answer() { + public Void answer(InvocationOnMock invocation) { + isClosed.set(true); + return null; + } + }).when(mockIn).close(); + new PrefixInputStream(mockIn, 1 + Randomness.get().nextInt(32), true).close(); + assertThat(isClosed.get(), Matchers.is(true)); + isClosed.set(false); + new PrefixInputStream(mockIn, 1 + Randomness.get().nextInt(32), false).close(); + assertThat(isClosed.get(), Matchers.is(false)); + } + + public void testAvailable() throws Exception { + AtomicInteger available = new AtomicInteger(0); + int boundedLength = 1 + Randomness.get().nextInt(256); + InputStream mockIn = mock(InputStream.class); + when(mockIn.available()).thenAnswer(invocationOnMock -> { + return available.get(); + }); + PrefixInputStream test = new PrefixInputStream(mockIn, boundedLength, randomBoolean()); + assertThat(test.available(), Matchers.is(0)); + available.set(Randomness.get().nextInt(boundedLength)); + assertThat(test.available(), Matchers.is(available.get())); + available.set(boundedLength + 1 + Randomness.get().nextInt(boundedLength)); + assertThat(test.available(), Matchers.is(boundedLength)); + } + + public 
void testReadPrefixLength() throws Exception { + int boundedLength = 1 + Randomness.get().nextInt(256); + Tuple mockTuple = getMockBoundedInputStream(boundedLength); + int prefixLength = Randomness.get().nextInt(boundedLength); + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), prefixLength, randomBoolean()); + int byteCountBefore = mockTuple.v1().get(); + byte[] b = test.readAllBytes(); + int byteCountAfter = mockTuple.v1().get(); + assertThat(b.length, Matchers.is(prefixLength)); + assertThat(byteCountBefore - byteCountAfter, Matchers.is(prefixLength)); + assertThat(test.read(), Matchers.is(-1)); + assertThat(test.available(), Matchers.is(0)); + assertThat(mockTuple.v2().read(), Matchers.not(-1)); + } + + public void testSkipPrefixLength() throws Exception { + int boundedLength = 1 + Randomness.get().nextInt(256); + Tuple mockTuple = getMockBoundedInputStream(boundedLength); + int prefixLength = Randomness.get().nextInt(boundedLength); + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), prefixLength, randomBoolean()); + int byteCountBefore = mockTuple.v1().get(); + skipNBytes(test, prefixLength); + int byteCountAfter = mockTuple.v1().get(); + assertThat(byteCountBefore - byteCountAfter, Matchers.is(prefixLength)); + assertThat(test.read(), Matchers.is(-1)); + assertThat(test.available(), Matchers.is(0)); + assertThat(mockTuple.v2().read(), Matchers.not(-1)); + } + + public void testReadShorterWrapped() throws Exception { + int boundedLength = 1 + Randomness.get().nextInt(256); + Tuple mockTuple = getMockBoundedInputStream(boundedLength); + int prefixLength = boundedLength; + if (randomBoolean()) { + prefixLength += 1 + Randomness.get().nextInt(boundedLength); + } + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), prefixLength, randomBoolean()); + int byteCountBefore = mockTuple.v1().get(); + byte[] b = test.readAllBytes(); + int byteCountAfter = mockTuple.v1().get(); + assertThat(b.length, Matchers.is(boundedLength)); + assertThat(byteCountBefore - byteCountAfter, Matchers.is(boundedLength)); + assertThat(test.read(), Matchers.is(-1)); + assertThat(test.available(), Matchers.is(0)); + assertThat(mockTuple.v2().read(), Matchers.is(-1)); + assertThat(mockTuple.v2().available(), Matchers.is(0)); + } + + public void testSkipShorterWrapped() throws Exception { + int boundedLength = 1 + Randomness.get().nextInt(256); + Tuple mockTuple = getMockBoundedInputStream(boundedLength); + final int prefixLength; + if (randomBoolean()) { + prefixLength = boundedLength + 1 + Randomness.get().nextInt(boundedLength); + } else { + prefixLength = boundedLength; + } + PrefixInputStream test = new PrefixInputStream(mockTuple.v2(), prefixLength, randomBoolean()); + int byteCountBefore = mockTuple.v1().get(); + if (prefixLength == boundedLength) { + skipNBytes(test, prefixLength); + } else { + expectThrows(EOFException.class, () -> { + skipNBytes(test, prefixLength); + }); + } + int byteCountAfter = mockTuple.v1().get(); + assertThat(byteCountBefore - byteCountAfter, Matchers.is(boundedLength)); + assertThat(test.read(), Matchers.is(-1)); + assertThat(test.available(), Matchers.is(0)); + assertThat(mockTuple.v2().read(), Matchers.is(-1)); + assertThat(mockTuple.v2().available(), Matchers.is(0)); + } + + private Tuple getMockBoundedInputStream(int bound) throws IOException { + InputStream mockSource = mock(InputStream.class); + AtomicInteger bytesRemaining = new AtomicInteger(bound); + when(mockSource.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), 
org.mockito.Matchers.anyInt())). + thenAnswer(invocationOnMock -> { + final byte[] b = (byte[]) invocationOnMock.getArguments()[0]; + final int off = (int) invocationOnMock.getArguments()[1]; + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + if (bytesRemaining.get() <= 0) { + return -1; + } + int bytesCount = 1 + Randomness.get().nextInt(Math.min(len, bytesRemaining.get())); + bytesRemaining.addAndGet(-bytesCount); + return bytesCount; + } + }); + when(mockSource.read()).thenAnswer(invocationOnMock -> { + if (bytesRemaining.get() <= 0) { + return -1; + } + bytesRemaining.decrementAndGet(); + return Randomness.get().nextInt(256); + }); + when(mockSource.skip(org.mockito.Matchers.anyLong())).thenAnswer(invocationOnMock -> { + final long n = (long) invocationOnMock.getArguments()[0]; + if (n <= 0 || bytesRemaining.get() <= 0) { + return 0; + } + int bytesSkipped = 1 + Randomness.get().nextInt(Math.min(bytesRemaining.get(), Math.toIntExact(n))); + bytesRemaining.addAndGet(-bytesSkipped); + return bytesSkipped; + }); + when(mockSource.available()).thenAnswer(invocationOnMock -> { + if (bytesRemaining.get() <= 0) { + return 0; + } + return 1 + Randomness.get().nextInt(bytesRemaining.get()); + }); + when(mockSource.markSupported()).thenReturn(false); + return new Tuple<>(bytesRemaining, mockSource); + } + + private static void skipNBytes(InputStream in, long n) throws IOException { + if (n > 0) { + long ns = in.skip(n); + if (ns >= 0 && ns < n) { // skipped too few bytes + // adjust number to skip + n -= ns; + // read until requested number skipped or EOS reached + while (n > 0 && in.read() != -1) { + n--; + } + // if not enough skipped, then EOFE + if (n != 0) { + throw new EOFException(); + } + } else if (ns != n) { // skipped negative or too many bytes + throw new IOException("Unable to skip exactly"); + } + } + } +} From c610fe83b3a50f769f6e9496aeacbbb3fcd07eb4 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 19:44:55 +0200 Subject: [PATCH 016/142] WIP --- .../encrypted/CountingInputStream.java | 14 +++++++++++++- .../repositories/encrypted/PrefixInputStream.java | 4 ++-- .../encrypted/CountingInputStreamTests.java | 12 ++++++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index 7fdff89830b28..b30dd5cab9e88 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -10,6 +10,18 @@ import java.io.InputStream; import java.util.Objects; +/** + * A {@code CountingInputStream} wraps another input stream and counts the number of bytes + * that have been read or skipped. This input stream must be used in place of the wrapped one. + * Bytes replayed following a {@code reset} call are not counted multiple times, i.e. only + * the bytes that are produced in a single pass, without resets, by the wrapped stream are counted. 
+ * This input stream does no buffering on its own and only supports {@code mark} and + * {@code reset} if the wrapped stream supports it. + *
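+ * For illustration, a rough usage sketch (the byte counts and the wrapped stream are
+ * assumptions made for this example, not taken from actual callers) could be:
+ * <pre>{@code
+ * InputStream wrapped = new ByteArrayInputStream(new byte[64]);
+ * CountingInputStream countingIn = new CountingInputStream(wrapped, true);
+ * countingIn.readNBytes(16); // getCount() == 16
+ * countingIn.mark(64);
+ * countingIn.readNBytes(32); // getCount() == 48
+ * countingIn.reset();        // getCount() reverts to 16, replayed bytes are not counted twice
+ * long bytesProduced = countingIn.getCount();
+ * }</pre>
+ *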

+ * If the {@code closeSource} constructor argument is {@code true}, closing this + * stream will also close the wrapped input stream. Apart from closing the wrapped + * stream in this case, the {@code close} method does nothing else. + */ public final class CountingInputStream extends FilterInputStream { private long count; @@ -21,7 +33,7 @@ public final class CountingInputStream extends FilterInputStream { * Wraps another input stream, counting the number of bytes read. * * @param in the input stream to be wrapped - * @param closeSource if closing this stream will propagate to the wrapped stream + * @param closeSource {@code true} if closing this stream will also close the wrapped stream */ public CountingInputStream(InputStream in, boolean closeSource) { super(Objects.requireNonNull(in)); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 65a28ac8d1dde..7f20bdc3d4dac 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -18,8 +18,8 @@ * is exhausted when {@code length} bytes have been read or the underlying * stream is exhausted. *
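+ * For illustration, one possible use is reading a fixed-size header from an already
+ * opened stream without consuming the rest of it (the 32-byte header length is only
+ * an assumption for this example):
+ * <pre>{@code
+ * PrefixInputStream headerIn = new PrefixInputStream(wrappedIn, 32, false);
+ * byte[] header = headerIn.readAllBytes(); // at most 32 bytes, fewer if wrappedIn is shorter
+ * // wrappedIn can be read further from here, positioned right after the prefix
+ * }</pre>
+ *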

- * Iff {@code closeSource} constructor argument is {@code true}, closing this - * stream will close the underlying input stream. Any subsequent {@code read}, + * If the {@code closeSource} constructor argument is {@code true}, closing this + * stream will also close the underlying input stream. Any subsequent {@code read}, * {@code skip} and {@code available} calls will throw {@code IOException}s. */ public final class PrefixInputStream extends FilterInputStream { diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java new file mode 100644 index 0000000000000..d1cfbc663e8d1 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.test.ESTestCase; + +public class CountingInputStreamTests extends ESTestCase { +} From e4f8564057007c681d931a51f981980376fc5463 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 21:35:13 +0200 Subject: [PATCH 017/142] CountingInputStreamTests --- .../encrypted/CountingInputStreamTests.java | 154 ++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java index d1cfbc663e8d1..40871edb7187e 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java @@ -6,7 +6,161 @@ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.common.Randomness; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.junit.BeforeClass; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class CountingInputStreamTests extends ESTestCase { + + private static byte[] testArray; + + @BeforeClass + static void createTestArray() throws Exception { + testArray = new byte[128]; + for (int i = 0; i < testArray.length; i++) { + testArray[i] = (byte) i; + } + } + + public void testWrappedMarkAndClose() throws Exception { + AtomicBoolean isClosed = new AtomicBoolean(false); + InputStream mockIn = mock(InputStream.class); + doAnswer(new Answer() { + public Void answer(InvocationOnMock invocation) { + isClosed.set(true); + return null; + } + }).when(mockIn).close(); + new CountingInputStream(mockIn, true).close(); + assertThat(isClosed.get(), Matchers.is(true)); + isClosed.set(false); + new CountingInputStream(mockIn, false).close(); + assertThat(isClosed.get(), Matchers.is(false)); + 
when(mockIn.markSupported()).thenAnswer(invocationOnMock -> { + return false; + }); + assertThat(new CountingInputStream(mockIn, randomBoolean()).markSupported(), Matchers.is(false)); + when(mockIn.markSupported()).thenAnswer(invocationOnMock -> { + return true; + }); + assertThat(new CountingInputStream(mockIn, randomBoolean()).markSupported(), Matchers.is(true)); + } + + public void testSimpleCountForRead() throws Exception { + CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); + assertThat(test.getCount(), Matchers.is(0L)); + int readLen = Randomness.get().nextInt(testArray.length); + test.readNBytes(readLen); + assertThat(test.getCount(), Matchers.is((long)readLen)); + readLen = testArray.length - readLen; + test.readNBytes(readLen); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + test.close(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + } + + public void testSimpleCountForSkip() throws Exception { + CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); + assertThat(test.getCount(), Matchers.is(0L)); + int skipLen = Randomness.get().nextInt(testArray.length); + test.skip(skipLen); + assertThat(test.getCount(), Matchers.is((long)skipLen)); + skipLen = testArray.length - skipLen; + test.readNBytes(skipLen); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + test.close(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + } + + public void testCountingForMarkAndReset() throws Exception { + CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); + assertThat(test.getCount(), Matchers.is(0L)); + assertThat(test.markSupported(), Matchers.is(true)); + int offset1 = Randomness.get().nextInt(testArray.length); + if (randomBoolean()) { + test.skip(offset1); + } else { + test.read(new byte[offset1]); + } + assertThat(test.getCount(), Matchers.is((long)offset1)); + test.mark(testArray.length); + int offset2 = Randomness.get().nextInt(testArray.length - offset1); + if (randomBoolean()) { + test.skip(offset2); + } else { + test.read(new byte[offset2]); + } + assertThat(test.getCount(), Matchers.is((long)offset1 + offset2)); + test.reset(); + assertThat(test.getCount(), Matchers.is((long)offset1)); + int offset3 = Randomness.get().nextInt(offset2); + if (randomBoolean()) { + test.skip(offset3); + } else { + test.read(new byte[offset3]); + } + assertThat(test.getCount(), Matchers.is((long)offset1 + offset3)); + test.reset(); + assertThat(test.getCount(), Matchers.is((long)offset1)); + test.readAllBytes(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + test.close(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + } + + public void testCountingForMarkAfterReset() throws Exception { + CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); + assertThat(test.getCount(), Matchers.is(0L)); + assertThat(test.markSupported(), Matchers.is(true)); + int offset1 = Randomness.get().nextInt(testArray.length); + if (randomBoolean()) { + test.skip(offset1); + } else { + test.read(new byte[offset1]); + } + assertThat(test.getCount(), Matchers.is((long)offset1)); + test.mark(testArray.length); + int offset2 = Randomness.get().nextInt(testArray.length - offset1); + if (randomBoolean()) { + test.skip(offset2); + } else { + test.read(new byte[offset2]); + } + assertThat(test.getCount(), 
Matchers.is((long)offset1 + offset2)); + test.reset(); + assertThat(test.getCount(), Matchers.is((long)offset1)); + int offset3 = Randomness.get().nextInt(offset2); + if (randomBoolean()) { + test.skip(offset3); + } else { + test.read(new byte[offset3]); + } + test.mark(testArray.length); + assertThat(test.getCount(), Matchers.is((long)offset1 + offset3)); + int offset4 = Randomness.get().nextInt(testArray.length - offset1 - offset3); + if (randomBoolean()) { + test.skip(offset4); + } else { + test.read(new byte[offset4]); + } + assertThat(test.getCount(), Matchers.is((long)offset1 + offset3 + offset4)); + test.reset(); + assertThat(test.getCount(), Matchers.is((long)offset1 + offset3)); + test.readAllBytes(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + test.close(); + assertThat(test.getCount(), Matchers.is((long)testArray.length)); + } + } From c816c45be3e105e9ceeef4daf0f50269b573f94a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Dec 2019 23:05:37 +0200 Subject: [PATCH 018/142] Renaming and more javadocs --- .../encrypted/ChainPacketsInputStream.java | 150 -------------- .../encrypted/ChainingInputStream.java | 190 ++++++++++++++++++ .../DecryptionPacketsInputStream.java | 10 +- .../EncryptionPacketsInputStream.java | 16 +- 4 files changed, 203 insertions(+), 163 deletions(-) delete mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java deleted file mode 100644 index 2bbd26f6aa712..0000000000000 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainPacketsInputStream.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.repositories.encrypted; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Objects; -import org.elasticsearch.common.Nullable; - -public abstract class ChainPacketsInputStream extends InputStream { - - private InputStream packetIn; - private InputStream markIn; - private boolean closed; - - public ChainPacketsInputStream() { - this.packetIn = null; - this.markIn = null; - this.closed = false; - } - - abstract boolean hasNextPacket(@Nullable InputStream currentPacketIn); - - abstract InputStream nextPacket(@Nullable InputStream currentPacketIn) throws IOException; - - @Override - public int read() throws IOException { - ensureOpen(); - do { - int byteVal = packetIn == null ? -1 : packetIn.read(); - if (byteVal != -1) { - return byteVal; - } - if (false == hasNextPacket(packetIn)) { - return -1; - } - nextIn(); - } while (true); - } - - @Override - public int read(byte[] b, int off, int len) throws IOException { - ensureOpen(); - Objects.checkFromIndexSize(off, len, b.length); - if (len == 0) { - return 0; - } - do { - int bytesRead = packetIn == null ? 
-1 : packetIn.read(b, off, len); - if (bytesRead != -1) { - return bytesRead; - } - if (false == hasNextPacket(packetIn)) { - return -1; - } - nextIn(); - } while (true); - } - - @Override - public long skip(long n) throws IOException { - ensureOpen(); - if (n <= 0) { - return 0; - } - long bytesRemaining = n; - while (bytesRemaining > 0) { - long bytesSkipped = packetIn == null ? 0 : packetIn.skip(bytesRemaining); - if (bytesSkipped == 0) { - int byteRead = read(); - if (byteRead == -1) { - break; - } else { - bytesRemaining--; - } - } else { - bytesRemaining -= bytesSkipped; - } - } - return n - bytesRemaining; - } - - @Override - public int available() throws IOException { - ensureOpen(); - return packetIn == null ? 0 : packetIn.available(); - } - - @Override - public boolean markSupported() { - return true; - } - - @Override - public void mark(int readlimit) { - if (markSupported()) { - markIn = packetIn; - if (markIn != null) { - markIn.mark(readlimit); - } - } - } - - @Override - public void reset() throws IOException { - if (false == markSupported()) { - throw new IOException("Mark/reset not supported"); - } - packetIn = markIn; - if (packetIn != null) { - packetIn.reset(); - } - } - - @Override - public void close() throws IOException { - if (false == closed) { - closed = true; - if (packetIn != null) { - packetIn.close(); - } - while (hasNextPacket(packetIn)) { - nextIn(); - } - } - } - - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream is closed"); - } - } - - private void nextIn() throws IOException { - if (packetIn != null) { - packetIn.close(); - } - packetIn = nextPacket(packetIn); - if (packetIn == null) { - throw new NullPointerException(); - } - if (markSupported() && false == packetIn.markSupported()) { - throw new IllegalStateException("Packet input stream must support mark"); - } - } - -} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java new file mode 100644 index 0000000000000..9d1878b9b3174 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.repositories.encrypted; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; +import org.elasticsearch.common.Nullable; + +/** + * A {@code ChainingInputStream} concatenates several input streams into a single one. + * It starts reading from the first input stream until it's exhausted, whereupon + * it closes it and starts reading from the next one, until the last component stream is + * exhausted. + *

+ * The implementing subclass must provide the component input streams and describe the + * chaining order, by implementing the {@link #nextElement(InputStream)} and + * {@link #hasNextElement(InputStream)} abstract methods. The {@code ChainingInputStream} + * assumes ownership of the component input streams as they are generated. They should + * not be accessed by any other callers and they will be closed when they are exhausted + * and before requesting the next one. The previous element instance is passed to the + * {@code nextElement} method to obtain the next component. + *
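+ * For example, a sketch of a subclass that chains the streams of a given list
+ * (the list and its element streams are assumed to be dedicated to this chain, and,
+ * as described below, the element streams are assumed to support {@code mark}) might be:
+ * <pre>{@code
+ * final Iterator<InputStream> streams = streamList.iterator();
+ * InputStream chained = new ChainingInputStream() {
+ *     boolean hasNextElement(InputStream currentElementIn) {
+ *         return streams.hasNext();
+ *     }
+ *     InputStream nextElement(InputStream currentElementIn) {
+ *         return streams.next();
+ *     }
+ * };
+ * }</pre>
+ *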

+ * This stream does support {@code mark} and {@code reset} but it expects that the component + * streams also support it. Otherwise the implementing subclass must override + * {@code markSupported} to return {@code false}. + *

+ * The {@code close} call will close all the element streams that are generated (the same way + * as if they would be iterated during a read all operation) and any subsequent {@code read}, + * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. + *

+ * This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. + */ +public abstract class ChainingInputStream extends InputStream { + + /** + * The instance of the currently in use component input stream, + * i.e. the instance servicing the read and skip calls on the {@code ChainingInputStream} + */ + private InputStream currentIn; + /** + * The instance of the component input stream at the time of the last {@code mark} call. + */ + private InputStream markIn; + private boolean closed; + + /** + * This method is passed the current element input stream and must return {@code true} + * if there exists a successive one, or {@code false} otherwise. It passes {@code null} + * at the start, when no element input stream has yet been obtained. + */ + abstract boolean hasNextElement(@Nullable InputStream currentElementIn); + + abstract InputStream nextElement(@Nullable InputStream currentElementIn) throws IOException; + + @Override + public int read() throws IOException { + ensureOpen(); + do { + int byteVal = currentIn == null ? -1 : currentIn.read(); + if (byteVal != -1) { + return byteVal; + } + if (false == hasNextElement(currentIn)) { + return -1; + } + nextIn(); + } while (true); + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + ensureOpen(); + Objects.checkFromIndexSize(off, len, b.length); + if (len == 0) { + return 0; + } + do { + int bytesRead = currentIn == null ? -1 : currentIn.read(b, off, len); + if (bytesRead != -1) { + return bytesRead; + } + if (false == hasNextElement(currentIn)) { + return -1; + } + nextIn(); + } while (true); + } + + @Override + public long skip(long n) throws IOException { + ensureOpen(); + if (n <= 0) { + return 0; + } + long bytesRemaining = n; + while (bytesRemaining > 0) { + long bytesSkipped = currentIn == null ? 0 : currentIn.skip(bytesRemaining); + if (bytesSkipped == 0) { + int byteRead = read(); + if (byteRead == -1) { + break; + } else { + bytesRemaining--; + } + } else { + bytesRemaining -= bytesSkipped; + } + } + return n - bytesRemaining; + } + + @Override + public int available() throws IOException { + ensureOpen(); + return currentIn == null ? 
0 : currentIn.available(); + } + + @Override + public boolean markSupported() { + return true; + } + + @Override + public void mark(int readlimit) { + if (markSupported()) { + if (markIn != null && currentIn != markIn) { + try { + markIn.close(); + } catch (IOException e) { + } + } + markIn = currentIn; + if (markIn != null) { + markIn.mark(readlimit); + } + } + } + + @Override + public void reset() throws IOException { + if (false == markSupported()) { + throw new IOException("Mark/reset not supported"); + } + ensureOpen(); + currentIn = markIn; + if (currentIn != null) { + currentIn.reset(); + } + } + + @Override + public void close() throws IOException { + if (false == closed) { + closed = true; + if (currentIn != null) { + currentIn.close(); + } + if (markIn != null) { + markIn.close(); + } + while (hasNextElement(currentIn)) { + nextIn(); + } + } + } + + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream is closed"); + } + } + + private void nextIn() throws IOException { + if (currentIn != null && currentIn != markIn) { + currentIn.close(); + } + currentIn = nextElement(currentIn); + if (currentIn == null) { + throw new NullPointerException(); + } + if (markSupported() && false == currentIn.markSupported()) { + throw new IllegalStateException("Component input stream must support mark"); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 9a44289109185..a41fdac47524a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -23,7 +23,7 @@ import java.util.NoSuchElementException; import java.util.Objects; -public final class DecryptionPacketsInputStream extends ChainPacketsInputStream { +public final class DecryptionPacketsInputStream extends ChainingInputStream { private final InputStream source; private final SecretKey secretKey; @@ -58,16 +58,16 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int } @Override - boolean hasNextPacket(InputStream currentPacketIn) { + boolean hasNextElement(InputStream currentElementIn) { return hasNext; } @Override - InputStream nextPacket(InputStream currentPacketIn) throws IOException { - if (currentPacketIn != null && currentPacketIn.read() != -1) { + InputStream nextElement(InputStream currentElementIn) throws IOException { + if (currentElementIn != null && currentElementIn.read() != -1) { throw new IllegalStateException("Stream for previous packet has not been fully processed"); } - if (false == hasNextPacket(currentPacketIn)) { + if (false == hasNextElement(currentElementIn)) { throw new NoSuchElementException(); } PrefixInputStream packetInputStream = new PrefixInputStream(source, diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index fde2887a0a257..2ef5565769419 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -23,7 +23,7 @@ import java.util.NoSuchElementException; import java.util.Objects; -public final class EncryptionPacketsInputStream extends ChainPacketsInputStream { +public final class EncryptionPacketsInputStream extends ChainingInputStream { private static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; @@ -57,22 +57,22 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int } @Override - boolean hasNextPacket(InputStream currentPacketIn) { - if (currentPacketIn != null && currentPacketIn instanceof CountingInputStream == false) { + boolean hasNextElement(InputStream currentElementIn) { + if (currentElementIn != null && currentElementIn instanceof CountingInputStream == false) { throw new IllegalStateException(); } - if (currentPacketIn != null && ((CountingInputStream) currentPacketIn).getCount() > encryptedPacketLength) { + if (currentElementIn != null && ((CountingInputStream) currentElementIn).getCount() > encryptedPacketLength) { throw new IllegalStateException(); } - return currentPacketIn == null || ((CountingInputStream) currentPacketIn).getCount() == encryptedPacketLength; + return currentElementIn == null || ((CountingInputStream) currentElementIn).getCount() == encryptedPacketLength; } @Override - InputStream nextPacket(InputStream currentPacketIn) throws IOException { - if (currentPacketIn != null && currentPacketIn.read() != -1) { + InputStream nextElement(InputStream currentElementIn) throws IOException { + if (currentElementIn != null && currentElementIn.read() != -1) { throw new IllegalStateException("Stream for previous packet has not been fully processed"); } - if (false == hasNextPacket(currentPacketIn)) { + if (false == hasNextElement(currentElementIn)) { throw new NoSuchElementException(); } if (markSourceOnNextPacket != -1) { From 29c484b7cff923f043c6be51b713a505e91f6920 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 6 Dec 2019 01:51:52 +0200 Subject: [PATCH 019/142] Refactor ChainingInputStream --- .../encrypted/ChainingInputStream.java | 66 ++++++++++--------- .../DecryptionPacketsInputStream.java | 11 +--- .../EncryptionPacketsInputStream.java | 26 +++----- 3 files changed, 46 insertions(+), 57 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 9d1878b9b3174..3300d86b890b8 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -17,12 +17,12 @@ * exhausted. *

* The implementing subclass must provide the component input streams and describe the - * chaining order, by implementing the {@link #nextElement(InputStream)} and - * {@link #hasNextElement(InputStream)} abstract methods. The {@code ChainingInputStream} + * chaining order, by implementing the {@link #nextElement(InputStream)} method. This * assumes ownership of the component input streams as they are generated. They should * not be accessed by any other callers and they will be closed when they are exhausted * and before requesting the next one. The previous element instance is passed to the - * {@code nextElement} method to obtain the next component. + * {@code nextElement} method to obtain the next component. This is similar in scope to + * {@link java.io.SequenceInputStream}. *

* This stream does support {@code mark} and {@code reset} but it expects that the component * streams also support it. Otherwise the implementing subclass must override @@ -36,6 +36,8 @@ */ public abstract class ChainingInputStream extends InputStream { + private static final InputStream EXHAUSTED_MARKER = InputStream.nullInputStream(); + /** * The instance of the currently in use component input stream, * i.e. the instance servicing the read and skip calls on the {@code ChainingInputStream} @@ -48,13 +50,11 @@ public abstract class ChainingInputStream extends InputStream { private boolean closed; /** - * This method is passed the current element input stream and must return {@code true} - * if there exists a successive one, or {@code false} otherwise. It passes {@code null} - * at the start, when no element input stream has yet been obtained. + * This method is passed the current element input stream and must return the successive one, + * or {@code null} if the current element is the last one. It is passed the {@code null} value + * at the very start, when no element input stream has yet been obtained. */ - abstract boolean hasNextElement(@Nullable InputStream currentElementIn); - - abstract InputStream nextElement(@Nullable InputStream currentElementIn) throws IOException; + abstract @Nullable InputStream nextElement(@Nullable InputStream currentElementIn) throws IOException; @Override public int read() throws IOException { @@ -64,11 +64,8 @@ public int read() throws IOException { if (byteVal != -1) { return byteVal; } - if (false == hasNextElement(currentIn)) { - return -1; - } - nextIn(); - } while (true); + } while (nextIn()); + return -1; } @Override @@ -83,11 +80,8 @@ public int read(byte[] b, int off, int len) throws IOException { if (bytesRead != -1) { return bytesRead; } - if (false == hasNextElement(currentIn)) { - return -1; - } - nextIn(); - } while (true); + } while (nextIn()); + return -1; } @Override @@ -127,14 +121,17 @@ public boolean markSupported() { @Override public void mark(int readlimit) { if (markSupported()) { - if (markIn != null && currentIn != markIn) { + // closes any previously stored mark input stream + if (markIn != null && markIn != EXHAUSTED_MARKER && currentIn != markIn) { try { markIn.close(); } catch (IOException e) { + // an IOException on an input stream element is not important } } + // stores the current input stream to be reused in case of a reset markIn = currentIn; - if (markIn != null) { + if (markIn != null && markIn != EXHAUSTED_MARKER) { markIn.mark(readlimit); } } @@ -147,7 +144,7 @@ public void reset() throws IOException { } ensureOpen(); currentIn = markIn; - if (currentIn != null) { + if (currentIn != null && currentIn != EXHAUSTED_MARKER) { currentIn.reset(); } } @@ -156,15 +153,14 @@ public void reset() throws IOException { public void close() throws IOException { if (false == closed) { closed = true; - if (currentIn != null) { + if (currentIn != null && currentIn != EXHAUSTED_MARKER) { currentIn.close(); } - if (markIn != null) { + if (markIn != null && markIn != currentIn && markIn != EXHAUSTED_MARKER) { markIn.close(); } - while (hasNextElement(currentIn)) { - nextIn(); - } + // iterate over the input stream elements and close them + while (nextIn()) {} } } @@ -174,17 +170,23 @@ private void ensureOpen() throws IOException { } } - private void nextIn() throws IOException { + private boolean nextIn() throws IOException { + if (currentIn == EXHAUSTED_MARKER) { + return false; + } + // close the current element, but only if it is not 
saved because of mark if (currentIn != null && currentIn != markIn) { currentIn.close(); } currentIn = nextElement(currentIn); - if (currentIn == null) { - throw new NullPointerException(); - } - if (markSupported() && false == currentIn.markSupported()) { + if (markSupported() && currentIn != null && false == currentIn.markSupported()) { throw new IllegalStateException("Component input stream must support mark"); } + if (currentIn == null) { + currentIn = EXHAUSTED_MARKER; + return false; + } + return true; } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index a41fdac47524a..d7d1dabd60073 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -20,7 +20,6 @@ import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; -import java.util.NoSuchElementException; import java.util.Objects; public final class DecryptionPacketsInputStream extends ChainingInputStream { @@ -57,23 +56,19 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int this.counter = EncryptedRepository.PACKET_START_COUNTER; } - @Override - boolean hasNextElement(InputStream currentElementIn) { - return hasNext; - } - @Override InputStream nextElement(InputStream currentElementIn) throws IOException { if (currentElementIn != null && currentElementIn.read() != -1) { throw new IllegalStateException("Stream for previous packet has not been fully processed"); } - if (false == hasNextElement(currentElementIn)) { - throw new NoSuchElementException(); + if (false == hasNext) { + return null; } PrefixInputStream packetInputStream = new PrefixInputStream(source, packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES, false); int currentPacketLength = decrypt(packetInputStream); + // only the last packet is shorter, so this must be the last packet if (currentPacketLength != packetLength) { hasNext = false; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 2ef5565769419..b631f8cfd51e1 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -20,7 +20,6 @@ import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; -import java.util.NoSuchElementException; import java.util.Objects; public final class EncryptionPacketsInputStream extends ChainingInputStream { @@ -56,29 +55,19 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int this.markSourceOnNextPacket = -1; } - @Override - boolean hasNextElement(InputStream currentElementIn) { - if (currentElementIn != null && currentElementIn instanceof CountingInputStream == 
false) { - throw new IllegalStateException(); - } - if (currentElementIn != null && ((CountingInputStream) currentElementIn).getCount() > encryptedPacketLength) { - throw new IllegalStateException(); - } - return currentElementIn == null || ((CountingInputStream) currentElementIn).getCount() == encryptedPacketLength; - } - @Override InputStream nextElement(InputStream currentElementIn) throws IOException { - if (currentElementIn != null && currentElementIn.read() != -1) { - throw new IllegalStateException("Stream for previous packet has not been fully processed"); - } - if (false == hasNextElement(currentElementIn)) { - throw new NoSuchElementException(); + // the last packet input stream is the only one shorter than encryptedPacketLength + if (currentElementIn != null && ((CountingInputStream) currentElementIn).getCount() < encryptedPacketLength) { + // there are no more packets + return null; } + // mark source input stream at packet boundary if (markSourceOnNextPacket != -1) { source.mark(markSourceOnNextPacket); markSourceOnNextPacket = -1; } + // create the new packet InputStream encryptionInputStream = new PrefixInputStream(source, packetLength, false); packetIv.putLong(4, counter++); if (counter == EncryptedRepository.PACKET_START_COUNTER) { @@ -103,8 +92,11 @@ public void mark(int readlimit) { if (readlimit <= 0) { throw new IllegalArgumentException("Mark readlimit must be a positive integer"); } + // handles the packet-wise part of the marking operation super.mark(encryptedPacketLength); + // saves the counter used to generate packet IVs markCounter = counter; + // stores the flag used to mark the source input stream at packet boundary markSourceOnNextPacket = readlimit; } } From db5f58e0894c454a39e548dfc389a52762375fc2 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 6 Dec 2019 02:14:44 +0200 Subject: [PATCH 020/142] Scarce EncryptionPacketsInputStream javadocs --- .../encrypted/EncryptionPacketsInputStream.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index b631f8cfd51e1..3b5765f98054d 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -22,6 +22,23 @@ import java.security.NoSuchAlgorithmException; import java.util.Objects; +/** + * An {@code EncryptionPacketsInputStream} wraps another input stream and encrypts its contents. + * The method of encryption is AES/GCM/NoPadding, which is a variant of authenticated encryption. + * The encryption works packet wise, i.e. the bytes are encrypted separately, using an unique + * {@code Cipher}. All the packets are encrypted using the same {@code SecretKey} but using a + * different Initialization Vector. The IV is comprised of an integer the same for all packets, + * a {@code nonce} that must not repeat for the same {@code secretKey}, and a monotonically + * increasing long counter. The packet size is preferably a large multiple of the AES block size, + * but this is not a requirement. + *
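+ * For illustration, the per-packet IV described above might be assembled as in the
+ * following sketch, which assumes the conventional 12-byte GCM IV and a 128-bit
+ * authentication tag ({@code nonce} and {@code packetCounter} are stand-in names):
+ * <pre>{@code
+ * ByteBuffer packetIv = ByteBuffer.allocate(12);
+ * packetIv.putInt(0, nonce);          // same value for every packet of this stream
+ * packetIv.putLong(4, packetCounter); // incremented for every packet
+ * GCMParameterSpec gcmSpec = new GCMParameterSpec(128, packetIv.array());
+ * }</pre>
+ *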

+ * This input stream supports the {@code mark} and {@code reset} operations only if the wrapped + * stream also supports them. A {@code mark} call will trigger the memory buffering of the current + * packet and will also trigger a {@code mark} call on the wrapped input stream on the next + * packet boundary. + * + * @see DecryptionPacketsInputStream + */ public final class EncryptionPacketsInputStream extends ChainingInputStream { private static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; From d6dc8751043cb0744bbe95fcaa337698deb61f15 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 8 Dec 2019 23:12:42 +0200 Subject: [PATCH 021/142] ChainingInputStream polishing and tests --- .../encrypted/ChainingInputStream.java | 69 ++++++++------ .../DecryptionPacketsInputStream.java | 4 +- .../EncryptionPacketsInputStream.java | 4 +- .../encrypted/ChainingInputStreamTests.java | 91 +++++++++++++++++++ 4 files changed, 137 insertions(+), 31 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 3300d86b890b8..a9f96a64d04d4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -11,50 +11,61 @@ import org.elasticsearch.common.Nullable; /** - * A {@code ChainingInputStream} concatenates several input streams into a single one. + * A {@code ChainingInputStream} concatenates multiple component input streams into a + * single input stream. * It starts reading from the first input stream until it's exhausted, whereupon - * it closes it and starts reading from the next one, until the last component stream is - * exhausted. + * it closes it and starts reading from the next one, until the last component input + * stream is exhausted. *

- * The implementing subclass must provide the component input streams and describe the - * chaining order, by implementing the {@link #nextElement(InputStream)} method. This - * assumes ownership of the component input streams as they are generated. They should - * not be accessed by any other callers and they will be closed when they are exhausted - * and before requesting the next one. The previous element instance is passed to the - * {@code nextElement} method to obtain the next component. This is similar in scope to - * {@link java.io.SequenceInputStream}. + * The implementing subclass provides the component input streams by implementing the + * {@link #nextComponent(InputStream)} method. This method receives the instance of the + * current input stream, which has been exhausted, and must return the next input stream. + * The {@code ChainingInputStream} assumes ownership of the newly generated component input + * stream, i.e. they should not be used by other callers and they will be closed when they + * are exhausted or when the {@code ChainingInputStream} is closed. *
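+ * For illustration only ({@code part1} and {@code part2} stand for any two byte arrays),
+ * a chain over two in-memory buffers could be put together like this:
+ * <pre>{@code
+ * InputStream first = new ByteArrayInputStream(part1);
+ * InputStream second = new ByteArrayInputStream(part2);
+ * InputStream chained = new ChainingInputStream() {
+ *     InputStream nextComponent(InputStream currentComponentIn) {
+ *         if (currentComponentIn == null) {
+ *             return first;   // the first component, requested at the start
+ *         } else if (currentComponentIn == first) {
+ *             return second;  // the second and last component
+ *         } else {
+ *             return null;    // signals that the chain is exhausted
+ *         }
+ *     }
+ * };
+ * byte[] allBytes = chained.readAllBytes(); // the contents of part1 followed by part2
+ * }</pre>
+ *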

* This stream does support {@code mark} and {@code reset} but it expects that the component - * streams also support it. Otherwise the implementing subclass must override - * {@code markSupported} to return {@code false}. + * streams also support it. If the component input streams do not support {@code mark} and + * {@code reset}, the implementing subclass must override {@code markSupported} to return + * {@code false}. *

- * The {@code close} call will close all the element streams that are generated (the same way - * as if they would be iterated during a read all operation) and any subsequent {@code read}, + * The {@code close} call will close the current component input stream and any subsequent {@code read}, * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. *

+ * The {@code ChainingInputStream} is similar in purpose to the {@link java.io.SequenceInputStream}. + *

* This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. */ public abstract class ChainingInputStream extends InputStream { + /** + * value for the current input stream when there are no subsequent streams left, i.e. when + * {@link #nextComponent(InputStream)} returns {@code null} + */ private static final InputStream EXHAUSTED_MARKER = InputStream.nullInputStream(); /** * The instance of the currently in use component input stream, - * i.e. the instance servicing the read and skip calls on the {@code ChainingInputStream} + * i.e. the instance currently servicing the read and skip calls on the {@code ChainingInputStream} */ private InputStream currentIn; /** * The instance of the component input stream at the time of the last {@code mark} call. */ private InputStream markIn; + /** + * {@code true} if {@link #close()} has been called; any subsequent {@code read}, {@code skip} + * {@code available} and {@code reset} calls will throw {@code IOException}s + */ private boolean closed; /** - * This method is passed the current element input stream and must return the successive one, - * or {@code null} if the current element is the last one. It is passed the {@code null} value - * at the very start, when no element input stream has yet been obtained. + * This method is responsible for generating the component input streams. + * It is passed the current input stream and must return the successive one, + * or {@code null} if the current component is the last one. It is passed the {@code null} value + * at the very start, when no component input stream has yet been obtained. */ - abstract @Nullable InputStream nextElement(@Nullable InputStream currentElementIn) throws IOException; + abstract @Nullable InputStream nextComponent(@Nullable InputStream currentComponentIn) throws IOException; @Override public int read() throws IOException { @@ -90,9 +101,12 @@ public long skip(long n) throws IOException { if (n <= 0) { return 0; } + if (currentIn == null) { + nextIn(); + } long bytesRemaining = n; while (bytesRemaining > 0) { - long bytesSkipped = currentIn == null ? 0 : currentIn.skip(bytesRemaining); + long bytesSkipped = currentIn.skip(bytesRemaining); if (bytesSkipped == 0) { int byteRead = read(); if (byteRead == -1) { @@ -110,7 +124,10 @@ public long skip(long n) throws IOException { @Override public int available() throws IOException { ensureOpen(); - return currentIn == null ? 
0 : currentIn.available(); + if (currentIn == null) { + nextIn(); + } + return currentIn.available(); } @Override @@ -120,13 +137,13 @@ public boolean markSupported() { @Override public void mark(int readlimit) { - if (markSupported()) { + if (markSupported() && false == closed) { // closes any previously stored mark input stream if (markIn != null && markIn != EXHAUSTED_MARKER && currentIn != markIn) { try { markIn.close(); } catch (IOException e) { - // an IOException on an input stream element is not important + // an IOException on a component input stream close is not important } } // stores the current input stream to be reused in case of a reset @@ -159,8 +176,6 @@ public void close() throws IOException { if (markIn != null && markIn != currentIn && markIn != EXHAUSTED_MARKER) { markIn.close(); } - // iterate over the input stream elements and close them - while (nextIn()) {} } } @@ -174,11 +189,11 @@ private boolean nextIn() throws IOException { if (currentIn == EXHAUSTED_MARKER) { return false; } - // close the current element, but only if it is not saved because of mark + // close the current component, but only if it is not saved because of mark if (currentIn != null && currentIn != markIn) { currentIn.close(); } - currentIn = nextElement(currentIn); + currentIn = nextComponent(currentIn); if (markSupported() && currentIn != null && false == currentIn.markSupported()) { throw new IllegalStateException("Component input stream must support mark"); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index d7d1dabd60073..c4cd39f466eea 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -57,8 +57,8 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int } @Override - InputStream nextElement(InputStream currentElementIn) throws IOException { - if (currentElementIn != null && currentElementIn.read() != -1) { + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn != null && currentComponentIn.read() != -1) { throw new IllegalStateException("Stream for previous packet has not been fully processed"); } if (false == hasNext) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 3b5765f98054d..aa0a7df880f94 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -73,9 +73,9 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int } @Override - InputStream nextElement(InputStream currentElementIn) throws IOException { + InputStream nextComponent(InputStream currentComponentIn) throws IOException { // the last packet input stream is the only one shorter than encryptedPacketLength - if (currentElementIn != null && ((CountingInputStream) 
currentElementIn).getCount() < encryptedPacketLength) { + if (currentComponentIn != null && ((CountingInputStream) currentComponentIn).getCount() < encryptedPacketLength) { // there are no more packets return null; } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java new file mode 100644 index 0000000000000..21f88fd5a1444 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.junit.Assert; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +public class ChainingInputStreamTests extends ESTestCase { + + // test pass through element wise + // test empty component input stream + // test stream ends when receives null + + public void testEmpty() throws Exception { + ChainingInputStream emptyStream = newEmptyStream(); + assertThat(emptyStream.read(), Matchers.is(-1)); + emptyStream = newEmptyStream(); + byte[] b = new byte[1 + Randomness.get().nextInt(8)]; + int off = Randomness.get().nextInt(b.length); + assertThat(emptyStream.read(b, off, b.length - off), Matchers.is(-1)); + emptyStream = newEmptyStream(); + assertThat(emptyStream.available(), Matchers.is(0)); + emptyStream = newEmptyStream(); + assertThat(emptyStream.skip(1 + Randomness.get().nextInt(32)), Matchers.is(0L)); + } + + public void testHeadComponentIsNull() throws Exception { + AtomicReference headInputStream = new AtomicReference<>(); + AtomicBoolean nextCalled = new AtomicBoolean(false); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + headInputStream.set(currentComponentIn); + nextCalled.set(true); + return null; + } + }; + assertThat(test.read(), Matchers.is(-1)); + assertThat(nextCalled.get(), Matchers.is(true)); + assertThat(headInputStream.get(), Matchers.nullValue()); + } + + public void testReadAll() throws Exception { + byte[] b = new byte[2 + Randomness.get().nextInt(32)]; + Randomness.get().nextBytes(b); + int splitIdx = Randomness.get().nextInt(b.length - 1); + ByteArrayInputStream first = new ByteArrayInputStream(b, 0, splitIdx + 1); + ByteArrayInputStream second = new ByteArrayInputStream(b, splitIdx + 1, b.length - splitIdx - 1); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentElementIn) throws IOException { + if (currentElementIn == null) { + return first; + } else if (currentElementIn == first) { + return second; + } else if (currentElementIn == second) { + return null; + } else { + throw new IllegalArgumentException(); + } + } + }; + byte[] result = test.readAllBytes(); + assertThat(result.length, Matchers.is(b.length)); + for (int i = 0; i < result.length; i++) { + 
Assert.assertThat(result[i], Matchers.is(b[i])); + } + } + + private ChainingInputStream newEmptyStream() { + return new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentElementIn) throws IOException { + return null; + } + }; + } + +} From 7cb48f6674cb43c55e219fd7130dee94b4938855 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 9 Dec 2019 00:38:49 +0200 Subject: [PATCH 022/142] ChainingInputStreamTests --- .../encrypted/ChainingInputStreamTests.java | 104 +++++++++++++++++- 1 file changed, 102 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 21f88fd5a1444..e84b487c297a7 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -21,9 +21,9 @@ public class ChainingInputStreamTests extends ESTestCase { // test pass through element wise // test empty component input stream - // test stream ends when receives null + // test close - public void testEmpty() throws Exception { + public void testEmptyChain() throws Exception { ChainingInputStream emptyStream = newEmptyStream(); assertThat(emptyStream.read(), Matchers.is(-1)); emptyStream = newEmptyStream(); @@ -52,6 +52,82 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { assertThat(headInputStream.get(), Matchers.nullValue()); } + public void testChaining() throws Exception { + int componentCount = 2 + Randomness.get().nextInt(8); + TestInputStream[] sourceComponents = new TestInputStream[componentCount]; + TestInputStream[] chainComponents = new TestInputStream[componentCount + 2]; + for (int i = 0; i < sourceComponents.length; i++) { + byte[] b = new byte[Randomness.get().nextInt(2)]; + Randomness.get().nextBytes(b); + sourceComponents[i] = new TestInputStream(b); + } + ChainingInputStream test = new ChainingInputStream() { + int i = 0; + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + chainComponents[i] = (TestInputStream) currentComponentIn; + if (i < sourceComponents.length) { + return sourceComponents[i++]; + } else { + i++; + return null; + } + } + + @Override + public boolean markSupported() { + return false; + } + }; + test.readAllBytes(); + assertThat(chainComponents[0], Matchers.nullValue()); + assertThat(chainComponents[chainComponents.length - 1], Matchers.nullValue()); + for (int i = 0; i < sourceComponents.length; i++) { + assertThat(chainComponents[i+1], Matchers.is(sourceComponents[i])); + assertThat(chainComponents[i+1].closed.get(), Matchers.is(true)); + } + } + + public void testNullComponentTerminatesChain() throws Exception { + TestInputStream[] sourceComponents = new TestInputStream[3]; + TestInputStream[] chainComponents = new TestInputStream[5]; + byte[] b1 = new byte[1 + Randomness.get().nextInt(2)]; + Randomness.get().nextBytes(b1); + sourceComponents[0] = new TestInputStream(b1); + sourceComponents[1] = null; + byte[] b2 = new byte[1 + Randomness.get().nextInt(2)]; + Randomness.get().nextBytes(b2); + sourceComponents[2] = new TestInputStream(b2); + ChainingInputStream test = new ChainingInputStream() { + int i = 0; + @Override + InputStream 
nextComponent(InputStream currentComponentIn) throws IOException { + chainComponents[i] = (TestInputStream) currentComponentIn; + if (i < sourceComponents.length) { + return sourceComponents[i++]; + } else { + i++; + return null; + } + } + + @Override + public boolean markSupported() { + return false; + } + }; + byte[] b = test.readAllBytes(); + assertThat(b.length, Matchers.is(b1.length)); + for (int i = 0; i < b.length; i++) { + Assert.assertThat(b[i], Matchers.is(b1[i])); + } + assertThat(chainComponents[0], Matchers.nullValue()); + assertThat(chainComponents[1], Matchers.is(sourceComponents[0])); + assertThat(chainComponents[1].closed.get(), Matchers.is(true)); + assertThat(chainComponents[2], Matchers.nullValue()); + assertThat(chainComponents[3], Matchers.nullValue()); + } + public void testReadAll() throws Exception { byte[] b = new byte[2 + Randomness.get().nextInt(32)]; Randomness.get().nextBytes(b); @@ -88,4 +164,28 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { }; } + static class TestInputStream extends InputStream { + + final byte[] b; + int i = 0; + final AtomicBoolean closed = new AtomicBoolean(false); + + TestInputStream(byte[] b) { + this.b = b; + } + + @Override + public int read() throws IOException { + if (b == null || i >= b.length) { + return -1; + } + return b[i++]; + } + + @Override + public void close() throws IOException { + closed.set(true); + } + + } } From 83e028b6e9732e2f0a2180252a9781c1ad505720 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 9 Dec 2019 17:14:18 +0200 Subject: [PATCH 023/142] ChainingInputStreamTests without mark/reset --- .../encrypted/ChainingInputStream.java | 4 +- .../BufferOnMarkInputStreamTests.java | 2 - .../encrypted/ChainingInputStreamTests.java | 260 +++++++++++++++++- 3 files changed, 249 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index a9f96a64d04d4..951b56f6dc0ba 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -48,7 +48,7 @@ public abstract class ChainingInputStream extends InputStream { * The instance of the currently in use component input stream, * i.e. the instance currently servicing the read and skip calls on the {@code ChainingInputStream} */ - private InputStream currentIn; + protected InputStream currentIn; // protected for tests /** * The instance of the component input stream at the time of the last {@code mark} call. 
*/ @@ -156,10 +156,10 @@ public void mark(int readlimit) { @Override public void reset() throws IOException { + ensureOpen(); if (false == markSupported()) { throw new IOException("Mark/reset not supported"); } - ensureOpen(); currentIn = markIn; if (currentIn != null && currentIn != EXHAUSTED_MARKER) { currentIn.reset(); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 02e4f1a89730e..bda5e5684fef3 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -791,8 +791,6 @@ private Tuple getMockInfiniteInputStream() throws IO AtomicInteger bytesRead = new AtomicInteger(0); when(mockSource.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). thenAnswer(invocationOnMock -> { - final byte[] b = (byte[]) invocationOnMock.getArguments()[0]; - final int off = (int) invocationOnMock.getArguments()[1]; final int len = (int) invocationOnMock.getArguments()[2]; if (len == 0) { return 0; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index e84b487c297a7..0cfa9d112decd 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -7,21 +7,64 @@ package org.elasticsearch.repositories.encrypted; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.junit.Assert; +import org.mockito.Mockito; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + public class ChainingInputStreamTests extends ESTestCase { - // test pass through element wise - // test empty component input stream - // test close + // test mark/reset + + public void testSkipAcrossComponents() throws Exception { + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + return null; + } + }; + byte[] b1 = new byte[1 + Randomness.get().nextInt(16)]; + Randomness.get().nextBytes(b1); + test.currentIn = new ByteArrayInputStream(b1); + long nSkip = test.skip(b1.length + 1 + Randomness.get().nextInt(16)); + assertThat(nSkip, Matchers.is((long)b1.length)); + byte[] b2 = new byte[1 + Randomness.get().nextInt(16)]; + Randomness.get().nextBytes(b2); + test = new ChainingInputStream() { + boolean second = false; + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (false == second) { + second = true; + return new 
ByteArrayInputStream(b2); + } else { + return null; + } + } + }; + test.currentIn = new ByteArrayInputStream(b1); + long skipArg = b1.length + 1 + Randomness.get().nextInt(b2.length); + nSkip = test.skip(skipArg); + assertThat(nSkip, Matchers.is(skipArg)); + byte[] rest = test.readAllBytes(); + assertThat((long)rest.length, Matchers.is(b1.length + b2.length - nSkip)); + for (int i = rest.length - 1; i >= 0; i--) { + assertThat(rest[i], Matchers.is(b2[i + (int)nSkip - b1.length])); + } + } public void testEmptyChain() throws Exception { ChainingInputStream emptyStream = newEmptyStream(); @@ -36,7 +79,42 @@ public void testEmptyChain() throws Exception { assertThat(emptyStream.skip(1 + Randomness.get().nextInt(32)), Matchers.is(0L)); } - public void testHeadComponentIsNull() throws Exception { + public void testClose() throws Exception { + ChainingInputStream test1 = newEmptyStream(); + test1.close(); + IOException e = expectThrows(IOException.class, () -> { + test1.read(); + }); + assertThat(e.getMessage(), Matchers.is("Stream is closed")); + ChainingInputStream test2 = newEmptyStream(); + test2.close(); + byte[] b = new byte[2 + Randomness.get().nextInt(8)]; + int off = Randomness.get().nextInt(b.length - 1); + e = expectThrows(IOException.class, () -> { + test2.read(b, off, Randomness.get().nextInt(b.length - off)); + }); + assertThat(e.getMessage(), Matchers.is("Stream is closed")); + ChainingInputStream test3 = newEmptyStream(); + test3.close(); + e = expectThrows(IOException.class, () -> { + test3.skip(Randomness.get().nextInt(32)); + }); + ChainingInputStream test4 = newEmptyStream(); + test4.close(); + e = expectThrows(IOException.class, () -> { + test4.available(); + }); + ChainingInputStream test5 = newEmptyStream(); + test5.close(); + e = expectThrows(IOException.class, () -> { + test5.reset(); + }); + ChainingInputStream test6 = newEmptyStream(); + test6.close(); + test6.mark(Randomness.get().nextInt()); + } + + public void testHeadComponentArgumentIsNull() throws Exception { AtomicReference headInputStream = new AtomicReference<>(); AtomicBoolean nextCalled = new AtomicBoolean(false); ChainingInputStream test = new ChainingInputStream() { @@ -55,7 +133,6 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { public void testChaining() throws Exception { int componentCount = 2 + Randomness.get().nextInt(8); TestInputStream[] sourceComponents = new TestInputStream[componentCount]; - TestInputStream[] chainComponents = new TestInputStream[componentCount + 2]; for (int i = 0; i < sourceComponents.length; i++) { byte[] b = new byte[Randomness.get().nextInt(2)]; Randomness.get().nextBytes(b); @@ -65,12 +142,20 @@ public void testChaining() throws Exception { int i = 0; @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - chainComponents[i] = (TestInputStream) currentComponentIn; - if (i < sourceComponents.length) { + if (i == 0) { + assertThat(currentComponentIn, Matchers.nullValue()); return sourceComponents[i++]; - } else { + } else if (i < sourceComponents.length) { + assertThat(((TestInputStream) currentComponentIn).closed.get(), Matchers.is(true)); + assertThat(currentComponentIn, Matchers.is(sourceComponents[i-1])); + return sourceComponents[i++]; + } else if (i == sourceComponents.length) { + assertThat(((TestInputStream) currentComponentIn).closed.get(), Matchers.is(true)); + assertThat(currentComponentIn, Matchers.is(sourceComponents[i-1])); i++; return null; + } else { + throw new IllegalStateException(); } } 
@@ -80,11 +165,50 @@ public boolean markSupported() { } }; test.readAllBytes(); - assertThat(chainComponents[0], Matchers.nullValue()); - assertThat(chainComponents[chainComponents.length - 1], Matchers.nullValue()); - for (int i = 0; i < sourceComponents.length; i++) { - assertThat(chainComponents[i+1], Matchers.is(sourceComponents[i])); - assertThat(chainComponents[i+1].closed.get(), Matchers.is(true)); + } + + public void testEmptyInputStreamComponents() throws Exception { + // leading single empty stream + Tuple test = testEmptyComponentsInChain(3, Arrays.asList(0)); + byte[] result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + } + // leading double empty streams + test = testEmptyComponentsInChain(3, Arrays.asList(0, 1)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + } + // trailing single empty stream + test = testEmptyComponentsInChain(3, Arrays.asList(2)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + } + // trailing double empty stream + test = testEmptyComponentsInChain(3, Arrays.asList(1, 2)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + } + // middle single empty stream + test = testEmptyComponentsInChain(3, Arrays.asList(1)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + } + // leading and trailing empty streams + test = testEmptyComponentsInChain(3, Arrays.asList(0, 2)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(test.v2().length)); + for (int i = 0; i < result.length; i++) { + Assert.assertThat(result[i], Matchers.is(test.v2()[i])); } } @@ -128,6 +252,77 @@ public boolean markSupported() { assertThat(chainComponents[3], Matchers.nullValue()); } + public void testCallsForwardToCurrentComponent() throws Exception { + InputStream mockCurrentIn = mock(InputStream.class); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + throw new IllegalStateException(); + } + }; + test.currentIn = mockCurrentIn; + // verify "byte-wise read" is proxied to the current component stream + when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + test.read(); + verify(mockCurrentIn).read(); + // verify "array read" is proxied to the current component stream + when(mockCurrentIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). 
+ thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + // partial read return + int bytesCount = 1 + Randomness.get().nextInt(len); + return bytesCount; + } + }); + byte[] b = new byte[2 + Randomness.get().nextInt(32)]; + int len = 1 + Randomness.get().nextInt(b.length - 1); + int offset = Randomness.get().nextInt(b.length - len); + test.read(b, offset, len); + verify(mockCurrentIn).read(Mockito.eq(b), Mockito.eq(offset), Mockito.eq(len)); + // verify "skip" is proxied to the current component stream + long skipCount = 1 + Randomness.get().nextInt(3); + test.skip(skipCount); + verify(mockCurrentIn).skip(Mockito.eq(skipCount)); + // verify "available" is proxied to the current component stream + test.available(); + verify(mockCurrentIn).available(); + } + + public void testEmptyReadAsksForNext() throws Exception { + InputStream mockCurrentIn = mock(InputStream.class); + when(mockCurrentIn.markSupported()).thenAnswer(invocationOnMock -> true); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + return mockCurrentIn; + } + }; + test.currentIn = InputStream.nullInputStream(); + when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + test.read(); + verify(mockCurrentIn).read(); + // test "array read" + test.currentIn = InputStream.nullInputStream(); + when(mockCurrentIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). + thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + int bytesCount = 1 + Randomness.get().nextInt(len); + return bytesCount; + } + }); + byte[] b = new byte[2 + Randomness.get().nextInt(32)]; + int len = 1 + Randomness.get().nextInt(b.length - 1); + int offset = Randomness.get().nextInt(b.length - len); + test.read(b, offset, len); + verify(mockCurrentIn).read(Mockito.eq(b), Mockito.eq(offset), Mockito.eq(len)); + } + public void testReadAll() throws Exception { byte[] b = new byte[2 + Randomness.get().nextInt(32)]; Randomness.get().nextBytes(b); @@ -155,6 +350,45 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { } } + private byte[] concatenateArrays(byte[] b1, byte[] b2) { + byte[] result = new byte[b1.length + b2.length]; + System.arraycopy(b1, 0, result, 0, b1.length); + System.arraycopy(b2, 0, result, b1.length, b2.length); + return result; + } + + private Tuple testEmptyComponentsInChain(int componentCount, + List emptyComponentIndices) throws Exception { + byte[] result = new byte[0]; + InputStream[] sourceComponents = new InputStream[componentCount]; + for (int i = 0; i < componentCount; i++) { + if (emptyComponentIndices.contains(i)) { + sourceComponents[i] = InputStream.nullInputStream(); + } else { + byte[] b = new byte[1 + Randomness.get().nextInt(8)]; + Randomness.get().nextBytes(b); + sourceComponents[i] = new ByteArrayInputStream(b); + result = concatenateArrays(result, b); + } + } + return new Tuple<>(new ChainingInputStream() { + int i = 0; + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (i < sourceComponents.length) { + return sourceComponents[i++]; + } else { + return null; + } + } + + @Override + public boolean markSupported() { + return false; + } + }, result); + } + private ChainingInputStream newEmptyStream() { return new 
ChainingInputStream() { @Override From 26a624f81318dc6b6c27e5ee884c0f72af717e5f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 9 Dec 2019 21:46:14 +0200 Subject: [PATCH 024/142] ChainingInputStreamTests mark/reset --- .../encrypted/ChainingInputStream.java | 11 +- .../BufferOnMarkInputStreamTests.java | 8 +- .../encrypted/ChainingInputStreamTests.java | 154 +++++++++++++++++- .../encrypted/CountingInputStreamTests.java | 4 +- 4 files changed, 168 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 951b56f6dc0ba..57fa05ade4c52 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -42,7 +42,7 @@ public abstract class ChainingInputStream extends InputStream { * value for the current input stream when there are no subsequent streams left, i.e. when * {@link #nextComponent(InputStream)} returns {@code null} */ - private static final InputStream EXHAUSTED_MARKER = InputStream.nullInputStream(); + protected static final InputStream EXHAUSTED_MARKER = InputStream.nullInputStream(); // protected for tests /** * The instance of the currently in use component input stream, @@ -52,7 +52,7 @@ public abstract class ChainingInputStream extends InputStream { /** * The instance of the component input stream at the time of the last {@code mark} call. */ - private InputStream markIn; + protected InputStream markIn; // protected for tests /** * {@code true} if {@link #close()} has been called; any subsequent {@code read}, {@code skip} * {@code available} and {@code reset} calls will throw {@code IOException}s @@ -160,6 +160,13 @@ public void reset() throws IOException { if (false == markSupported()) { throw new IOException("Mark/reset not supported"); } + if (currentIn != null && currentIn != EXHAUSTED_MARKER && currentIn != markIn) { + try { + currentIn.close(); + } catch (IOException e) { + // an IOException on a component input stream close is not important + } + } currentIn = markIn; if (currentIn != null && currentIn != EXHAUSTED_MARKER) { currentIn.reset(); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index bda5e5684fef3..867d2072e9ae4 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -327,7 +327,11 @@ public void testInvalidateMarkAfterReset() throws Exception { assertThat(test.getCurrentBufferCount(), Matchers.is(bufferSize)); assertThat(test.getRemainingBufferCapacity(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(true)); - assertThat(test.resetCalled, Matchers.is(false)); + if (readLen3 > 0) { + assertThat(test.resetCalled, Matchers.is(false)); + } else { + assertThat(test.resetCalled, Matchers.is(true)); + } // read more bytes bytesReadBefore = bytesRead.get(); int readLen4 = 1 + Randomness.get().nextInt(2 * bufferSize); @@ 
-344,7 +348,7 @@ public void testInvalidateMarkAfterReset() throws Exception { assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(false)); - // assert reset does not work any more + // assert reset does not work anymore IOException e = expectThrows(IOException.class, () -> { test.reset(); }); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 0cfa9d112decd..fe5623340c610 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -22,13 +22,12 @@ import java.util.concurrent.atomic.AtomicReference; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class ChainingInputStreamTests extends ESTestCase { - // test mark/reset - public void testSkipAcrossComponents() throws Exception { ChainingInputStream test = new ChainingInputStream() { @Override @@ -350,6 +349,126 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { } } + public void testMark() throws Exception { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenAnswer(invocationOnMock -> true); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn == null) { + return mockIn; + } else { + return null; + } + } + }; + int readLimit = Randomness.get().nextInt(64); + // mark at the beginning + test.mark(readLimit); + assertThat(test.markIn, Matchers.nullValue()); + // mark intermediary position + when(mockIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + test.read(); + assertThat(test.currentIn, Matchers.is(mockIn)); + test.mark(readLimit); + assertThat(test.markIn, Matchers.is(mockIn)); + verify(mockIn).mark(Mockito.eq(readLimit)); + // mark end position + when(mockIn.read()).thenAnswer(invocationOnMock -> -1); + test.read(); + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + verify(mockIn, never()).close(); + readLimit = Randomness.get().nextInt(64); + test.mark(readLimit); + verify(mockIn).close(); + assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + } + + public void testReset() throws Exception { + InputStream mockMarkIn = mock(InputStream.class); + when(mockMarkIn.markSupported()).thenAnswer(invocationOnMock -> true); + InputStream mockCurrentIn = mock(InputStream.class); + when(mockCurrentIn.markSupported()).thenAnswer(invocationOnMock -> true); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + return null; + } + }; + test.currentIn = mockCurrentIn; + test.markIn = mockMarkIn; + test.reset(); + assertThat(test.currentIn, Matchers.is(mockMarkIn)); + assertThat(test.markIn, Matchers.is(mockMarkIn)); + verify(mockMarkIn).reset(); + when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> -1); + verify(mockMarkIn, never()).close(); + 
verify(mockCurrentIn).close(); + } + + public void testMarkAfterReset() throws Exception { + int len = 8 + Randomness.get().nextInt(8); + byte[] b = new byte[len]; + Randomness.get().nextBytes(b); + for (int p = 0; p <= len; p++) { + for (int mark1 = 0; mark1 < len; mark1++) { + for (int offset1 = 0; offset1 < len - mark1; offset1++) { + for (int mark2 = 0; mark2 < len - mark1; mark2++) { + for (int offset2 = 0; offset2 < len - mark1 - mark2; offset2++) { + final int pivot = p; + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn == null) { + return new TestInputStream(b, 0, pivot, 1); + } else if (((TestInputStream) currentComponentIn).label == 1) { + return new TestInputStream(b, pivot, len - pivot, 2); + } else if (((TestInputStream) currentComponentIn).label == 2) { + return null; + } else { + throw new IllegalStateException(); + } + } + }; + // read "mark1" bytes + byte[] pre = test.readNBytes(mark1); + for (int i = 0; i < pre.length; i++) { + assertThat(pre[i], Matchers.is(b[i])); + } + // first mark + test.mark(len); + // read "offset" bytes + byte[] span1 = test.readNBytes(offset1); + for (int i = 0; i < span1.length; i++) { + assertThat(span1[i], Matchers.is(b[mark1 + i])); + } + // reset back to "mark1" offset + test.reset(); + // read/replay "mark2" bytes + byte[] span2 = test.readNBytes(mark2); + for (int i = 0; i < span2.length; i++) { + assertThat(span2[i], Matchers.is(b[mark1 + i])); + } + // second mark + test.mark(len); + byte[] span3 = test.readNBytes(offset2); + for (int i = 0; i < span3.length; i++) { + assertThat(span3[i], Matchers.is(b[mark1 + mark2 + i])); + } + // reset to second mark + test.reset(); + // read rest of bytes + byte[] span4 = test.readAllBytes(); + for (int i = 0; i < span4.length; i++) { + assertThat(span4[i], Matchers.is(b[mark1 + mark2 + i])); + } + } + } + } + } + } + } + private byte[] concatenateArrays(byte[] b1, byte[] b2) { byte[] result = new byte[b1.length + b2.length]; System.arraycopy(b1, 0, result, 0, b1.length); @@ -401,16 +520,30 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { static class TestInputStream extends InputStream { final byte[] b; + final int label; + final int len; int i = 0; + int mark = -1; final AtomicBoolean closed = new AtomicBoolean(false); TestInputStream(byte[] b) { + this(b, 0, b.length, 0); + } + + TestInputStream(byte[] b, int label) { + this(b, 0, b.length, label); + } + + TestInputStream(byte[] b, int offset, int len, int label) { this.b = b; + this.i = offset; + this.len = len; + this.label = label; } @Override public int read() throws IOException { - if (b == null || i >= b.length) { + if (b == null || i >= len) { return -1; } return b[i++]; @@ -421,5 +554,20 @@ public void close() throws IOException { closed.set(true); } + @Override + public void mark(int readlimit) { + this.mark = i; + } + + @Override + public void reset() { + this.i = this.mark; + } + + @Override + public boolean markSupported() { + return true; + } + } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java index 40871edb7187e..a1b9fff9af540 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java +++ 
b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java @@ -27,7 +27,7 @@ public class CountingInputStreamTests extends ESTestCase { @BeforeClass static void createTestArray() throws Exception { - testArray = new byte[128]; + testArray = new byte[32]; for (int i = 0; i < testArray.length; i++) { testArray[i] = (byte) i; } @@ -123,7 +123,7 @@ public void testCountingForMarkAfterReset() throws Exception { CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); assertThat(test.getCount(), Matchers.is(0L)); assertThat(test.markSupported(), Matchers.is(true)); - int offset1 = Randomness.get().nextInt(testArray.length); + int offset1 = Randomness.get().nextInt(testArray.length - 1); if (randomBoolean()) { test.skip(offset1); } else { From aeb66986e5dbb6594aa99a6f0edb81ad28515c00 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 10 Dec 2019 01:55:04 +0200 Subject: [PATCH 025/142] More tests --- .../encrypted/ChainingInputStreamTests.java | 2 +- .../encrypted/CountingInputStreamTests.java | 6 +- .../encrypted/EncryptedRepositoryTests.java | 181 +++++++++++++++++- 3 files changed, 184 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index fe5623340c610..cf4620bd6e865 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -546,7 +546,7 @@ public int read() throws IOException { if (b == null || i >= len) { return -1; } - return b[i++]; + return b[i++] & 0xFF; } @Override diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java index a1b9fff9af540..5bc024a262f4c 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/CountingInputStreamTests.java @@ -87,7 +87,7 @@ public void testCountingForMarkAndReset() throws Exception { CountingInputStream test = new CountingInputStream(new ByteArrayInputStream(testArray), randomBoolean()); assertThat(test.getCount(), Matchers.is(0L)); assertThat(test.markSupported(), Matchers.is(true)); - int offset1 = Randomness.get().nextInt(testArray.length); + int offset1 = Randomness.get().nextInt(testArray.length - 1); if (randomBoolean()) { test.skip(offset1); } else { @@ -95,7 +95,7 @@ public void testCountingForMarkAndReset() throws Exception { } assertThat(test.getCount(), Matchers.is((long)offset1)); test.mark(testArray.length); - int offset2 = Randomness.get().nextInt(testArray.length - offset1); + int offset2 = 1 + Randomness.get().nextInt(testArray.length - offset1 - 1); if (randomBoolean()) { test.skip(offset2); } else { @@ -131,7 +131,7 @@ public void testCountingForMarkAfterReset() throws Exception { } assertThat(test.getCount(), Matchers.is((long)offset1)); test.mark(testArray.length); - int offset2 = 
Randomness.get().nextInt(testArray.length - offset1); + int offset2 = 1 + Randomness.get().nextInt(testArray.length - offset1 - 1); if (randomBoolean()) { test.skip(offset2); } else { diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java index e65120d749fcd..f2051b354a228 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -6,9 +6,188 @@ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.common.Randomness; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.security.SecureRandom; +import java.util.Arrays; public class EncryptedRepositoryTests extends ESTestCase { - public void testThatDoesNothing() { + + public void testSuccessEncryptAndDecryptSmallPacketLength() throws Exception { + int len = 8 + Randomness.get().nextInt(8); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + for (int packetLen : Arrays.asList(1, 2, 3, 4)) { + testEncryptAndDecryptSuccess(plainBytes, secretKey, nonce, packetLen); + } + } + + public void testSuccessEncryptAndDecryptLargePacketLength() throws Exception { + int len = 256 + Randomness.get().nextInt(256); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + for (int packetLen : Arrays.asList(len - 1, len - 2, len - 3, len - 4)) { + testEncryptAndDecryptSuccess(plainBytes, secretKey, nonce, packetLen); + } + } + + public void testSuccessEncryptAndDecryptTypicalPacketLength() throws Exception { + int len = 512 + Randomness.get().nextInt(512); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + for (int packetLen : Arrays.asList(128, 256, 512)) { + testEncryptAndDecryptSuccess(plainBytes, secretKey, nonce, packetLen); + } + } + + public void testFailureEncryptAndDecryptWrongNonce() throws Exception { + int len = 256 + Randomness.get().nextInt(256); + // 2-3 packets + int packetLen = 1 + Randomness.get().nextInt(len / 2); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int encryptNonce = Randomness.get().nextInt(); + int decryptNonce = Randomness.get().nextInt(); + while (decryptNonce == encryptNonce) { + decryptNonce = Randomness.get().nextInt(); + } + byte[] encryptedBytes; + try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, encryptNonce, + packetLen)) { + encryptedBytes = in.readAllBytes(); + } + try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, decryptNonce, + packetLen)) { + IOException e = expectThrows(IOException.class, () -> { + in.readAllBytes(); + }); + assertThat(e.getMessage(), 
Matchers.is("Invalid packet IV")); + } + } + + public void testFailureEncryptAndDecryptWrongKey() throws Exception { + int len = 256 + Randomness.get().nextInt(256); + // 2-3 packets + int packetLen = 1 + Randomness.get().nextInt(len / 2); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey encryptSecretKey = generateSecretKey(); + SecretKey decryptSecretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + byte[] encryptedBytes; + try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), encryptSecretKey, nonce, + packetLen)) { + encryptedBytes = in.readAllBytes(); + } + try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), decryptSecretKey, nonce, + packetLen)) { + IOException e = expectThrows(IOException.class, () -> { + in.readAllBytes(); + }); + assertThat(e.getMessage(), Matchers.is("javax.crypto.AEADBadTagException: Tag mismatch!")); + } + } + + public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { + int len = 8 + Randomness.get().nextInt(8); + // one packet + int packetLen = len + Randomness.get().nextInt(8); + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + byte[] encryptedBytes; + try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, + packetLen)) { + encryptedBytes = in.readAllBytes(); + } + for (int i = EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES + len + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; i++) { + for (int j = 0; j < 8; j++) { + // flip bit + encryptedBytes[i] ^= (1 << j); + // fail decryption + try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, + packetLen)) { + IOException e = expectThrows(IOException.class, () -> { + in.readAllBytes(); + }); + assertThat(e.getMessage(), Matchers.is("javax.crypto.AEADBadTagException: Tag mismatch!")); + } + // flip bit back + encryptedBytes[i] ^= (1 << j); + } + } + } + +// public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { +// int len = 16; +// int packetLen = 8; +// byte[] plainBytes = new byte[len]; +// Randomness.get().nextBytes(plainBytes); +// SecretKey secretKey = generateSecretKey(); +// int nonce = Randomness.get().nextInt(); +// byte[] encryptedBytes; +// try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, +// packetLen)) { +// encryptedBytes = in.readAllBytes(); +// } +// for (int k = 0; k < EncryptionPacketsInputStream.getEncryptionSize(len, packetLen); +// k += EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + packetLen) { +// for (int i = 0; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i++) { +// for (int j = 0; i < 8; j++) { +// // flip bit +// encryptedBytes[k + i] ^= (1 << j); +// // fail decryption +// try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, +// packetLen)) { +// IOException e = expectThrows(IOException.class, () -> { +// in.readAllBytes(); +// }); +// assertThat(e.getMessage(), Matchers.is("Invalid packet IV")); +// } +// // flip bit back +// encryptedBytes[k + i] ^= (1 << j); +// } +// } +// } +// } + + private void testEncryptAndDecryptSuccess(byte[] 
plainBytes, SecretKey secretKey, int nonce, int packetLen) throws Exception { + for (int len = 0; len < plainBytes.length; len++) { + byte[] encryptedBytes; + try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, + packetLen)) { + encryptedBytes = in.readAllBytes(); + } + byte[] decryptedBytes; + try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, + packetLen)) { + decryptedBytes = in.readAllBytes(); + } + assertThat(decryptedBytes.length, Matchers.is(len)); + for (int i = 0; i < len; i++) { + assertThat(decryptedBytes[i], Matchers.is(plainBytes[i])); + } + } + } + + private SecretKey generateSecretKey() throws Exception { + KeyGenerator keyGen = KeyGenerator.getInstance("AES"); + keyGen.init(256, new SecureRandom()); + return keyGen.generateKey(); } } From 29392899bed27d3daf3250926dfc3a5261db7d6e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 10 Dec 2019 10:39:56 +0200 Subject: [PATCH 026/142] WIP --- .../encrypted/EncryptedRepositoryTests.java | 67 ++++++++++--------- 1 file changed, 34 insertions(+), 33 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java index f2051b354a228..32a6dbcfdfa3b 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -115,7 +115,8 @@ public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { packetLen)) { encryptedBytes = in.readAllBytes(); } - for (int i = EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES + len + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; i++) { + for (int i = EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES + len + + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; i++) { for (int j = 0; j < 8; j++) { // flip bit encryptedBytes[i] ^= (1 << j); @@ -133,38 +134,38 @@ public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { } } -// public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { -// int len = 16; -// int packetLen = 8; -// byte[] plainBytes = new byte[len]; -// Randomness.get().nextBytes(plainBytes); -// SecretKey secretKey = generateSecretKey(); -// int nonce = Randomness.get().nextInt(); -// byte[] encryptedBytes; -// try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, -// packetLen)) { -// encryptedBytes = in.readAllBytes(); -// } -// for (int k = 0; k < EncryptionPacketsInputStream.getEncryptionSize(len, packetLen); -// k += EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + packetLen) { -// for (int i = 0; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i++) { -// for (int j = 0; i < 8; j++) { -// // flip bit -// encryptedBytes[k + i] ^= (1 << j); -// // fail decryption -// try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, -// packetLen)) { -// IOException e = expectThrows(IOException.class, () -> { -// in.readAllBytes(); -// }); -// assertThat(e.getMessage(), Matchers.is("Invalid 
packet IV")); -// } -// // flip bit back -// encryptedBytes[k + i] ^= (1 << j); -// } -// } -// } -// } + public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { + int len = 16; + int packetLen = 8; + byte[] plainBytes = new byte[len]; + Randomness.get().nextBytes(plainBytes); + SecretKey secretKey = generateSecretKey(); + int nonce = Randomness.get().nextInt(); + byte[] encryptedBytes; + try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, + packetLen)) { + encryptedBytes = in.readAllBytes(); + } + assertThat(encryptedBytes.length, Matchers.is((int) EncryptionPacketsInputStream.getEncryptionSize(len, packetLen))); + int encryptedPacketLen = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetLen + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + for (int i = 0; i < encryptedBytes.length; i += encryptedPacketLen) { + for (int j = 0; j < EncryptedRepository.GCM_IV_SIZE_IN_BYTES; j++) { + for (int k = 0; k < 8; k++) { + // flip bit + encryptedBytes[i + j] ^= (1 << k); + try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, + packetLen)) { + IOException e = expectThrows(IOException.class, () -> { + in.readAllBytes(); + }); + assertThat(e.getMessage(), Matchers.is("Invalid packet IV")); + } + // flip bit back + encryptedBytes[i + j] ^= (1 << k); + } + } + } + } private void testEncryptAndDecryptSuccess(byte[] plainBytes, SecretKey secretKey, int nonce, int packetLen) throws Exception { for (int len = 0; len < plainBytes.length; len++) { From 76678a62412d66a4031caaef50ae601f73a6b6bf Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 10 Dec 2019 11:09:21 +0200 Subject: [PATCH 027/142] DecryptionPacketsInputStream tests --- .../encrypted/DecryptionPacketsInputStream.java | 2 +- ...ts.java => DecryptionPacketsInputStreamTests.java} | 11 +++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) rename x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/{EncryptedRepositoryTests.java => DecryptionPacketsInputStreamTests.java} (94%) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index c4cd39f466eea..8d060bd9f15aa 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -39,7 +39,7 @@ public static long getDecryptionSize(long size, int packetLength) { long completePackets = size / encryptedPacketLength; long decryptedSize = completePackets * packetLength; if (size % encryptedPacketLength != 0) { - decryptedSize += (size % encryptedPacketLength) - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + decryptedSize += (size % encryptedPacketLength) - EncryptedRepository.GCM_IV_SIZE_IN_BYTES - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; } return decryptedSize; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java similarity index 94% rename from 
x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java rename to x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java index 32a6dbcfdfa3b..5be99a193ae91 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java @@ -18,7 +18,7 @@ import java.security.SecureRandom; import java.util.Arrays; -public class EncryptedRepositoryTests extends ESTestCase { +public class DecryptionPacketsInputStreamTests extends ESTestCase { public void testSuccessEncryptAndDecryptSmallPacketLength() throws Exception { int len = 8 + Randomness.get().nextInt(8); @@ -135,8 +135,8 @@ public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { } public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { - int len = 16; - int packetLen = 8; + int len = 8 + Randomness.get().nextInt(8); + int packetLen = 4 + Randomness.get().nextInt(4); byte[] plainBytes = new byte[len]; Randomness.get().nextBytes(plainBytes); SecretKey secretKey = generateSecretKey(); @@ -168,18 +168,21 @@ public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { } private void testEncryptAndDecryptSuccess(byte[] plainBytes, SecretKey secretKey, int nonce, int packetLen) throws Exception { - for (int len = 0; len < plainBytes.length; len++) { + for (int len = 0; len <= plainBytes.length; len++) { byte[] encryptedBytes; try (InputStream in = new EncryptionPacketsInputStream(new ByteArrayInputStream(plainBytes, 0, len), secretKey, nonce, packetLen)) { encryptedBytes = in.readAllBytes(); } + assertThat((long) encryptedBytes.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(len, packetLen))); byte[] decryptedBytes; try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, packetLen)) { decryptedBytes = in.readAllBytes(); } assertThat(decryptedBytes.length, Matchers.is(len)); + assertThat((long) decryptedBytes.length, Matchers.is(DecryptionPacketsInputStream.getDecryptionSize(encryptedBytes.length, + packetLen))); for (int i = 0; i < len; i++) { assertThat(decryptedBytes[i], Matchers.is(plainBytes[i])); } From f44b97c283c7d34574c3eae4a1b6322b6f726c11 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 10 Dec 2019 18:49:58 +0200 Subject: [PATCH 028/142] Tests done! 
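
A minimal round-trip sketch of what the packet-stream tests in this series verify, assuming only the constructors already exercised in the tests (source stream, AES SecretKey, int nonce, packet length); the class name is arbitrary and the snippet is illustrative only, not part of the plugin sources:

    package org.elasticsearch.repositories.encrypted;

    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;
    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.SecureRandom;

    public class PacketRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // 256-bit AES key, mirroring generateSecretKey() in the tests
            KeyGenerator keyGen = KeyGenerator.getInstance("AES");
            keyGen.init(256, new SecureRandom());
            SecretKey key = keyGen.generateKey();

            byte[] plain = "blob bytes to protect".getBytes(StandardCharsets.UTF_8);
            int nonce = 42;        // must match between encryption and decryption
            int packetLength = 8;  // plaintext bytes per encrypted packet

            // encrypt packet-wise
            byte[] encrypted;
            try (InputStream in = new EncryptionPacketsInputStream(
                    new ByteArrayInputStream(plain), key, nonce, packetLength)) {
                encrypted = in.readAllBytes();
            }
            // decrypt with the same key, nonce and packet length
            byte[] decrypted;
            try (InputStream in = new DecryptionPacketsInputStream(
                    new ByteArrayInputStream(encrypted), key, nonce, packetLength)) {
                decrypted = in.readAllBytes();
            }
            // decrypted now equals plain; the tests additionally check the
            // getEncryptionSize()/getDecryptionSize() bookkeeping against these lengths
        }
    }
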
--- .../encrypted/CountingInputStream.java | 6 +- .../EncryptionPacketsInputStream.java | 8 +- .../EncryptionPacketsInputStreamTests.java | 290 +++++++++++++++++- 3 files changed, 289 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index b30dd5cab9e88..10150be8aa22a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -24,9 +24,9 @@ */ public final class CountingInputStream extends FilterInputStream { - private long count; - private long mark; - private boolean closed; + protected long count; // protected for tests + protected long mark; // protected for tests + protected boolean closed; // protected for tests private final boolean closeSource; /** diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index aa0a7df880f94..67768c1aac922 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -43,15 +43,15 @@ public final class EncryptionPacketsInputStream extends ChainingInputStream { private static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; - private final InputStream source; + protected final InputStream source; // protected for tests private final SecretKey secretKey; private final int packetLength; private final ByteBuffer packetIv; private final int encryptedPacketLength; - private long counter; - private Long markCounter; - private int markSourceOnNextPacket; + protected long counter; // protected for tests + protected Long markCounter; // protected for tests + protected int markSourceOnNextPacket; // protected for tests public static long getEncryptionSize(long size, int packetLength) { return size + (size / packetLength + 1) * (EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + EncryptedRepository.GCM_IV_SIZE_IN_BYTES); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index f180900a16cee..1349feab45084 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.junit.BeforeClass; +import org.mockito.Mockito; import javax.crypto.Cipher; import javax.crypto.CipherInputStream; @@ -24,6 +25,11 @@ import java.security.SecureRandom; import java.util.Arrays; import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import 
static org.mockito.Mockito.when; public class EncryptionPacketsInputStreamTests extends ESTestCase { @@ -110,7 +116,7 @@ public void testPacketSizeMultipleOfAESBlockSize() throws Exception { packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, new DefaultBufferedReadAllStrategy()); } - public void testMarkAtPacketBoundary() throws Exception { + public void testMarkAndResetPacketBoundaryNoMock() throws Exception { int packetSize = 3 + Randomness.get().nextInt(512); int size = 4 * packetSize + Randomness.get().nextInt(512); int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); @@ -120,6 +126,7 @@ public void testMarkAtPacketBoundary() throws Exception { plaintextOffset, size), secretKey, nonce, packetSize)) { referenceCiphertextArray = encryptionInputStream.readAllBytes(); } + assertThat((long)referenceCiphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(size, packetSize))); int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, plaintextOffset, size), secretKey, nonce, packetSize)) { @@ -129,14 +136,17 @@ public void testMarkAtPacketBoundary() throws Exception { assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // reset at the beginning encryptionInputStream.reset(); + // read packet fragment test = encryptionInputStream.readNBytes(1 + Randomness.get().nextInt(encryptedPacketSize)); assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // reset at the beginning encryptionInputStream.reset(); + // read complete packet test = encryptionInputStream.readNBytes(encryptedPacketSize); assertSubArray(referenceCiphertextArray, 0, test, 0, test.length); // mark at the second packet boundary - encryptionInputStream.mark(2 * encryptedPacketSize + 1); + encryptionInputStream.mark(Integer.MAX_VALUE); + // read more than one packet test = encryptionInputStream.readNBytes(encryptedPacketSize + 1 + Randomness.get().nextInt(encryptedPacketSize)); assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); // reset at the second packet boundary @@ -144,17 +154,227 @@ public void testMarkAtPacketBoundary() throws Exception { int middlePacketOffset = Randomness.get().nextInt(encryptedPacketSize); test = encryptionInputStream.readNBytes(middlePacketOffset); assertSubArray(referenceCiphertextArray, encryptedPacketSize, test, 0, test.length); - // mark before third packet boundary - encryptionInputStream.mark(encryptedPacketSize - middlePacketOffset); // read up to the third packet boundary test = encryptionInputStream.readNBytes(encryptedPacketSize - middlePacketOffset); assertSubArray(referenceCiphertextArray, encryptedPacketSize + middlePacketOffset, test, 0, test.length); - // reset before the third packet boundary + // mark at the third packet boundary + encryptionInputStream.mark(Integer.MAX_VALUE); + test = encryptionInputStream.readAllBytes(); + assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize, test, 0, test.length); encryptionInputStream.reset(); - test = encryptionInputStream.readNBytes( - encryptedPacketSize - middlePacketOffset + 1 + Randomness.get().nextInt(encryptedPacketSize)); - assertSubArray(referenceCiphertextArray, encryptedPacketSize + middlePacketOffset, test, 0, test.length); + test = encryptionInputStream.readNBytes(1 + Randomness.get().nextInt( + 
referenceCiphertextArray.length - 2 * encryptedPacketSize)); + assertSubArray(referenceCiphertextArray, 2 * encryptedPacketSize, test, 0, test.length); + } + } + + public void testMarkResetInsidePacketNoMock() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(64); + int encryptedPacketSize = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int size = 3 * packetSize + Randomness.get().nextInt(64); + byte[] bytes = new byte[size]; + Randomness.get().nextBytes(bytes); + int nonce = Randomness.get().nextInt(); + EncryptionPacketsInputStream test = new EncryptionPacketsInputStream(new TestInputStream(bytes), secretKey, nonce, packetSize); + int offset1 = 1 + Randomness.get().nextInt(encryptedPacketSize); + // read past the first packet + test.readNBytes(encryptedPacketSize + offset1); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE + 2)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset1)); + assertThat(test.markCounter, Matchers.nullValue()); + int readLimit = 1 + Randomness.get().nextInt(packetSize); + // first mark + test.mark(readLimit); + assertThat(test.markCounter, Matchers.is(test.counter)); + assertThat(test.markSourceOnNextPacket, Matchers.is(readLimit)); + assertThat(test.markIn, Matchers.is(test.currentIn)); + assertThat(((CountingInputStream)test.markIn).mark, Matchers.is((long)offset1)); + assertThat(((TestInputStream)test.source).mark, Matchers.is(-1)); + // read before packet is complete + test.readNBytes(1 + Randomness.get().nextInt(encryptedPacketSize - offset1)); + assertThat(((TestInputStream)test.source).mark, Matchers.is(-1)); + // reset + test.reset(); + assertThat(test.markSourceOnNextPacket, Matchers.is(readLimit)); + assertThat(test.counter, Matchers.is(test.markCounter)); + assertThat(test.currentIn, Matchers.is(test.markIn)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset1)); + // read before the packet is complete + int offset2 = 1 + Randomness.get().nextInt(encryptedPacketSize - offset1); + test.readNBytes(offset2); + assertThat(((TestInputStream)test.source).mark, Matchers.is(-1)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset1 + offset2)); + // second mark + readLimit = 1 + Randomness.get().nextInt(packetSize); + test.mark(readLimit); + assertThat(((TestInputStream)test.source).mark, Matchers.is(-1)); + assertThat(test.markCounter, Matchers.is(test.counter)); + assertThat(test.markSourceOnNextPacket, Matchers.is(readLimit)); + assertThat(test.markIn, Matchers.is(test.currentIn)); + assertThat(((CountingInputStream)test.markIn).mark, Matchers.is((long)offset1 + offset2)); + } + + public void testMarkResetAcrossPacketsNoMock() throws Exception { + int packetSize = 3 + Randomness.get().nextInt(64); + int encryptedPacketSize = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int size = 3 * packetSize + Randomness.get().nextInt(64); + byte[] bytes = new byte[size]; + Randomness.get().nextBytes(bytes); + int nonce = Randomness.get().nextInt(); + EncryptionPacketsInputStream test = new EncryptionPacketsInputStream(new TestInputStream(bytes), secretKey, nonce, packetSize); + int readLimit = 2 * size + Randomness.get().nextInt(4096); + // mark at the beginning + test.mark(readLimit); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE)); + assertThat(test.markCounter, Matchers.is(Long.MIN_VALUE)); + assertThat(test.markSourceOnNextPacket, 
Matchers.is(readLimit)); + assertThat(test.markIn, Matchers.nullValue()); + // read past the first packet + int offset1 = 1 + Randomness.get().nextInt(encryptedPacketSize); + test.readNBytes(encryptedPacketSize + offset1); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + assertThat(((TestInputStream)test.source).mark, Matchers.is(0)); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE + 2)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset1)); + assertThat(test.markCounter, Matchers.is(Long.MIN_VALUE)); + assertThat(test.markIn, Matchers.nullValue()); + // reset at the beginning + test.reset(); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE)); + assertThat(test.currentIn, Matchers.nullValue()); + assertThat(((TestInputStream)test.source).off, Matchers.is(0)); + // read past the first two packets + int offset2 = 1 + Randomness.get().nextInt(encryptedPacketSize); + test.readNBytes(2 * encryptedPacketSize + offset2); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + assertThat(((TestInputStream)test.source).mark, Matchers.is(0)); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE + 3)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset2)); + assertThat(test.markCounter, Matchers.is(Long.MIN_VALUE)); + assertThat(test.markIn, Matchers.nullValue()); + // mark inside the third packet + test.mark(readLimit); + assertThat(test.markCounter, Matchers.is(Long.MIN_VALUE + 3)); + assertThat(test.markSourceOnNextPacket, Matchers.is(readLimit)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset2)); + assertThat(test.markIn, Matchers.is(test.currentIn)); + assertThat(((CountingInputStream)test.markIn).mark, Matchers.is((long)offset2)); + // read until the end + test.readAllBytes(); + assertThat(test.markCounter, Matchers.is(Long.MIN_VALUE + 3)); + assertThat(test.counter, Matchers.not(Long.MIN_VALUE + 3)); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + assertThat(test.markIn, Matchers.not(test.currentIn)); + assertThat(((CountingInputStream)test.markIn).mark, Matchers.is((long)offset2)); + // reset + test.reset(); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + assertThat(test.counter, Matchers.is(Long.MIN_VALUE + 3)); + assertThat(((CountingInputStream)test.currentIn).count, Matchers.is((long)offset2)); + assertThat(test.markIn, Matchers.is(test.currentIn)); + } + + public void testMarkAfterResetNoMock() throws Exception { + int packetSize = 1 + Randomness.get().nextInt(3); + int plainLen = packetSize + 1 + Randomness.get().nextInt(packetSize); + int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - plainLen + 1); + int nonce = Randomness.get().nextInt(); + final byte[] referenceCiphertextArray; + try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, + plaintextOffset, plainLen), secretKey, nonce, packetSize)) { + referenceCiphertextArray = encryptionInputStream.readAllBytes(); } + int encryptedLen = referenceCiphertextArray.length; + assertThat((long) encryptedLen, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(plainLen, packetSize))); + for (int mark1 = 0; mark1 < encryptedLen; mark1++) { + for (int offset1 = 0; offset1 < encryptedLen - mark1; offset1++) { + int mark2 = Randomness.get().nextInt(encryptedLen - mark1); + int offset2 = Randomness.get().nextInt(encryptedLen - mark1 - 
mark2); + EncryptionPacketsInputStream test = + new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, + plaintextOffset, plainLen), secretKey, nonce, packetSize); + // read "mark1" bytes + byte[] pre = test.readNBytes(mark1); + for (int i = 0; i < pre.length; i++) { + assertThat(pre[i], Matchers.is(referenceCiphertextArray[i])); + } + // first mark + test.mark(encryptedLen); + // read "offset" bytes + byte[] span1 = test.readNBytes(offset1); + for (int i = 0; i < span1.length; i++) { + assertThat(span1[i], Matchers.is(referenceCiphertextArray[mark1 + i])); + } + // reset back to "mark1" offset + test.reset(); + // read/replay "mark2" bytes + byte[] span2 = test.readNBytes(mark2); + for (int i = 0; i < span2.length; i++) { + assertThat(span2[i], Matchers.is(referenceCiphertextArray[mark1 + i])); + } + // second mark + test.mark(encryptedLen); + byte[] span3 = test.readNBytes(offset2); + for (int i = 0; i < span3.length; i++) { + assertThat(span3[i], Matchers.is(referenceCiphertextArray[mark1 + mark2 + i])); + } + // reset to second mark + test.reset(); + // read rest of bytes + byte[] span4 = test.readAllBytes(); + for (int i = 0; i < span4.length; i++) { + assertThat(span4[i], Matchers.is(referenceCiphertextArray[mark1 + mark2 + i])); + } + } + } + } + + public void testMark() throws Exception { + InputStream mockSource = mock(InputStream.class); + when(mockSource.markSupported()).thenAnswer(invocationOnMock -> true); + EncryptionPacketsInputStream test = new EncryptionPacketsInputStream(mockSource, mock(SecretKey.class), + Randomness.get().nextInt(), 1 + Randomness.get().nextInt(32)); + int readLimit = 1 + Randomness.get().nextInt(4096); + InputStream mockMarkIn = mock(InputStream.class); + test.markIn = mockMarkIn; + InputStream mockCurrentIn = mock(InputStream.class); + test.currentIn = mockCurrentIn; + test.counter = Randomness.get().nextLong(); + test.markCounter = Randomness.get().nextLong(); + test.markSourceOnNextPacket = Randomness.get().nextInt(); + // mark + test.mark(readLimit); + verify(mockMarkIn).close(); + assertThat(test.markIn, Matchers.is(mockCurrentIn)); + verify(test.markIn).mark(Mockito.anyInt()); + assertThat(test.currentIn, Matchers.is(mockCurrentIn)); + assertThat(test.markCounter, Matchers.is(test.counter)); + assertThat(test.markSourceOnNextPacket, Matchers.is(readLimit)); + } + + public void testReset() throws Exception { + InputStream mockSource = mock(InputStream.class); + when(mockSource.markSupported()).thenAnswer(invocationOnMock -> true); + EncryptionPacketsInputStream test = new EncryptionPacketsInputStream(mockSource, mock(SecretKey.class), + Randomness.get().nextInt(), 1 + Randomness.get().nextInt(32)); + InputStream mockMarkIn = mock(InputStream.class); + test.markIn = mockMarkIn; + InputStream mockCurrentIn = mock(InputStream.class); + test.currentIn = mockCurrentIn; + test.counter = Randomness.get().nextLong(); + test.markCounter = Randomness.get().nextLong(); + // source requires reset as well + test.markSourceOnNextPacket = -1; + // reset + test.reset(); + verify(mockCurrentIn).close(); + assertThat(test.currentIn, Matchers.is(mockMarkIn)); + verify(test.currentIn).reset(); + assertThat(test.markIn, Matchers.is(mockMarkIn)); + assertThat(test.counter, Matchers.is(test.markCounter)); + assertThat(test.markSourceOnNextPacket, Matchers.is(-1)); + verify(mockSource).reset(); } private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { @@ -209,4 +429,58 @@ public byte[] 
readAll(InputStream inputStream) throws IOException { } } + static class TestInputStream extends InputStream { + + final byte[] b; + final int label; + final int len; + int off = 0; + int mark = -1; + final AtomicBoolean closed = new AtomicBoolean(false); + + TestInputStream(byte[] b) { + this(b, 0, b.length, 0); + } + + TestInputStream(byte[] b, int label) { + this(b, 0, b.length, label); + } + + TestInputStream(byte[] b, int offset, int len, int label) { + this.b = b; + this.off = offset; + this.len = len; + this.label = label; + } + + @Override + public int read() throws IOException { + if (b == null || off >= len) { + return -1; + } + return b[off++] & 0xFF; + } + + @Override + public void close() throws IOException { + closed.set(true); + } + + @Override + public void mark(int readlimit) { + this.mark = off; + } + + @Override + public void reset() { + this.off = this.mark; + } + + @Override + public boolean markSupported() { + return true; + } + + } + } From 016164a6ae25a60180d4167f77ab9dfab0931aa2 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 11 Dec 2019 01:10:51 +0200 Subject: [PATCH 029/142] More javadocs --- .../encrypted/CountingInputStream.java | 2 +- .../DecryptionPacketsInputStream.java | 25 ++++++++++++ .../encrypted/EncryptedRepository.java | 1 + .../EncryptionPacketsInputStream.java | 39 ++++++++++++------- .../EncryptionPacketsInputStreamTests.java | 6 +-- 5 files changed, 56 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index 10150be8aa22a..6a9e7df9b75e1 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -12,7 +12,7 @@ /** * A {@code CountingInputStream} wraps another input stream and counts the number of bytes - * that have been read or skipped. This input stream must be used in place of the wrapped one. + * that have been read or skipped. * Bytes replayed following a {@code reset} call are not counted multiple times, i.e. only * the bytes that are produced in a single pass, without resets, by the wrapped stream are counted. * This input stream does no buffering on its own and only supports {@code mark} and diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 8d060bd9f15aa..48b36465cc58b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -22,6 +22,28 @@ import java.security.NoSuchAlgorithmException; import java.util.Objects; +/** + * A {@code DecryptionPacketsInputStream} wraps an encrypted input stream and decrypts + * its contents. This is designed (and tested) to decrypt only the encryption format that + * {@link EncryptionPacketsInputStream} generates. No decrypted bytes are returned before + * they are authenticated. + *
<p>
+ * The same parameters, namely {@code secretKey}, {@code nonce} and {@code packetLength}, + * that have been used during encryption must also be used for decryption, otherwise + * decryption will fail. + *
<p>
+ * This implementation buffers the encrypted packet in memory. The maximum packet size it can + * accommodate is {@link EncryptedRepository#MAX_PACKET_LENGTH_IN_BYTES}. + *
<p>
+ * This implementation does not support {@code mark} and {@code reset}. + *
<p>
+ * The {@code close} call will close the decryption input stream and any subsequent {@code read}, + * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. + *
<p>
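For orientation while reviewing, a stand-alone round-trip sketch follows; it is illustrative only and not part of the patch. It assumes the code runs in (or imports from) the org.elasticsearch.repositories.encrypted package, and the freshly generated AES key, the nonce value and the 64 KB packet length are example choices.

    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;
    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.SecureRandom;
    import java.util.Arrays;

    public class EncryptDecryptRoundTripSketch {
        public static void main(String[] args) throws Exception {
            KeyGenerator keyGen = KeyGenerator.getInstance("AES");
            keyGen.init(256);                               // example: 256-bit data encryption key
            SecretKey secretKey = keyGen.generateKey();
            int nonce = new SecureRandom().nextInt();       // must not repeat for the same key
            int packetLength = 64 * 1024;                   // example packet length

            byte[] plaintext = "some blob content".getBytes(StandardCharsets.UTF_8);

            byte[] ciphertext;
            try (InputStream encrypting = new EncryptionPacketsInputStream(
                    new ByteArrayInputStream(plaintext), secretKey, nonce, packetLength)) {
                ciphertext = encrypting.readAllBytes();
            }

            // decryption only succeeds with the exact same secretKey, nonce and packetLength
            byte[] decrypted;
            try (InputStream decrypting = new DecryptionPacketsInputStream(
                    new ByteArrayInputStream(ciphertext), secretKey, nonce, packetLength)) {
                decrypted = decrypting.readAllBytes();
            }
            System.out.println("round trip ok: " + Arrays.equals(plaintext, decrypted));
        }
    }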
+ * This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. + * + * @see EncryptionPacketsInputStream + */ public final class DecryptionPacketsInputStream extends ChainingInputStream { private final InputStream source; @@ -49,6 +71,9 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int this.source = Objects.requireNonNull(source); this.secretKey = Objects.requireNonNull(secretKey); this.nonce = nonce; + if (packetLength <= 0 || packetLength >= EncryptedRepository.MAX_PACKET_LENGTH_IN_BYTES) { + throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); + } this.packetLength = packetLength; this.packet = new byte[packetLength + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES]; this.iv = new byte[EncryptedRepository.GCM_IV_SIZE_IN_BYTES]; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 7e1131224118b..7ca6748e7783e 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -12,4 +12,5 @@ public class EncryptedRepository { static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; + static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 67768c1aac922..5c5c26d59f0ab 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -24,25 +24,38 @@ /** * An {@code EncryptionPacketsInputStream} wraps another input stream and encrypts its contents. - * The method of encryption is AES/GCM/NoPadding, which is a variant of authenticated encryption. - * The encryption works packet wise, i.e. the bytes are encrypted separately, using an unique - * {@code Cipher}. All the packets are encrypted using the same {@code SecretKey} but using a - * different Initialization Vector. The IV is comprised of an integer the same for all packets, - * a {@code nonce} that must not repeat for the same {@code secretKey}, and a monotonically - * increasing long counter. The packet size is preferably a large multiple of the AES block size, - * but this is not a requirement. + * The method of encryption is AES/GCM/NoPadding, which is a type of authenticated encryption. + * The encryption works packet wise, i.e. the stream is segmented into fixed-size byte packets + * which are separately encrypted using a unique {@link Cipher}. As an exception, only the last + * packet will have a different size, possibly zero. Note that the encrypted packets are + * larger compared to the plaintext packets, because they contain a 16 byte length trailing + * authentication tag. The resulting encrypted and authenticated packets are assembled back into + * the resulting stream. *
<p>
- * This input stream supports the {@code mark} and {@code reset} operations only if the wrapped - * stream also supports them. A {@code mark} call will trigger the memory buffering of the current + * The packets are encrypted using the same {@link SecretKey} but using a different initialization + * vector. The IV is 12 bytes wide and it's comprised of an integer {@code nonce}, the same for + * every packet in a stream, but which MUST not otherwise be repeated for the same {@code SecretKey} + * across other streams, and a monotonically increasing long counter. When assembling the resulting + * stream, the IV is prepended to the corresponding packet's ciphertext. + *
<p>
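To make the packet layout concrete, here is a small sketch of the IV assembly and the per-packet overhead. The 12-byte IV and 16-byte tag sizes come from the constants in this change; the concrete byte order of the IV fields and the 64 KB packet length are assumptions of the example, not something this patch dictates.

    import java.nio.ByteBuffer;

    public class PacketIvAndOverheadSketch {
        public static void main(String[] args) {
            final int gcmIvBytes = 12;              // GCM_IV_SIZE_IN_BYTES
            final int gcmTagBytes = 16;             // GCM_TAG_SIZE_IN_BYTES
            final int packetLength = 64 * 1024;     // example plaintext packet length

            int nonce = 0x12345678;                 // fixed for the whole stream, unique per key
            long counter = Long.MIN_VALUE;          // PACKET_START_COUNTER, incremented per packet

            // 12-byte IV = 4-byte nonce followed by the 8-byte packet counter
            // (the byte order here is an assumption of this sketch)
            byte[] iv = ByteBuffer.allocate(gcmIvBytes).putInt(nonce).putLong(counter).array();

            // on the wire each packet is [ IV | ciphertext of the packet | authentication tag ]
            int encryptedPacketLength = gcmIvBytes + packetLength + gcmTagBytes;
            System.out.println("IV bytes               : " + iv.length);
            System.out.println("encrypted packet bytes : " + encryptedPacketLength
                    + " (overhead " + (gcmIvBytes + gcmTagBytes) + " bytes per packet)");
        }
    }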
+ * The packet size is preferably a large multiple of the AES block size (16 bytes), but any positive + * integer value smaller than {@link EncryptedRepository#MAX_PACKET_LENGTH_IN_BYTES} is valid. + *
<p>
+ * This input stream supports the {@code mark} and {@code reset} operations, but only if the wrapped + * stream supports them as well. A {@code mark} call will trigger the memory buffering of the current * packet and will also trigger a {@code mark} call on the wrapped input stream on the next - * packet boundary. + * packet boundary. Upon a {@code reset} call, the buffered packet will be replayed and new packets + * will be generated starting from the marked packet boundary on the wrapped stream. + *
<p>
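A usage sketch of this buffering behaviour follows; it is illustrative only. ByteArrayInputStream is used because it supports mark/reset, and the key, the packet length and the read sizes are arbitrary example values.

    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;
    import java.io.ByteArrayInputStream;
    import java.security.SecureRandom;
    import java.util.Arrays;

    public class MarkResetSketch {
        public static void main(String[] args) throws Exception {
            SecretKey key = KeyGenerator.getInstance("AES").generateKey();
            byte[] plaintext = new byte[10_000];
            new SecureRandom().nextBytes(plaintext);

            try (EncryptionPacketsInputStream encrypting = new EncryptionPacketsInputStream(
                    new ByteArrayInputStream(plaintext), key, new SecureRandom().nextInt(), 2048)) {
                encrypting.readNBytes(1024);                    // advance into the first packet
                encrypting.mark(Integer.MAX_VALUE);             // start buffering from here
                byte[] once = encrypting.readNBytes(4096);      // read across a packet boundary
                encrypting.reset();                             // rewind to the marked position
                byte[] again = encrypting.readNBytes(4096);     // replays the same ciphertext bytes
                System.out.println("replayed identically: " + Arrays.equals(once, again));
            }
        }
    }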
+ * The {@code close} call will close the encryption input stream and any subsequent {@code read}, + * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. + *
<p>
+ * This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. * * @see DecryptionPacketsInputStream */ public final class EncryptionPacketsInputStream extends ChainingInputStream { - private static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; - protected final InputStream source; // protected for tests private final SecretKey secretKey; private final int packetLength; @@ -60,7 +73,7 @@ public static long getEncryptionSize(long size, int packetLength) { public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int nonce, int packetLength) { this.source = Objects.requireNonNull(source); this.secretKey = Objects.requireNonNull(secretKey); - if (packetLength <= 0 || packetLength >= MAX_PACKET_LENGTH_IN_BYTES) { + if (packetLength <= 0 || packetLength >= EncryptedRepository.MAX_PACKET_LENGTH_IN_BYTES) { throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); } this.packetLength = packetLength; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 1349feab45084..3257d6a7598ac 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -176,7 +176,7 @@ public void testMarkResetInsidePacketNoMock() throws Exception { Randomness.get().nextBytes(bytes); int nonce = Randomness.get().nextInt(); EncryptionPacketsInputStream test = new EncryptionPacketsInputStream(new TestInputStream(bytes), secretKey, nonce, packetSize); - int offset1 = 1 + Randomness.get().nextInt(encryptedPacketSize); + int offset1 = 1 + Randomness.get().nextInt(encryptedPacketSize - 1); // read past the first packet test.readNBytes(encryptedPacketSize + offset1); assertThat(test.counter, Matchers.is(Long.MIN_VALUE + 2)); @@ -276,8 +276,8 @@ public void testMarkResetAcrossPacketsNoMock() throws Exception { } public void testMarkAfterResetNoMock() throws Exception { - int packetSize = 1 + Randomness.get().nextInt(3); - int plainLen = packetSize + 1 + Randomness.get().nextInt(packetSize); + int packetSize = 4 + Randomness.get().nextInt(4); + int plainLen = packetSize + 1 + Randomness.get().nextInt(packetSize - 1); int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - plainLen + 1); int nonce = Randomness.get().nextInt(); final byte[] referenceCiphertextArray; From bae9a8d5786efe30b7f90b9819e66da5b4a2a730 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 19 Dec 2019 13:38:34 +0200 Subject: [PATCH 030/142] Repository and plugin, WIP --- .../plugin/repository-encrypted/build.gradle | 4 ++ .../licenses/bcpkix-fips-1.0.3.jar.sha1 | 1 + .../licenses/bcpkix-fips-LICENSE.txt | 12 ++++ .../licenses/bcpkix-fips-NOTICE.txt | 0 .../encrypted/EncryptedRepository.java | 31 +++++++- .../encrypted/EncryptedRepositoryPlugin.java | 70 +++++++++++++++++-- 6 files changed, 112 insertions(+), 6 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 create mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt create mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt diff --git 
a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index 5a2f82946f711..b216fd4de1810 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -8,4 +8,8 @@ esplugin { extendedPlugins = ['x-pack-core'] } +dependencies { + compile "org.bouncycastle:bcpkix-fips:1.0.3" +} + integTest.enabled = false diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 new file mode 100644 index 0000000000000..a5b07bac95422 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 @@ -0,0 +1 @@ +33c47b105777c9dcc8a08188186bd35401366bd1 diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt new file mode 100644 index 0000000000000..66ba32c7e58ca --- /dev/null +++ b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 7ca6748e7783e..9c1087b68105f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,11 +6,40 @@ package org.elasticsearch.repositories.encrypted; -public class EncryptedRepository { +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; + +import java.util.function.Function; + +public class EncryptedRepository extends BlobStoreRepository { static final int GCM_TAG_SIZE_IN_BYTES = 16; static final int GCM_IV_SIZE_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; + + private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata-"; + + private final BlobStoreRepository delegatedRepository; + private final char[] masterPassword; + + protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService + , BlobStoreRepository delegatedRepository, char[] materPassword) { + super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); + this.delegatedRepository = delegatedRepository; + this.masterPassword = materPassword; + } + + @Override + protected BlobStore createBlobStore() throws Exception { + return null; + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 93e511bb14383..52431cf3a3ff2 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -6,26 +6,86 @@ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.function.Function; -public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { +public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { - public EncryptedRepositoryPlugin(final Settings settings) { + static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", + "password", key -> SecureSetting.secureString(key, null)); + static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); + static final String TYPE = "encrypted"; + + private final Map cachedRepositoryPasswords = new HashMap<>(); + + public EncryptedRepositoryPlugin(Settings settings) { + // cache the passwords for all encrypted repositories during *plugin* instantiation + // the keystore-based secure passwords are not readable on repository instantiation + for (String repositoryName : ENCRYPTION_PASSWORD_SETTING.getNamespaces(settings)) { + Setting encryptionPasswordSetting = ENCRYPTION_PASSWORD_SETTING + .getConcreteSettingForNamespace(repositoryName); + SecureString encryptionPassword = encryptionPasswordSetting.get(settings); + cachedRepositoryPasswords.put(repositoryName, encryptionPassword.getChars()); + } } @Override public List> getSettings() { - return List.of(); + return List.of(ENCRYPTION_PASSWORD_SETTING); } @Override - public void reload(Settings settings) { - // Secure settings should be readable inside this method. + public Map getRepositories(final Environment env, final NamedXContentRegistry registry, + final ClusterService clusterService) { + return Collections.singletonMap(TYPE, new Repository.Factory() { + + @Override + public Repository create(RepositoryMetaData metadata) { + throw new UnsupportedOperationException(); + } + + @Override + public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { + String delegateType = DELEGATE_TYPE.get(metaData.settings()); + if (Strings.hasLength(delegateType) == false) { + throw new IllegalArgumentException(DELEGATE_TYPE.getKey() + " must be set"); + } + if (TYPE.equals(delegateType)) { + throw new IllegalArgumentException("Cannot encrypt an already encrypted repository. 
" + DELEGATE_TYPE.getKey() + + " must not be equal to " + TYPE); + } + if (false == cachedRepositoryPasswords.containsKey(metaData.name())) { + throw new IllegalArgumentException( + ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metaData.name()).getKey() + " must be set"); + } + Repository.Factory factory = typeLookup.apply(delegateType); + Repository delegatedRepository = factory.create(new RepositoryMetaData(metaData.name(), + delegateType, metaData.settings())); + if (false == (delegatedRepository instanceof BlobStoreRepository) || delegatedRepository instanceof EncryptedRepository) { + throw new IllegalArgumentException("Unsupported type " + DELEGATE_TYPE.getKey()); + } + char[] masterPassword = cachedRepositoryPasswords.get(metaData.name()); + return new EncryptedRepository(metaData, registry, clusterService, (BlobStoreRepository) delegatedRepository, + masterPassword); + } + }); } } From 80a079bde234e6f9d11f2d9513c9ae848550fb95 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 19 Dec 2019 19:38:08 +0200 Subject: [PATCH 031/142] Mhm, needs bc-jar --- .../encrypted/BlobEncryptionMetadata.java | 56 +++++ .../encrypted/EncryptedRepository.java | 227 +++++++++++++++++- 2 files changed, 276 insertions(+), 7 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java new file mode 100644 index 0000000000000..d5ab66c41a2c0 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -0,0 +1,56 @@ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +public final class BlobEncryptionMetadata { + + private final byte[] dataEncryptionKeyMaterial; + private final int nonce; + private final int packetLengthInBytes; + + public BlobEncryptionMetadata(byte[] dataEncryptionKeyMaterial, int nonce, int packetLengthInBytes) { + this.dataEncryptionKeyMaterial = dataEncryptionKeyMaterial; + this.nonce = nonce; + this.packetLengthInBytes = packetLengthInBytes; + } + + public byte[] getDataEncryptionKeyMaterial() { + return dataEncryptionKeyMaterial; + } + + public int getPacketLengthInBytes() { + return packetLengthInBytes; + } + + public int getNonce() { + return nonce; + } + + public BlobEncryptionMetadata(InputStream inputStream) throws IOException { + try (StreamInput in = new InputStreamStreamInput(inputStream)) { + final Version version = Version.readVersion(in); + in.setVersion(version); + this.dataEncryptionKeyMaterial = in.readByteArray(); + this.nonce = in.readInt(); + this.packetLengthInBytes = in.readInt(); + } + } + + public void write(OutputStream outputStream) throws IOException { + try (StreamOutput out = new OutputStreamStreamOutput(outputStream)) { + out.setVersion(Version.CURRENT); + out.writeByteArray(this.dataEncryptionKeyMaterial); + out.writeInt(this.nonce); + 
out.writeInt(this.packetLengthInBytes); + } + } + +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 9c1087b68105f..aab71a0e8c489 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,17 +6,41 @@ package org.elasticsearch.repositories.encrypted; +import org.bouncycastle.cms.CMSAlgorithm; +import org.bouncycastle.cms.CMSEnvelopedData; +import org.bouncycastle.cms.CMSEnvelopedDataGenerator; +import org.bouncycastle.cms.CMSException; +import org.bouncycastle.cms.CMSTypedData; +import org.bouncycastle.cms.PasswordRecipientId; +import org.bouncycastle.cms.PasswordRecipientInfoGenerator; +import org.bouncycastle.cms.RecipientInformation; +import org.bouncycastle.cms.RecipientInformationStore; +import org.bouncycastle.cms.jcajce.JceCMSContentEncryptorBuilder; +import org.bouncycastle.cms.jcajce.JcePasswordEnvelopedRecipient; +import org.bouncycastle.cms.jcajce.JcePasswordRecipientInfoGenerator; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.util.function.Function; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.util.List; +import java.util.Map; public class EncryptedRepository extends BlobStoreRepository { static final int GCM_TAG_SIZE_IN_BYTES = 16; @@ -24,22 +48,211 @@ public class EncryptedRepository extends BlobStoreRepository { static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; - static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 30; + static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB + static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB + // when something about the encryption scheme changes (eg. 
metadata format) we increment this version number + static final int ENCRYPTION_PROTOCOL_VERSION_NUMBER = 1; private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata-"; private final BlobStoreRepository delegatedRepository; private final char[] masterPassword; + private final KeyGenerator keyGenerator; + private final SecureRandom secureRandom; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService - , BlobStoreRepository delegatedRepository, char[] materPassword) { + , BlobStoreRepository delegatedRepository, char[] materPassword) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; this.masterPassword = materPassword; + this.keyGenerator = KeyGenerator.getInstance("AES"); + this.keyGenerator.init(256, SecureRandom.getInstance("SHA1PRNG")); + this.secureRandom = SecureRandom.getInstance("SHA1PRNG"); + // TODO run self-test to make sure encryption/decryption works correctly on this JVM } @Override protected BlobStore createBlobStore() throws Exception { - return null; + return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), keyGenerator, secureRandom, masterPassword); } + + @Override + protected void doStart() { + this.delegatedRepository.start(); + super.doStart(); + } + + @Override + protected void doStop() { + super.doStop(); + this.delegatedRepository.stop(); + } + + @Override + protected void doClose() { + super.doClose(); + this.delegatedRepository.close(); + } + + private static class EncryptedBlobStoreDecorator implements BlobStore { + + private final BlobStore delegatedBlobStore; + private final KeyGenerator keyGenerator; + private final SecureRandom secureRandom; + private final char[] masterPassword; + + EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator keyGenerator, SecureRandom secureRandom, + char[] masterPassword) { + this.delegatedBlobStore = delegatedBlobStore; + this.keyGenerator = keyGenerator; + this.secureRandom = secureRandom; + this.masterPassword = masterPassword; + } + + @Override + public void close() throws IOException { + this.delegatedBlobStore.close(); + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + BlobPath encryptionMetadataBlobPath = BlobPath.cleanPath(); + encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(ENCRYPTION_METADATA_PREFIX + ENCRYPTION_PROTOCOL_VERSION_NUMBER); + for (String pathComponent : path) { + encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(pathComponent); + } + return new EncryptedBlobContainerDecorator(this.delegatedBlobStore.blobContainer(path), + this.delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), this.keyGenerator, this.secureRandom, + this.masterPassword); + } + } + + private static class EncryptedBlobContainerDecorator implements BlobContainer { + + private final BlobContainer delegatedBlobContainer; + private final BlobContainer encryptionMetadataBlobContainer; + private final KeyGenerator keyGenerator; + private final SecureRandom secureRandom; + private final char[] masterPassword; + + EncryptedBlobContainerDecorator(BlobContainer delegatedBlobContainer, BlobContainer encryptionMetadataBlobContainer, + KeyGenerator keyGenerator, SecureRandom secureRandom, char[] masterPassword) { + this.delegatedBlobContainer = delegatedBlobContainer; + this.encryptionMetadataBlobContainer = encryptionMetadataBlobContainer; + 
this.masterPassword = masterPassword; + this.secureRandom = secureRandom; + this.keyGenerator = keyGenerator; + } + + @Override + public BlobPath path() { + return this.delegatedBlobContainer.path(); + } + + @Override + public InputStream readBlob(String blobName) throws IOException { + BytesReference encryptedMetadataBytes = Streams.readFully(this.encryptionMetadataBlobContainer.readBlob(blobName)); + final BlobEncryptionMetadata metadata; + try { + metadata = decryptMetadata(BytesReference.toBytes(encryptedMetadataBytes)); + } catch (CMSException e) { + throw new IOException(e); + } + SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, + metadata.getDataEncryptionKeyMaterial().length, "AES"); + return new DecryptionPacketsInputStream(this.delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, + metadata.getNonce(), metadata.getPacketLengthInBytes()); + } + + @Override + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + SecretKey dataEncryptionKey = keyGenerator.generateKey(); + int nonce = secureRandom.nextInt(); + long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionSize(blobSize, PACKET_LENGTH_IN_BYTES); + try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, + dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { + this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); + } + BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); + final byte[] encryptedMetadata; + try { + encryptedMetadata = encryptMetadata(metadata); + } catch (CMSException e) { + throw new IOException(e); + } + try (InputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + this.encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); + } + } + + private byte[] encryptMetadata(BlobEncryptionMetadata metadata) throws IOException, CMSException { + CMSEnvelopedDataGenerator envelopedDataGenerator = new CMSEnvelopedDataGenerator(); + PasswordRecipientInfoGenerator passwordRecipientInfoGenerator = new JcePasswordRecipientInfoGenerator(CMSAlgorithm.AES256_GCM + , masterPassword); + envelopedDataGenerator.addRecipientInfoGenerator(passwordRecipientInfoGenerator); + final CMSEnvelopedData envelopedData = envelopedDataGenerator.generate(new CMSTypedData() { + @Override + public ASN1ObjectIdentifier getContentType() { + return CMSObjectIdentifiers.data; + } + + @Override + public void write(OutputStream out) throws IOException, CMSException { + metadata.write(out); + } + + @Override + public Object getContent() { + return metadata; + } + }, new JceCMSContentEncryptorBuilder(CMSAlgorithm.AES256_GCM).build()); + return envelopedData.getEncoded(); + } + + private BlobEncryptionMetadata decryptMetadata(byte[] metadata) throws CMSException, IOException { + final CMSEnvelopedData envelopedData = new CMSEnvelopedData(metadata); + RecipientInformationStore recipients = envelopedData.getRecipientInfos(); + RecipientInformation recipient = recipients.get(new PasswordRecipientId()); + if (recipient == null) { + throw new IllegalArgumentException(); + } + final byte[] decryptedMetadata = recipient.getContent(new JcePasswordEnvelopedRecipient(masterPassword)); + return new BlobEncryptionMetadata(new 
ByteArrayInputStream(decryptedMetadata)); + } + + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { + // does not support atomic write + writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + } + + @Override + public DeleteResult delete() throws IOException { + this.encryptionMetadataBlobContainer.delete(); + return this.delegatedBlobContainer.delete(); + } + + @Override + public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOException { + this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + this.delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + } + + @Override + public Map listBlobs() throws IOException { + return this.delegatedBlobContainer.listBlobs(); + } + + @Override + public Map children() throws IOException { + return this.delegatedBlobContainer.children(); + } + + @Override + public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + return this.delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); + } + } + } From 0a4cc774e36e9970f9ad8ab17bdc864753511163 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 20 Dec 2019 09:34:54 +0200 Subject: [PATCH 032/142] WIP how to prevent multiple repositories with different passwords --- x-pack/plugin/repository-encrypted/build.gradle | 16 ++++++++++++++++ .../licenses/bc-fips-1.0.1.jar.sha1 | 1 + .../licenses/bc-fips-LICENSE.txt | 12 ++++++++++++ .../licenses/bc-fips-NOTICE.txt | 0 .../encrypted/BlobEncryptionMetadata.java | 6 ++++++ .../encrypted/EncryptedRepository.java | 4 +++- .../encrypted/EncryptedRepositoryPlugin.java | 1 - 7 files changed, 38 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 create mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt create mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index b216fd4de1810..08e04070d926c 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -8,7 +8,23 @@ esplugin { extendedPlugins = ['x-pack-core'] } +thirdPartyAudit { + ignoreViolations ( + 'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator', + 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2', + ) +} + dependencies { + compile "org.bouncycastle:bc-fips:1.0.1" compile "org.bouncycastle:bcpkix-fips:1.0.3" } diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 b/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 new file mode 100644 index 0000000000000..c0612895533ca --- /dev/null +++ b/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 @@ -0,0 +1 @@ 
+ed8dd3144761eaa33b9c56f5e2bef85f1b731d6f diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt b/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt new file mode 100644 index 0000000000000..66ba32c7e58ca --- /dev/null +++ b/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt b/x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index d5ab66c41a2c0..95e7a471d5d44 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + package org.elasticsearch.repositories.encrypted; import org.elasticsearch.Version; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index aab71a0e8c489..984f6dec36ee2 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,6 +6,8 @@ package org.elasticsearch.repositories.encrypted; +import org.bouncycastle.asn1.ASN1ObjectIdentifier; +import org.bouncycastle.asn1.cms.CMSObjectIdentifiers; import org.bouncycastle.cms.CMSAlgorithm; import org.bouncycastle.cms.CMSEnvelopedData; import org.bouncycastle.cms.CMSEnvelopedDataGenerator; @@ -43,6 +45,7 @@ import java.util.Map; public class EncryptedRepository extends BlobStoreRepository { + static final int GCM_TAG_SIZE_IN_BYTES = 16; static final int GCM_IV_SIZE_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; @@ -68,7 +71,6 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.keyGenerator = KeyGenerator.getInstance("AES"); this.keyGenerator.init(256, SecureRandom.getInstance("SHA1PRNG")); this.secureRandom = SecureRandom.getInstance("SHA1PRNG"); - // TODO run self-test to make sure encryption/decryption works correctly on this JVM } @Override diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 52431cf3a3ff2..5b49fa2a5ac9f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; From a0751c6df95ca3dd3353f7c9c13db8ab8ff5a124 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 23 Dec 2019 03:33:00 -0500 Subject: [PATCH 033/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/ChainingInputStream.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 57fa05ade4c52..35c84ff14c309 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -19,7 +19,8 @@ *
<p>
* The implementing subclass provides the component input streams by implementing the * {@link #nextComponent(InputStream)} method. This method receives the instance of the - * current input stream, which has been exhausted, and must return the next input stream. + * current input stream, which has been exhausted, and must return the next input stream, + or {@code null} if there are no more component streams. * The {@code ChainingInputStream} assumes ownership of the newly generated component input * stream, i.e. they should not be used by other callers and they will be closed when they * are exhausted or when the {@code ChainingInputStream} is closed. From 92e177fca29d87a9f78083d16161ff4db1b5c880 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 23 Dec 2019 03:56:11 -0500 Subject: [PATCH 034/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/ChainingInputStream.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 35c84ff14c309..afc6c2c9acb5a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -33,7 +33,8 @@ * The {@code close} call will close the current component input stream and any subsequent {@code read}, * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. *
<p>
- * The {@code ChainingInputStream} is similar in purpose to the {@link java.io.SequenceInputStream}. + * The {@code ChainingInputStream} is similar in purpose to the {@link java.io.SequenceInputStream}, + * with the addition of {@code mark} / {@code reset} support. *
<p>
* This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. */ From 5d9321abf4f3aab1e20cb661863972bdc0edad80 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 23 Dec 2019 03:58:51 -0500 Subject: [PATCH 035/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/EncryptionPacketsInputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 5c5c26d59f0ab..d5ae530bbbbb5 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -33,7 +33,7 @@ * the resulting stream. *
<p>
* The packets are encrypted using the same {@link SecretKey} but using a different initialization - * vector. The IV is 12 bytes wide and it's comprised of an integer {@code nonce}, the same for + * vector. The IV is 12 bytes wide and is comprised of a 4-byte integer {@code nonce}, the same for * every packet in a stream, but which MUST not otherwise be repeated for the same {@code SecretKey} * across other streams, and a monotonically increasing long counter. When assembling the resulting * stream, the IV is prepended to the corresponding packet's ciphertext. From c2314869bb37c58c5b8511391bed7022d4e77842 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Dec 2019 15:47:36 +0200 Subject: [PATCH 036/142] Tim's review WIP --- .../DecryptionPacketsInputStream.java | 31 ++++++++---- .../encrypted/EncryptedRepository.java | 4 +- .../EncryptionPacketsInputStream.java | 50 +++++++++++++------ .../DecryptionPacketsInputStreamTests.java | 14 +++--- .../EncryptionPacketsInputStreamTests.java | 24 ++++----- 5 files changed, 76 insertions(+), 47 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 48b36465cc58b..a5644c1fee496 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -56,13 +56,22 @@ public final class DecryptionPacketsInputStream extends ChainingInputStream { private boolean hasNext; private long counter; - public static long getDecryptionSize(long size, int packetLength) { - long encryptedPacketLength = packetLength + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + EncryptedRepository.GCM_IV_SIZE_IN_BYTES; - long completePackets = size / encryptedPacketLength; + /** + * Computes and returns the length of the plaintext given the {@code ciphertextLength} and the {@code packetLength} + * used during encryption. + * Each ciphertext packet is prepended by the Initilization Vector and appended the Authentication Tag. + * Decryption is 1:1, and the ciphertext is not padded, but stripping away the IV and the AT amounts to a shorter + * plaintext. 
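A worked example of this length arithmetic may help when reviewing. The packet length and blob size below are arbitrary example values, and the "one extra, possibly empty, last packet" in the forward direction follows the encryption stream's own description rather than code shown in this hunk.

    public class LengthArithmeticSketch {
        public static void main(String[] args) {
            final int ivBytes = 12, tagBytes = 16;      // GCM IV and tag lengths from this change
            final int packetLength = 64 * 1024;         // example packet length
            final long plaintextLength = 1_000_000L;    // example blob size

            // encryption: every packet, including a possibly empty last one, gains an IV and a tag
            long packets = plaintextLength / packetLength + 1;
            long ciphertextLength = plaintextLength + packets * (ivBytes + tagBytes);

            // decryption reverses it, mirroring getDecryptionLength above
            long encryptedPacketLength = packetLength + ivBytes + tagBytes;
            long completePackets = ciphertextLength / encryptedPacketLength;
            long recovered = completePackets * packetLength;
            if (ciphertextLength % encryptedPacketLength != 0) {
                recovered += (ciphertextLength % encryptedPacketLength) - ivBytes - tagBytes;
            }
            // prints: 1000000 -> 1000448 -> 1000000
            System.out.println(plaintextLength + " -> " + ciphertextLength + " -> " + recovered);
        }
    }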
+ * + * @see EncryptionPacketsInputStream#getEncryptionLength(long, int) + */ + public static long getDecryptionLength(long ciphertextLength, int packetLength) { + long encryptedPacketLength = packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; + long completePackets = ciphertextLength / encryptedPacketLength; long decryptedSize = completePackets * packetLength; - if (size % encryptedPacketLength != 0) { - decryptedSize += (size % encryptedPacketLength) - EncryptedRepository.GCM_IV_SIZE_IN_BYTES - - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + if (ciphertextLength % encryptedPacketLength != 0) { + decryptedSize += (ciphertextLength % encryptedPacketLength) - EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + - EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; } return decryptedSize; } @@ -75,8 +84,8 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); } this.packetLength = packetLength; - this.packet = new byte[packetLength + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES]; - this.iv = new byte[EncryptedRepository.GCM_IV_SIZE_IN_BYTES]; + this.packet = new byte[packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES]; + this.iv = new byte[EncryptedRepository.GCM_IV_LENGTH_IN_BYTES]; this.hasNext = true; this.counter = EncryptedRepository.PACKET_START_COUNTER; } @@ -90,7 +99,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { return null; } PrefixInputStream packetInputStream = new PrefixInputStream(source, - packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES, + packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES, false); int currentPacketLength = decrypt(packetInputStream); // only the last packet is shorter, so this must be the last packet @@ -123,7 +132,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { throw new IOException("Invalid packet IV"); } int packetLength = packetInputStream.read(packet); - if (packetLength < EncryptedRepository.GCM_TAG_SIZE_IN_BYTES) { + if (packetLength < EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES) { throw new IOException("Error while reading the packet"); } Cipher packetCipher = getPacketDecryptionCipher(iv); @@ -136,7 +145,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { } private Cipher getPacketDecryptionCipher(byte[] packetIv) throws IOException { - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, packetIv); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packetIv); try { Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 7ca6748e7783e..4a6b2fa801f08 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -7,8 +7,8 @@ package 
org.elasticsearch.repositories.encrypted; public class EncryptedRepository { - static final int GCM_TAG_SIZE_IN_BYTES = 16; - static final int GCM_IV_SIZE_IN_BYTES = 12; + static final int GCM_TAG_LENGTH_IN_BYTES = 16; + static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index d5ae530bbbbb5..a8caacf196a8d 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -33,13 +33,22 @@ * the resulting stream. *
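Before the javadoc refinements in this hunk, a brief orientation: the two streams are meant to be used as an encrypt/decrypt pair, following the same round-trip pattern as the tests further down in this patch. The sketch below is not part of the change; the class name, key size, nonce choice and packet length are illustrative, and it relies only on the public constructors that appear in these diffs.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

import org.elasticsearch.repositories.encrypted.DecryptionPacketsInputStream;
import org.elasticsearch.repositories.encrypted.EncryptionPacketsInputStream;

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(256); // illustrative key size
        SecretKey secretKey = keyGen.generateKey();
        int nonce = new SecureRandom().nextInt(); // must not be reused with the same key for another stream
        int packetLength = 1024;                  // illustrative; any positive value below the maximum packet length

        byte[] plaintext = "some blob destined for the encrypted repository".getBytes(StandardCharsets.UTF_8);

        // encrypt: the resulting bytes are a sequence of [ IV | ciphertext | tag ] packets
        byte[] ciphertext;
        try (InputStream encrypted = new EncryptionPacketsInputStream(
                new ByteArrayInputStream(plaintext), secretKey, nonce, packetLength)) {
            ciphertext = encrypted.readAllBytes();
        }

        // decrypt: the same secret key, nonce and packet length recover the original bytes
        byte[] decrypted;
        try (InputStream decryptedIn = new DecryptionPacketsInputStream(
                new ByteArrayInputStream(ciphertext), secretKey, nonce, packetLength)) {
            decrypted = decryptedIn.readAllBytes();
        }
        System.out.println(new String(decrypted, StandardCharsets.UTF_8));
    }
}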
* The packets are encrypted using the same {@link SecretKey} but using a different initialization - * vector. The IV is 12 bytes wide and is comprised of a 4-byte integer {@code nonce}, the same for - * every packet in a stream, but which MUST not otherwise be repeated for the same {@code SecretKey} - * across other streams, and a monotonically increasing long counter. When assembling the resulting - * stream, the IV is prepended to the corresponding packet's ciphertext. + * vector. The IV of each packet is 12 bytes wide and is comprised of a 4-byte integer {@code nonce}, + * the same for every packet in the stream, and a monotonically increasing 8-byte integer counter. + * The caller must assure that the same {@code nonce} is not reused for other encrypted streams + * using the same {@code secretKey}. The counter from the IV identifies the position of the packet + * in the encrypted stream, so that packets cannot be reordered without breaking the decryption. + * When assembling the encrypted stream, the IV is prepended to the corresponding packet's ciphertext. *
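To make the IV layout just described concrete, here is a small self-contained sketch, not taken from the patch, that assembles the 12-byte IV from a 4-byte nonce and an 8-byte counter in little-endian order (the same layout the tests verify with getInt() followed by getLong()) and then encrypts a single packet with AES/GCM, so that the resulting bytes are the IV, then the ciphertext, then the 16-byte authentication tag. The class name, key size and payload are illustrative.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.GCMParameterSpec;

public class PacketIvSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(256); // illustrative key size
        SecretKey secretKey = keyGen.generateKey();
        int nonce = new SecureRandom().nextInt(); // fixed for the whole stream, never reused with this key
        long counter = Long.MIN_VALUE;            // the packet start counter, incremented once per packet
        byte[] plainPacket = "example packet payload".getBytes(StandardCharsets.UTF_8);

        // 12-byte IV = 4-byte nonce || 8-byte monotonically increasing counter, little-endian
        ByteBuffer ivBuffer = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);
        ivBuffer.putInt(0, nonce);
        ivBuffer.putLong(4, counter);
        byte[] iv = ivBuffer.array();

        // 128-bit (16-byte) authentication tag; GCMParameterSpec takes the tag length in bits
        Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
        cipher.init(Cipher.ENCRYPT_MODE, secretKey, new GCMParameterSpec(16 * Byte.SIZE, iv));
        byte[] ciphertextAndTag = cipher.doFinal(plainPacket); // ciphertext with the tag appended

        // the encrypted packet as it appears in the resulting stream: IV || ciphertext || tag
        byte[] encryptedPacket = ByteBuffer.allocate(iv.length + ciphertextAndTag.length)
                .put(iv)
                .put(ciphertextAndTag)
                .array();
        System.out.println("encrypted packet length = " + encryptedPacket.length
                + " (= " + plainPacket.length + " + 12 + 16)");
    }
}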
- * The packet size is preferably a large multiple of the AES block size (128 bytes), but any positive - * integer value smaller than {@link EncryptedRepository#MAX_PACKET_LENGTH_IN_BYTES} is valid. + * The packet length is preferably a large multiple (typically 128) of the AES block size (128 bytes), + * but any positive integer value smaller than {@link EncryptedRepository#MAX_PACKET_LENGTH_IN_BYTES} + * is valid. A larger packet length incurs smaller relative size overhead because the 12 byte wide IV + * and the 16 byte wide authentication tag are constant no matter the packet length. A larger packet + * length also exposes more opportunities for the JIT compilation of the AES encryption loop. But + * {@code mark} will buffer up to packet length bytes, and, more importantly, decryption might + * need to allocate a memory buffer the size of the packet in order to assure that no un-authenticated + * decrypted ciphertext is returned. The decryption procedure is the primary factor that limits the + * packet length. *
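The size overhead described above can be checked numerically. The sketch below restates the two length helpers exactly as they appear in this patch (getDecryptionLength earlier in the DecryptionPacketsInputStream hunk, getEncryptionLength just below) and evaluates them for an illustrative 64 KiB packet length; the standalone class and the sample figures are mine, not part of the change.

public class PacketLengthSketch {

    static final int GCM_IV_LENGTH_IN_BYTES = 12;
    static final int GCM_TAG_LENGTH_IN_BYTES = 16;

    // ciphertext length for a given plaintext length: every packet, including the
    // (possibly empty) last one, carries a 12-byte IV and a 16-byte authentication tag
    static long getEncryptionLength(long plaintextLength, int packetLength) {
        return plaintextLength
                + (plaintextLength / packetLength + 1) * (GCM_TAG_LENGTH_IN_BYTES + GCM_IV_LENGTH_IN_BYTES);
    }

    // plaintext length recovered from the ciphertext length: strip the per-packet IV and tag
    static long getDecryptionLength(long ciphertextLength, int packetLength) {
        long encryptedPacketLength = packetLength + GCM_TAG_LENGTH_IN_BYTES + GCM_IV_LENGTH_IN_BYTES;
        long completePackets = ciphertextLength / encryptedPacketLength;
        long decryptedSize = completePackets * packetLength;
        if (ciphertextLength % encryptedPacketLength != 0) {
            decryptedSize += (ciphertextLength % encryptedPacketLength)
                    - GCM_IV_LENGTH_IN_BYTES - GCM_TAG_LENGTH_IN_BYTES;
        }
        return decryptedSize;
    }

    public static void main(String[] args) {
        int packetLength = 64 * 1024;      // illustrative packet length
        long plaintextLength = 1_000_000L; // illustrative blob size
        long ciphertextLength = getEncryptionLength(plaintextLength, packetLength);
        // 1,000,000 bytes span 15 full packets plus a 16,960-byte final packet,
        // i.e. 16 packets times 28 bytes of constant overhead = 448 extra bytes
        System.out.println(ciphertextLength);                                    // 1000448
        System.out.println(getDecryptionLength(ciphertextLength, packetLength)); // 1000000
    }
}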
* This input stream supports the {@code mark} and {@code reset} operations, but only if the wrapped * stream supports them as well. A {@code mark} call will trigger the memory buffering of the current @@ -56,18 +65,29 @@ */ public final class EncryptionPacketsInputStream extends ChainingInputStream { - protected final InputStream source; // protected for tests private final SecretKey secretKey; private final int packetLength; private final ByteBuffer packetIv; private final int encryptedPacketLength; - protected long counter; // protected for tests - protected Long markCounter; // protected for tests - protected int markSourceOnNextPacket; // protected for tests + final InputStream source; // package-protected for tests + long counter; // package-protected for tests + Long markCounter; // package-protected for tests + int markSourceOnNextPacket; // package-protected for tests - public static long getEncryptionSize(long size, int packetLength) { - return size + (size / packetLength + 1) * (EncryptedRepository.GCM_TAG_SIZE_IN_BYTES + EncryptedRepository.GCM_IV_SIZE_IN_BYTES); + /** + * Computes and returns the length of the ciphertext given the {@code plaintextLength} and the {@code packetLength} + * used during encryption. + * The plaintext is segmented into packets of equal {@code packetLength} length, with the exception of the last + * packet which is shorter and can have a length of {@code 0}. Encryption is packet-wise and is 1:1, with no padding. + * But each encrypted packet is prepended by the Initilization Vector and appended the Authentication Tag, including + * the last packet, so when pieced together will amount to a longer resulting ciphertext. + * + * @see DecryptionPacketsInputStream#getDecryptionLength(long, int) + */ + public static long getEncryptionLength(long plaintextLength, int packetLength) { + return plaintextLength + (plaintextLength / packetLength + 1) * (EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES + + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES); } public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int nonce, int packetLength) { @@ -77,9 +97,9 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); } this.packetLength = packetLength; - this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_LENGTH_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); this.packetIv.putInt(0, nonce); - this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; this.counter = EncryptedRepository.PACKET_START_COUNTER; this.markCounter = null; this.markSourceOnNextPacket = -1; @@ -147,7 +167,7 @@ public void reset() throws IOException { } private static Cipher getPacketEncryptionCipher(SecretKey secretKey, byte[] packetIv) throws IOException { - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, packetIv); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packetIv); try { Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); packetCipher.init(Cipher.ENCRYPT_MODE, secretKey, 
gcmParameterSpec); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java index 5be99a193ae91..82bd551925833 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java @@ -115,8 +115,8 @@ public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { packetLen)) { encryptedBytes = in.readAllBytes(); } - for (int i = EncryptedRepository.GCM_IV_SIZE_IN_BYTES; i < EncryptedRepository.GCM_IV_SIZE_IN_BYTES + len + - EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; i++) { + for (int i = EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; i < EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + len + + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; i++) { for (int j = 0; j < 8; j++) { // flip bit encryptedBytes[i] ^= (1 << j); @@ -146,10 +146,10 @@ public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { packetLen)) { encryptedBytes = in.readAllBytes(); } - assertThat(encryptedBytes.length, Matchers.is((int) EncryptionPacketsInputStream.getEncryptionSize(len, packetLen))); - int encryptedPacketLen = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetLen + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + assertThat(encryptedBytes.length, Matchers.is((int) EncryptionPacketsInputStream.getEncryptionLength(len, packetLen))); + int encryptedPacketLen = EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + packetLen + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; for (int i = 0; i < encryptedBytes.length; i += encryptedPacketLen) { - for (int j = 0; j < EncryptedRepository.GCM_IV_SIZE_IN_BYTES; j++) { + for (int j = 0; j < EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; j++) { for (int k = 0; k < 8; k++) { // flip bit encryptedBytes[i + j] ^= (1 << k); @@ -174,14 +174,14 @@ private void testEncryptAndDecryptSuccess(byte[] plainBytes, SecretKey secretKey packetLen)) { encryptedBytes = in.readAllBytes(); } - assertThat((long) encryptedBytes.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(len, packetLen))); + assertThat((long) encryptedBytes.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionLength(len, packetLen))); byte[] decryptedBytes; try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, packetLen)) { decryptedBytes = in.readAllBytes(); } assertThat(decryptedBytes.length, Matchers.is(len)); - assertThat((long) decryptedBytes.length, Matchers.is(DecryptionPacketsInputStream.getDecryptionSize(encryptedBytes.length, + assertThat((long) decryptedBytes.length, Matchers.is(DecryptionPacketsInputStream.getDecryptionLength(encryptedBytes.length, packetLen))); for (int i = 0; i < len; i++) { assertThat(decryptedBytes[i], Matchers.is(plainBytes[i])); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index 3257d6a7598ac..90ec1b677c141 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ 
b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -126,8 +126,8 @@ public void testMarkAndResetPacketBoundaryNoMock() throws Exception { plaintextOffset, size), secretKey, nonce, packetSize)) { referenceCiphertextArray = encryptionInputStream.readAllBytes(); } - assertThat((long)referenceCiphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(size, packetSize))); - int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + assertThat((long)referenceCiphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionLength(size, packetSize))); + int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new ByteArrayInputStream(testPlaintextArray, plaintextOffset, size), secretKey, nonce, packetSize)) { // mark at the beginning @@ -170,7 +170,7 @@ public void testMarkAndResetPacketBoundaryNoMock() throws Exception { public void testMarkResetInsidePacketNoMock() throws Exception { int packetSize = 3 + Randomness.get().nextInt(64); - int encryptedPacketSize = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int encryptedPacketSize = EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; int size = 3 * packetSize + Randomness.get().nextInt(64); byte[] bytes = new byte[size]; Randomness.get().nextBytes(bytes); @@ -216,7 +216,7 @@ public void testMarkResetInsidePacketNoMock() throws Exception { public void testMarkResetAcrossPacketsNoMock() throws Exception { int packetSize = 3 + Randomness.get().nextInt(64); - int encryptedPacketSize = EncryptedRepository.GCM_IV_SIZE_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int encryptedPacketSize = EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + packetSize + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; int size = 3 * packetSize + Randomness.get().nextInt(64); byte[] bytes = new byte[size]; Randomness.get().nextBytes(bytes); @@ -286,7 +286,7 @@ public void testMarkAfterResetNoMock() throws Exception { referenceCiphertextArray = encryptionInputStream.readAllBytes(); } int encryptedLen = referenceCiphertextArray.length; - assertThat((long) encryptedLen, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(plainLen, packetSize))); + assertThat((long) encryptedLen, Matchers.is(EncryptionPacketsInputStream.getEncryptionLength(plainLen, packetSize))); for (int mark1 = 0; mark1 < encryptedLen; mark1++) { for (int offset1 = 0; offset1 < encryptedLen - mark1; offset1++) { int mark2 = Randomness.get().nextInt(encryptedLen - mark1); @@ -378,27 +378,27 @@ public void testReset() throws Exception { } private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readStrategy) throws Exception { - int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_SIZE_IN_BYTES + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES; + int encryptedPacketSize = packetSize + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; int plaintextOffset = Randomness.get().nextInt(testPlaintextArray.length - size + 1); int nonce = Randomness.get().nextInt(); long counter = EncryptedRepository.PACKET_START_COUNTER; try (InputStream encryptionInputStream = new EncryptionPacketsInputStream(new 
ByteArrayInputStream(testPlaintextArray, plaintextOffset, size), secretKey, nonce, packetSize)) { byte[] ciphertextArray = readStrategy.readAll(encryptionInputStream); - assertThat((long)ciphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionSize(size, packetSize))); + assertThat((long)ciphertextArray.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionLength(size, packetSize))); for (int ciphertextOffset = 0; ciphertextOffset < ciphertextArray.length; ciphertextOffset += encryptedPacketSize) { ByteBuffer ivBuffer = ByteBuffer.wrap(ciphertextArray, ciphertextOffset, - EncryptedRepository.GCM_IV_SIZE_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); assertThat(ivBuffer.getInt(), Matchers.is(nonce)); assertThat(ivBuffer.getLong(), Matchers.is(counter++)); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_SIZE_IN_BYTES * Byte.SIZE, + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, Arrays.copyOfRange(ciphertextArray, ciphertextOffset, - ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES)); + ciphertextOffset + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES)); Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); try (InputStream packetDecryptionInputStream = new CipherInputStream(new ByteArrayInputStream(ciphertextArray, - ciphertextOffset + EncryptedRepository.GCM_IV_SIZE_IN_BYTES, - packetSize + EncryptedRepository.GCM_TAG_SIZE_IN_BYTES), packetCipher)) { + ciphertextOffset + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES, + packetSize + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES), packetCipher)) { byte[] decryptedCiphertext = packetDecryptionInputStream.readAllBytes(); int decryptedPacketSize = size <= packetSize ? size : packetSize; assertThat(decryptedCiphertext.length, Matchers.is(decryptedPacketSize)); From bda96b67c033184888e861744b08c69379e5ae9d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Dec 2019 19:41:19 +0200 Subject: [PATCH 037/142] ChainingInputStream javadocs --- .../encrypted/ChainingInputStream.java | 177 ++++++++++++++++-- .../DecryptionPacketsInputStream.java | 5 +- .../EncryptionPacketsInputStream.java | 3 +- 3 files changed, 169 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index afc6c2c9acb5a..9ae88c6ee1fa7 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -20,28 +20,34 @@ * The implementing subclass provides the component input streams by implementing the * {@link #nextComponent(InputStream)} method. This method receives the instance of the * current input stream, which has been exhausted, and must return the next input stream, - or {@code null} if there are no more component streams. + * or {@code null} if there are no more component streams. * The {@code ChainingInputStream} assumes ownership of the newly generated component input - * stream, i.e. 
they should not be used by other callers and they will be closed when they - * are exhausted or when the {@code ChainingInputStream} is closed. + * stream, i.e. components should not be used by other callers and they will be closed + * when they are exhausted or when the {@code ChainingInputStream} is closed. *
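As a reading aid for the ownership contract above, here is a minimal usage sketch in the style of the tests later in this series: an anonymous subclass hands out two in-memory components from nextComponent and lets the chain own and close them. Like those tests, it assumes it is compiled in the same package, since nextComponent is package-private; the byte arrays and class name are illustrative, and markSupported() is overridden to return false because this one-shot iterator cannot regenerate its components.

package org.elasticsearch.repositories.encrypted;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;

public class ChainingSketch {
    public static void main(String[] args) throws IOException {
        byte[] first = "hello ".getBytes(StandardCharsets.UTF_8);
        byte[] second = "world".getBytes(StandardCharsets.UTF_8);
        try (ChainingInputStream chained = new ChainingInputStream() {
            // components are produced lazily; each one is owned (and eventually closed) by the chain
            final Iterator<InputStream> components =
                    List.<InputStream>of(new ByteArrayInputStream(first), new ByteArrayInputStream(second)).iterator();

            @Override
            InputStream nextComponent(InputStream currentComponentIn) throws IOException {
                // called with null before the first component, afterwards with each exhausted component
                return components.hasNext() ? components.next() : null;
            }

            @Override
            public boolean markSupported() {
                // the one-shot iterator above cannot regenerate components, so mark/reset is disabled
                return false;
            }
        }) {
            System.out.println(new String(chained.readAllBytes(), StandardCharsets.UTF_8)); // prints: hello world
        }
    }
}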
* This stream does support {@code mark} and {@code reset} but it expects that the component - * streams also support it. If the component input streams do not support {@code mark} and - * {@code reset}, the implementing subclass must override {@code markSupported} to return - * {@code false}. + * streams also support it. When {@code mark} is invoked on the chaining input stream, the + * call is forwarded to the current input stream component and a reference to that component + * is stored internally. A {@reset} invocation on the chaining input stream will then make the + * stored component the current component and will then call the {@code reset} on it. + * The {@link #nextComponent(InputStream)} method must be able to generate the same components + * anew, starting from the component of the {@code reset} call. + * If the component input streams do not support {@code mark}/{@code reset} or + * {@link #nextComponent(InputStream)} cannot generate the same component multiple times, + * the implementing subclass must override {@link #markSupported()} to return {@code false}. *
* The {@code close} call will close the current component input stream and any subsequent {@code read}, * {@code skip}, {@code available} and {@code reset} calls will throw {@code IOException}s. *
* The {@code ChainingInputStream} is similar in purpose to the {@link java.io.SequenceInputStream}, - with the addition of `mark` / `reset` support. + * with the addition of {@code mark}/{@code reset} support. *
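For contrast with java.io.SequenceInputStream mentioned above, the snippet below concatenates two streams the JDK way; it behaves like a chain without mark/reset support (SequenceInputStream.markSupported() returns false), which is exactly the gap the chaining stream fills. The strings and class name are illustrative.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;

public class SequenceComparisonSketch {
    public static void main(String[] args) throws IOException {
        InputStream first = new ByteArrayInputStream("hello ".getBytes(StandardCharsets.UTF_8));
        InputStream second = new ByteArrayInputStream("world".getBytes(StandardCharsets.UTF_8));
        try (InputStream concatenated = new SequenceInputStream(first, second)) {
            System.out.println(concatenated.markSupported()); // false: no way to re-read earlier bytes
            System.out.println(new String(concatenated.readAllBytes(), StandardCharsets.UTF_8)); // hello world
        }
    }
}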
* This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. */ public abstract class ChainingInputStream extends InputStream { /** - * value for the current input stream when there are no subsequent streams left, i.e. when + * value for the current input stream when there are no subsequent streams remaining, i.e. when * {@link #nextComponent(InputStream)} returns {@code null} */ protected static final InputStream EXHAUSTED_MARKER = InputStream.nullInputStream(); // protected for tests @@ -64,11 +70,44 @@ public abstract class ChainingInputStream extends InputStream { /** * This method is responsible for generating the component input streams. * It is passed the current input stream and must return the successive one, - * or {@code null} if the current component is the last one. It is passed the {@code null} value - * at the very start, when no component input stream has yet been obtained. + * or {@code null} if the current component is the last one. + * It is passed the {@code null} value at the very start, when no component + * input stream has yet been generated. + * The successive input stream returns the bytes (during reading) that should + * logically follow the bytes that have been previously returned by the passed-in + * {@code currentComponentIn}; i.e. the first {@code read} call on the next + * component returns the byte logically following the last byte of the previous + * component. + * In order to support {@code mark}/{@code reset} this method must be able + * to generate the successive input stream given any of the previously generated + * ones, i.e. implementors must not assume that the passed-in argument is the + * instance last returned by this method. Therefore, implementors must identify + * the bytes that the passed-in component generated and must return a new + * {@code InputStream} which returns the bytes that logically follow, even if + * the same sequence has been previously returned by another component. + * If this is not possible, and the implementation + * can only generate the component input streams once, it must override + * {@link #nextComponent(InputStream)} to return {@code false}. */ abstract @Nullable InputStream nextComponent(@Nullable InputStream currentComponentIn) throws IOException; + /** + * Reads the next byte of data from this chaining input stream. + * The value byte is returned as an {@code int} in the range + * {@code 0} to {@code 255}. If no byte is available + * because the end of the stream has been reached, the value + * {@code -1} is returned. The end of the chaining input stream + * is reached when the end of the last component stream is reached. + * This method blocks until input data is available (possibly + * asking for the next input stream component), the end of + * the stream is detected, or an exception is thrown. + * + * @return the next byte of data, or {@code -1} if the end of the + * stream is reached. + * @exception IOException if this stream has been closed or + * an I/O error occurs on the current component stream. + * @see ChainingInputStream#read(byte[], int, int) + */ @Override public int read() throws IOException { ensureOpen(); @@ -81,6 +120,34 @@ public int read() throws IOException { return -1; } + + /** + * Reads up to {@code len} bytes of data into an array of bytes from this + * chaining input stream. If {@code len} is zero, then no bytes are read + * and {@code 0} is returned; otherwise, there is an attempt to read at least one byte. 
+ * The {@code read} call is forwarded to the current component input stream. + * If the current component input stream is exhausted, the next one is obtained + * by invoking {@link #nextComponent(InputStream)} and the {@code read} call is + * forwarded to that. If the current component is exhausted + * and there is no subsequent component the value {@code -1} is returned; + * otherwise, at least one byte is read and stored into {@code b}, starting at + * offset {@code off}. + * + * @param b the buffer into which the data is read. + * @param off the start offset in the destination array {@code b} + * @param len the maximum number of bytes read. + * @return the total number of bytes read into the buffer, or + * {@code -1} if there is no more data because the current + * input stream component is exhausted and there is no next one + * {@link #nextComponent(InputStream)} retuns {@code null}. + * @throws NullPointerException If {@code b} is {@code null}. + * @throws IndexOutOfBoundsException If {@code off} is negative, + * {@code len} is negative, or {@code len} is greater than + * {@code b.length - off} + * @throws IOException if this stream has been closed or an I/O error + * occurs on the current component input stream. + * @see java.io.InputStream#read(byte[], int, int) + */ @Override public int read(byte[] b, int off, int len) throws IOException { ensureOpen(); @@ -97,6 +164,22 @@ public int read(byte[] b, int off, int len) throws IOException { return -1; } + /** + * Skips over and discards {@code n} bytes of data from the + * chaining input stream. If {@code n} is negative or {@code 0}, + * the value {@code 0} is returned and no bytes are skipped. + * The {@code skip} method will skip exactly {@code n} bytes, + * possibly generating the next component input streams and + * recurring to {@code read} if {@code skip} on the current + * component does not make progress (returns 0). + * The actual number of bytes skipped, which can be smaller than + * {@code n}, is returned. + * + * @param n the number of bytes to be skipped. + * @return the actual number of bytes skipped. + * @throws IOException if this stream is closed, or if + * {@code currentComponentIn.skip(n)} throws an IOException + */ @Override public long skip(long n) throws IOException { ensureOpen(); @@ -123,6 +206,24 @@ public long skip(long n) throws IOException { return n - bytesRemaining; } + /** + * Returns an estimate of the number of bytes that can be read (or + * skipped over) from this chaining input stream without blocking by the next + * caller of a method for this stream. The next caller might be + * the same thread or another thread. A single read or skip of this + * many bytes will not block, but may read or skip fewer bytes. + *
+ * This simply forwards the {@code available} call to the current + * component input stream, so the returned value is a conservative + * lower bound of the available byte count; i.e. it's possible that + * subsequent component streams have available bytes which are not + * accounted for in the return value of this method. + * + * @return an estimate of the number of bytes that can be read (or skipped + * over) from this input stream without blocking. + * @exception IOException if this stream is closed or if + * {@code currentIn.available()} throws an IOException + */ @Override public int available() throws IOException { ensureOpen(); @@ -132,11 +233,35 @@ public int available() throws IOException { return currentIn.available(); } + /** + * Tests if this chaining input stream supports the {@code mark} and + * {@code reset} methods. By default this returns {@code true} but there + * are some requirements for how components are generated (see + * {@link #nextComponent(InputStream)}), in which case, if the implementer + * cannot satisfy them, it should override this to return {@code false}. + */ @Override public boolean markSupported() { return true; } + /** + * Marks the current position in this input stream. A subsequent call to + * the {@code reset} method repositions this stream at the last marked + * position so that subsequent reads re-read the same bytes. + *
+ * The {@code readlimit} arguments tells this input stream to + * allow that many bytes to be read before the mark position can be + * invalidated. + *
+ * The {@code mark} call is forwarded to the current component input + * stream and a reference to it is stored internally. + * + * @param readlimit the maximum limit of bytes that can be read before + * the mark position can be invalidated. + * @see BufferOnMarkInputStream#reset() + * @see java.io.InputStream#mark(int) + */ @Override public void mark(int readlimit) { if (markSupported() && false == closed) { @@ -156,6 +281,26 @@ public void mark(int readlimit) { } } + /** + * Repositions this stream to the position at the time the + * {@code mark} method was last called on this chaining input stream. + * Subsequent read calls will return the same bytes in the same + * order since the point of the {@code mark} call. Naturally, + * {@code mark} can be invoked at any moment, even after a + * {@code reset}. + *
+ * The previously stored reference to the current component during the + * {@code mark} invocation is made the new current component and then + * the {@code reset} call is forwarded to it. The next internal call to + * {@link #nextComponent(InputStream)} will use this component, so + * the {@link #nextComponent(InputStream)} must not assume monotonous + * arguments. + * + * @throws IOException if the stream has been closed or the number of bytes + * read since the last mark call exceeded the + * {@code readLimit} parameter + * @see java.io.InputStream#mark(int) + */ @Override public void reset() throws IOException { ensureOpen(); @@ -175,6 +320,12 @@ public void reset() throws IOException { } } + /** + * Closes this chaining input stream, closing the current component stream as well + * as any internally stored reference of a component during a {@code mark} call. + * + * @exception IOException if an I/O error occurs while closing the current or the marked stream. + */ @Override public void close() throws IOException { if (false == closed) { @@ -203,13 +354,13 @@ private boolean nextIn() throws IOException { currentIn.close(); } currentIn = nextComponent(currentIn); - if (markSupported() && currentIn != null && false == currentIn.markSupported()) { - throw new IllegalStateException("Component input stream must support mark"); - } if (currentIn == null) { currentIn = EXHAUSTED_MARKER; return false; } + if (markSupported() && false == currentIn.markSupported()) { + throw new IllegalStateException("Component input stream must support mark"); + } return true; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index a5644c1fee496..a0a6d983b1de0 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -61,12 +61,13 @@ public final class DecryptionPacketsInputStream extends ChainingInputStream { * used during encryption. * Each ciphertext packet is prepended by the Initilization Vector and appended the Authentication Tag. * Decryption is 1:1, and the ciphertext is not padded, but stripping away the IV and the AT amounts to a shorter - * plaintext. + * plaintext compared to the ciphertext. 
* * @see EncryptionPacketsInputStream#getEncryptionLength(long, int) */ public static long getDecryptionLength(long ciphertextLength, int packetLength) { - long encryptedPacketLength = packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; + long encryptedPacketLength = + packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; long completePackets = ciphertextLength / encryptedPacketLength; long decryptedSize = completePackets * packetLength; if (ciphertextLength % encryptedPacketLength != 0) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index a8caacf196a8d..e6292277db01f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -99,7 +99,8 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int this.packetLength = packetLength; this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_LENGTH_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); this.packetIv.putInt(0, nonce); - this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; + this.encryptedPacketLength = + packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; this.counter = EncryptedRepository.PACKET_START_COUNTER; this.markCounter = null; this.markSourceOnNextPacket = -1; From cb6006d2062f1c80898b4ec744c873144068c3b3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Dec 2019 20:14:31 +0200 Subject: [PATCH 038/142] Logging on component close --- .../repositories/encrypted/ChainingInputStream.java | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 9ae88c6ee1fa7..fd5d64bd342de 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -8,6 +8,9 @@ import java.io.IOException; import java.io.InputStream; import java.util.Objects; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Nullable; /** @@ -46,6 +49,8 @@ */ public abstract class ChainingInputStream extends InputStream { + private static final Logger LOGGER = LogManager.getLogger(ChainingInputStream.class); + /** * value for the current input stream when there are no subsequent streams remaining, i.e. when * {@link #nextComponent(InputStream)} returns {@code null} @@ -216,8 +221,8 @@ public long skip(long n) throws IOException { * This simply forwards the {@code available} call to the current * component input stream, so the returned value is a conservative * lower bound of the available byte count; i.e. 
it's possible that - * subsequent component streams have available bytes which are not - * accounted for in the return value of this method. + * subsequent component streams have available bytes but this method + * only returns the available bytes of the current component. * * @return an estimate of the number of bytes that can be read (or skipped * over) from this input stream without blocking. @@ -271,6 +276,7 @@ public void mark(int readlimit) { markIn.close(); } catch (IOException e) { // an IOException on a component input stream close is not important + LOGGER.info("IOException while closing a marked component input stream during a mark", e); } } // stores the current input stream to be reused in case of a reset @@ -312,6 +318,7 @@ public void reset() throws IOException { currentIn.close(); } catch (IOException e) { // an IOException on a component input stream close is not important + LOGGER.info("IOException while closing the current component input stream during a reset", e); } } currentIn = markIn; From 65f0adb67a71bf89a8ecf1f136bff13a5b6032dc Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Dec 2019 20:35:38 +0200 Subject: [PATCH 039/142] Nit --- .../repositories/encrypted/ChainingInputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index fd5d64bd342de..2a406ca793dd4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -31,7 +31,7 @@ * This stream does support {@code mark} and {@code reset} but it expects that the component * streams also support it. When {@code mark} is invoked on the chaining input stream, the * call is forwarded to the current input stream component and a reference to that component - * is stored internally. A {@reset} invocation on the chaining input stream will then make the + * is stored internally. A {@code reset} invocation on the chaining input stream will then make the * stored component the current component and will then call the {@code reset} on it. * The {@link #nextComponent(InputStream)} method must be able to generate the same components * anew, starting from the component of the {@code reset} call. 
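The ChainingInputStream javadocs added above pin down the mark/reset contract: nextComponent must be able to regenerate components starting from any previously returned one, the marked component is retained rather than closed, and reset makes it current again before forwarding the call. Below is a self-contained sketch of a chain that satisfies this contract by slicing a shared byte array, so that nextComponent can always produce the bytes that logically follow a given component. As with the tests, it assumes same-package access; the Slice helper, names and sizes are illustrative.

package org.elasticsearch.repositories.encrypted;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class MarkResetSketch {

    // a component that remembers which slice of the source array it covers, so that
    // nextComponent can regenerate the following component from any previous one
    static final class Slice extends ByteArrayInputStream {
        final int end; // exclusive end offset of this slice within the source array
        Slice(byte[] source, int from, int to) {
            super(source, from, to - from);
            this.end = to;
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] source = "0123456789".getBytes(StandardCharsets.UTF_8);
        int sliceLength = 4;
        try (ChainingInputStream chained = new ChainingInputStream() {
            @Override
            InputStream nextComponent(InputStream currentComponentIn) throws IOException {
                int from = currentComponentIn == null ? 0 : ((Slice) currentComponentIn).end;
                if (from >= source.length) {
                    return null; // no more components
                }
                return new Slice(source, from, Math.min(from + sliceLength, source.length));
            }
        }) {
            System.out.print((char) chained.read()); // 0
            System.out.print((char) chained.read()); // 1
            chained.mark(16);                        // forwarded to the current component, which is remembered
            System.out.print((char) chained.read()); // 2
            System.out.print((char) chained.read()); // 3
            System.out.print((char) chained.read()); // 4 (already in the second component)
            chained.reset();                         // back to the remembered component, at the marked position
            System.out.println(" -> " + new String(chained.readAllBytes(), StandardCharsets.UTF_8)); // 23456789
        }
    }
}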
From b40d9992d6ca2cd36f0e9a5eb4f72bab20468c2f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 26 Dec 2019 14:38:03 +0200 Subject: [PATCH 040/142] No Randomness in ChainingInputStreamTests --- .../encrypted/ChainingInputStreamTests.java | 91 +++++++++---------- 1 file changed, 41 insertions(+), 50 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index cf4620bd6e865..9413a11d00b48 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.repositories.encrypted; -import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -35,13 +34,11 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { return null; } }; - byte[] b1 = new byte[1 + Randomness.get().nextInt(16)]; - Randomness.get().nextBytes(b1); + byte[] b1 = randomByteArrayOfLength(randomIntBetween(1, 16)); test.currentIn = new ByteArrayInputStream(b1); - long nSkip = test.skip(b1.length + 1 + Randomness.get().nextInt(16)); + long nSkip = test.skip(b1.length + randomIntBetween(1, 16)); assertThat(nSkip, Matchers.is((long)b1.length)); - byte[] b2 = new byte[1 + Randomness.get().nextInt(16)]; - Randomness.get().nextBytes(b2); + byte[] b2 = randomByteArrayOfLength(randomIntBetween(1, 16)); test = new ChainingInputStream() { boolean second = false; @Override @@ -55,7 +52,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } }; test.currentIn = new ByteArrayInputStream(b1); - long skipArg = b1.length + 1 + Randomness.get().nextInt(b2.length); + long skipArg = b1.length + randomIntBetween(1, b2.length); nSkip = test.skip(skipArg); assertThat(nSkip, Matchers.is(skipArg)); byte[] rest = test.readAllBytes(); @@ -69,13 +66,13 @@ public void testEmptyChain() throws Exception { ChainingInputStream emptyStream = newEmptyStream(); assertThat(emptyStream.read(), Matchers.is(-1)); emptyStream = newEmptyStream(); - byte[] b = new byte[1 + Randomness.get().nextInt(8)]; - int off = Randomness.get().nextInt(b.length); + byte[] b = randomByteArrayOfLength(randomIntBetween(1, 8)); + int off = randomInt(b.length - 1); assertThat(emptyStream.read(b, off, b.length - off), Matchers.is(-1)); emptyStream = newEmptyStream(); assertThat(emptyStream.available(), Matchers.is(0)); emptyStream = newEmptyStream(); - assertThat(emptyStream.skip(1 + Randomness.get().nextInt(32)), Matchers.is(0L)); + assertThat(emptyStream.skip(randomIntBetween(1, 32)), Matchers.is(0L)); } public void testClose() throws Exception { @@ -87,16 +84,16 @@ public void testClose() throws Exception { assertThat(e.getMessage(), Matchers.is("Stream is closed")); ChainingInputStream test2 = newEmptyStream(); test2.close(); - byte[] b = new byte[2 + Randomness.get().nextInt(8)]; - int off = Randomness.get().nextInt(b.length - 1); + byte[] b = randomByteArrayOfLength(randomIntBetween(2, 9)); + int off = randomInt(b.length - 2); e = expectThrows(IOException.class, () -> { - test2.read(b, off, Randomness.get().nextInt(b.length - off)); + test2.read(b, off, randomInt(b.length - off 
- 1)); }); assertThat(e.getMessage(), Matchers.is("Stream is closed")); ChainingInputStream test3 = newEmptyStream(); test3.close(); e = expectThrows(IOException.class, () -> { - test3.skip(Randomness.get().nextInt(32)); + test3.skip(randomInt(31)); }); ChainingInputStream test4 = newEmptyStream(); test4.close(); @@ -110,7 +107,7 @@ public void testClose() throws Exception { }); ChainingInputStream test6 = newEmptyStream(); test6.close(); - test6.mark(Randomness.get().nextInt()); + test6.mark(randomInt()); } public void testHeadComponentArgumentIsNull() throws Exception { @@ -130,11 +127,10 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } public void testChaining() throws Exception { - int componentCount = 2 + Randomness.get().nextInt(8); + int componentCount = randomIntBetween(2, 9); TestInputStream[] sourceComponents = new TestInputStream[componentCount]; for (int i = 0; i < sourceComponents.length; i++) { - byte[] b = new byte[Randomness.get().nextInt(2)]; - Randomness.get().nextBytes(b); + byte[] b = randomByteArrayOfLength(randomInt(1)); sourceComponents[i] = new TestInputStream(b); } ChainingInputStream test = new ChainingInputStream() { @@ -214,12 +210,10 @@ public void testEmptyInputStreamComponents() throws Exception { public void testNullComponentTerminatesChain() throws Exception { TestInputStream[] sourceComponents = new TestInputStream[3]; TestInputStream[] chainComponents = new TestInputStream[5]; - byte[] b1 = new byte[1 + Randomness.get().nextInt(2)]; - Randomness.get().nextBytes(b1); + byte[] b1 = randomByteArrayOfLength(randomIntBetween(1, 2)); sourceComponents[0] = new TestInputStream(b1); sourceComponents[1] = null; - byte[] b2 = new byte[1 + Randomness.get().nextInt(2)]; - Randomness.get().nextBytes(b2); + byte[] b2 = randomByteArrayOfLength(randomIntBetween(1, 2)); sourceComponents[2] = new TestInputStream(b2); ChainingInputStream test = new ChainingInputStream() { int i = 0; @@ -261,7 +255,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { }; test.currentIn = mockCurrentIn; // verify "byte-wise read" is proxied to the current component stream - when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + when(mockCurrentIn.read()).thenReturn(randomInt(255)); test.read(); verify(mockCurrentIn).read(); // verify "array read" is proxied to the current component stream @@ -272,17 +266,17 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { return 0; } else { // partial read return - int bytesCount = 1 + Randomness.get().nextInt(len); + int bytesCount = randomIntBetween(1, len); return bytesCount; } }); - byte[] b = new byte[2 + Randomness.get().nextInt(32)]; - int len = 1 + Randomness.get().nextInt(b.length - 1); - int offset = Randomness.get().nextInt(b.length - len); + byte[] b = randomByteArrayOfLength(randomIntBetween(2, 33)); + int len = randomIntBetween(1, b.length - 1); + int offset = randomInt(b.length - len - 1); test.read(b, offset, len); verify(mockCurrentIn).read(Mockito.eq(b), Mockito.eq(offset), Mockito.eq(len)); // verify "skip" is proxied to the current component stream - long skipCount = 1 + Randomness.get().nextInt(3); + long skipCount = randomIntBetween(1, 3); test.skip(skipCount); verify(mockCurrentIn).skip(Mockito.eq(skipCount)); // verify "available" is proxied to the current component stream @@ -292,7 +286,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { public void 
testEmptyReadAsksForNext() throws Exception { InputStream mockCurrentIn = mock(InputStream.class); - when(mockCurrentIn.markSupported()).thenAnswer(invocationOnMock -> true); + when(mockCurrentIn.markSupported()).thenReturn(true); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { @@ -300,7 +294,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } }; test.currentIn = InputStream.nullInputStream(); - when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + when(mockCurrentIn.read()).thenReturn(randomInt(255)); test.read(); verify(mockCurrentIn).read(); // test "array read" @@ -311,21 +305,20 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { if (len == 0) { return 0; } else { - int bytesCount = 1 + Randomness.get().nextInt(len); + int bytesCount = randomIntBetween(1, len); return bytesCount; } }); - byte[] b = new byte[2 + Randomness.get().nextInt(32)]; - int len = 1 + Randomness.get().nextInt(b.length - 1); - int offset = Randomness.get().nextInt(b.length - len); + byte[] b = new byte[randomIntBetween(2, 33)]; + int len = randomIntBetween(1, b.length - 1); + int offset = randomInt(b.length - len - 1); test.read(b, offset, len); verify(mockCurrentIn).read(Mockito.eq(b), Mockito.eq(offset), Mockito.eq(len)); } public void testReadAll() throws Exception { - byte[] b = new byte[2 + Randomness.get().nextInt(32)]; - Randomness.get().nextBytes(b); - int splitIdx = Randomness.get().nextInt(b.length - 1); + byte[] b = randomByteArrayOfLength(randomIntBetween(2, 33)); + int splitIdx = randomInt(b.length - 2); ByteArrayInputStream first = new ByteArrayInputStream(b, 0, splitIdx + 1); ByteArrayInputStream second = new ByteArrayInputStream(b, splitIdx + 1, b.length - splitIdx - 1); ChainingInputStream test = new ChainingInputStream() { @@ -351,7 +344,7 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { public void testMark() throws Exception { InputStream mockIn = mock(InputStream.class); - when(mockIn.markSupported()).thenAnswer(invocationOnMock -> true); + when(mockIn.markSupported()).thenReturn(true); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { @@ -362,23 +355,23 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } } }; - int readLimit = Randomness.get().nextInt(64); + int readLimit = randomInt(63); // mark at the beginning test.mark(readLimit); assertThat(test.markIn, Matchers.nullValue()); // mark intermediary position - when(mockIn.read()).thenAnswer(invocationOnMock -> Randomness.get().nextInt(256)); + when(mockIn.read()).thenReturn(randomInt(255)); test.read(); assertThat(test.currentIn, Matchers.is(mockIn)); test.mark(readLimit); assertThat(test.markIn, Matchers.is(mockIn)); verify(mockIn).mark(Mockito.eq(readLimit)); // mark end position - when(mockIn.read()).thenAnswer(invocationOnMock -> -1); + when(mockIn.read()).thenReturn(-1); test.read(); assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); verify(mockIn, never()).close(); - readLimit = Randomness.get().nextInt(64); + readLimit = randomInt(63); test.mark(readLimit); verify(mockIn).close(); assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); @@ -386,9 +379,9 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { 
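The refactor in this patch swaps raw Randomness calls for the ESTestCase helpers, and the mapping is mechanical once the helper bounds are kept in mind: both randomInt(max) and randomIntBetween(min, max) treat their upper bound as inclusive, and randomByteArrayOfLength(n) replaces allocating an array and filling it with nextBytes. The illustrative test below, which is not part of the patch, spells out the equivalences this diff relies on.

package org.elasticsearch.repositories.encrypted;

import org.elasticsearch.test.ESTestCase;

public class RandomHelperEquivalenceTests extends ESTestCase {

    public void testHelperBounds() {
        int oneTo16 = randomIntBetween(1, 16);   // replaces 1 + Randomness.get().nextInt(16)
        assertTrue(oneTo16 >= 1 && oneTo16 <= 16);

        byte[] b = randomByteArrayOfLength(8);   // replaces new byte[8] plus Randomness.get().nextBytes(b)
        assertEquals(8, b.length);

        int offset = randomInt(b.length - 1);    // replaces Randomness.get().nextInt(b.length)
        assertTrue(offset >= 0 && offset <= b.length - 1);

        int unsignedByte = randomInt(255);       // replaces Randomness.get().nextInt(256)
        assertTrue(unsignedByte >= 0 && unsignedByte <= 255);
    }
}

The same patch also simplifies Mockito stubs whose answers ignored the invocation, for example when(mockIn.read()).thenAnswer(invocationOnMock -> -1) becomes when(mockIn.read()).thenReturn(-1).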
public void testReset() throws Exception { InputStream mockMarkIn = mock(InputStream.class); - when(mockMarkIn.markSupported()).thenAnswer(invocationOnMock -> true); + when(mockMarkIn.markSupported()).thenReturn(true); InputStream mockCurrentIn = mock(InputStream.class); - when(mockCurrentIn.markSupported()).thenAnswer(invocationOnMock -> true); + when(mockCurrentIn.markSupported()).thenReturn(true); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { @@ -401,15 +394,14 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { assertThat(test.currentIn, Matchers.is(mockMarkIn)); assertThat(test.markIn, Matchers.is(mockMarkIn)); verify(mockMarkIn).reset(); - when(mockCurrentIn.read()).thenAnswer(invocationOnMock -> -1); + when(mockCurrentIn.read()).thenReturn(-1); verify(mockMarkIn, never()).close(); verify(mockCurrentIn).close(); } public void testMarkAfterReset() throws Exception { - int len = 8 + Randomness.get().nextInt(8); - byte[] b = new byte[len]; - Randomness.get().nextBytes(b); + int len = randomIntBetween(8, 15); + byte[] b = randomByteArrayOfLength(len); for (int p = 0; p <= len; p++) { for (int mark1 = 0; mark1 < len; mark1++) { for (int offset1 = 0; offset1 < len - mark1; offset1++) { @@ -484,8 +476,7 @@ private Tuple testEmptyComponentsInChain(int compon if (emptyComponentIndices.contains(i)) { sourceComponents[i] = InputStream.nullInputStream(); } else { - byte[] b = new byte[1 + Randomness.get().nextInt(8)]; - Randomness.get().nextBytes(b); + byte[] b = randomByteArrayOfLength(randomIntBetween(1, 8)); sourceComponents[i] = new ByteArrayInputStream(b); result = concatenateArrays(result, b); } From cb7bc1c80b7c18a6a2d4a9ebbd47a6a0feb59f0b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 26 Dec 2019 07:54:49 -0500 Subject: [PATCH 041/142] Update x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/ChainingInputStreamTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 9413a11d00b48..8f3d754ebfee0 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -110,7 +110,7 @@ public void testClose() throws Exception { test6.mark(randomInt()); } - public void testHeadComponentArgumentIsNull() throws Exception { + public void testInitialComponentArgumentIsNull() throws Exception { AtomicReference headInputStream = new AtomicReference<>(); AtomicBoolean nextCalled = new AtomicBoolean(false); ChainingInputStream test = new ChainingInputStream() { From 7acaf5432242a4e19027fceb767fa85b5f844afa Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 27 Dec 2019 10:19:03 +0200 Subject: [PATCH 042/142] Tim's review WIP before mark/reset review --- .../encrypted/ChainingInputStreamTests.java | 167 ++++++++++++------ 1 file changed, 111 insertions(+), 56 deletions(-) diff --git 
a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 8f3d754ebfee0..79bf36492ef11 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -9,13 +9,14 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import org.junit.Assert; import org.mockito.Mockito; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -27,62 +28,88 @@ public class ChainingInputStreamTests extends ESTestCase { - public void testSkipAcrossComponents() throws Exception { + public void testSkipWithinComponent() throws Exception { + byte[] b1 = randomByteArrayOfLength(randomIntBetween(2, 16)); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - return null; + if (currentComponentIn == null) { + return new ByteArrayInputStream(b1); + } else { + return null; + } } }; + int prefix = randomIntBetween(0, b1.length - 2); + test.readNBytes(prefix); + // skip less bytes than the component has + int nSkip1 = randomInt(b1.length - prefix); + long nSkip = test.skip(nSkip1); + assertThat((int)nSkip, Matchers.is(nSkip1)); + int nSkip2 = randomInt(b1.length - prefix - nSkip1 + randomIntBetween(1, 8)); + // skip more bytes than the component has + nSkip = test.skip(nSkip2); + assertThat((int) nSkip, Matchers.is(b1.length - prefix - nSkip1)); + } + + public void testSkipAcrossComponents() throws Exception { byte[] b1 = randomByteArrayOfLength(randomIntBetween(1, 16)); - test.currentIn = new ByteArrayInputStream(b1); - long nSkip = test.skip(b1.length + randomIntBetween(1, 16)); - assertThat(nSkip, Matchers.is((long)b1.length)); byte[] b2 = randomByteArrayOfLength(randomIntBetween(1, 16)); - test = new ChainingInputStream() { - boolean second = false; + ChainingInputStream test = new ChainingInputStream() { + final Iterator iter = List.of(new ByteArrayInputStream(b1), new ByteArrayInputStream(b2)).iterator(); + @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - if (false == second) { - second = true; - return new ByteArrayInputStream(b2); + if (iter.hasNext()) { + return iter.next(); } else { return null; } } }; - test.currentIn = new ByteArrayInputStream(b1); long skipArg = b1.length + randomIntBetween(1, b2.length); - nSkip = test.skip(skipArg); + long nSkip = test.skip(skipArg); assertThat(nSkip, Matchers.is(skipArg)); byte[] rest = test.readAllBytes(); - assertThat((long)rest.length, Matchers.is(b1.length + b2.length - nSkip)); + assertThat((long) rest.length, Matchers.is(b1.length + b2.length - nSkip)); for (int i = rest.length - 1; i >= 0; i--) { - assertThat(rest[i], Matchers.is(b2[i + (int)nSkip - b1.length])); + assertThat(rest[i], Matchers.is(b2[i + (int) nSkip - b1.length])); } } public void testEmptyChain() throws Exception { - ChainingInputStream emptyStream = newEmptyStream(); + // 
chain is empty because it doesn't have any components + ChainingInputStream emptyStream = newEmptyStream(false); assertThat(emptyStream.read(), Matchers.is(-1)); - emptyStream = newEmptyStream(); + emptyStream = newEmptyStream(false); byte[] b = randomByteArrayOfLength(randomIntBetween(1, 8)); int off = randomInt(b.length - 1); assertThat(emptyStream.read(b, off, b.length - off), Matchers.is(-1)); - emptyStream = newEmptyStream(); + emptyStream = newEmptyStream(false); assertThat(emptyStream.available(), Matchers.is(0)); - emptyStream = newEmptyStream(); + emptyStream = newEmptyStream(false); + assertThat(emptyStream.skip(randomIntBetween(1, 32)), Matchers.is(0L)); + // chain is empty because all its components are empty + emptyStream = newEmptyStream(true); + assertThat(emptyStream.read(), Matchers.is(-1)); + emptyStream = newEmptyStream(true); + b = randomByteArrayOfLength(randomIntBetween(1, 8)); + off = randomInt(b.length - 1); + assertThat(emptyStream.read(b, off, b.length - off), Matchers.is(-1)); + emptyStream = newEmptyStream(true); + assertThat(emptyStream.available(), Matchers.is(0)); + emptyStream = newEmptyStream(true); assertThat(emptyStream.skip(randomIntBetween(1, 32)), Matchers.is(0L)); } public void testClose() throws Exception { - ChainingInputStream test1 = newEmptyStream(); + ChainingInputStream test1 = newEmptyStream(randomBoolean()); test1.close(); IOException e = expectThrows(IOException.class, () -> { test1.read(); }); assertThat(e.getMessage(), Matchers.is("Stream is closed")); - ChainingInputStream test2 = newEmptyStream(); + ChainingInputStream test2 = newEmptyStream(randomBoolean()); test2.close(); byte[] b = randomByteArrayOfLength(randomIntBetween(2, 9)); int off = randomInt(b.length - 2); @@ -90,51 +117,61 @@ public void testClose() throws Exception { test2.read(b, off, randomInt(b.length - off - 1)); }); assertThat(e.getMessage(), Matchers.is("Stream is closed")); - ChainingInputStream test3 = newEmptyStream(); + ChainingInputStream test3 = newEmptyStream(randomBoolean()); test3.close(); e = expectThrows(IOException.class, () -> { test3.skip(randomInt(31)); }); - ChainingInputStream test4 = newEmptyStream(); + assertThat(e.getMessage(), Matchers.is("Stream is closed")); + ChainingInputStream test4 = newEmptyStream(randomBoolean()); test4.close(); e = expectThrows(IOException.class, () -> { test4.available(); }); - ChainingInputStream test5 = newEmptyStream(); + assertThat(e.getMessage(), Matchers.is("Stream is closed")); + ChainingInputStream test5 = newEmptyStream(randomBoolean()); test5.close(); e = expectThrows(IOException.class, () -> { test5.reset(); }); - ChainingInputStream test6 = newEmptyStream(); + assertThat(e.getMessage(), Matchers.is("Stream is closed")); + ChainingInputStream test6 = newEmptyStream(randomBoolean()); test6.close(); - test6.mark(randomInt()); + try { + test6.mark(randomInt()); + } catch (Exception e1) { + assumeNoException("mark on a closed stream should not throw", e1); + } } public void testInitialComponentArgumentIsNull() throws Exception { - AtomicReference headInputStream = new AtomicReference<>(); + AtomicReference initialInputStream = new AtomicReference<>(); AtomicBoolean nextCalled = new AtomicBoolean(false); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - headInputStream.set(currentComponentIn); + initialInputStream.set(currentComponentIn); nextCalled.set(true); return null; } }; assertThat(test.read(), Matchers.is(-1)); 
assertThat(nextCalled.get(), Matchers.is(true)); - assertThat(headInputStream.get(), Matchers.nullValue()); + assertThat(initialInputStream.get(), Matchers.nullValue()); } public void testChaining() throws Exception { int componentCount = randomIntBetween(2, 9); + ByteBuffer testSource = ByteBuffer.allocate(componentCount); TestInputStream[] sourceComponents = new TestInputStream[componentCount]; for (int i = 0; i < sourceComponents.length; i++) { byte[] b = randomByteArrayOfLength(randomInt(1)); + testSource.put(b); sourceComponents[i] = new TestInputStream(b); } ChainingInputStream test = new ChainingInputStream() { int i = 0; + @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { if (i == 0) { @@ -142,11 +179,11 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { return sourceComponents[i++]; } else if (i < sourceComponents.length) { assertThat(((TestInputStream) currentComponentIn).closed.get(), Matchers.is(true)); - assertThat(currentComponentIn, Matchers.is(sourceComponents[i-1])); + assertThat(currentComponentIn, Matchers.is(sourceComponents[i - 1])); return sourceComponents[i++]; } else if (i == sourceComponents.length) { assertThat(((TestInputStream) currentComponentIn).closed.get(), Matchers.is(true)); - assertThat(currentComponentIn, Matchers.is(sourceComponents[i-1])); + assertThat(currentComponentIn, Matchers.is(sourceComponents[i - 1])); i++; return null; } else { @@ -159,7 +196,12 @@ public boolean markSupported() { return false; } }; - test.readAllBytes(); + byte[] testArr = test.readAllBytes(); + byte[] ref = testSource.array(); + // testArr and ref should be equal, but ref might have trailing zeroes + for (int i = 0; i < testArr.length; i++) { + assertThat(testArr[i], Matchers.is(ref[i])); + } } public void testEmptyInputStreamComponents() throws Exception { @@ -168,42 +210,42 @@ public void testEmptyInputStreamComponents() throws Exception { byte[] result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], Matchers.is(test.v2()[i])); } // leading double empty streams test = testEmptyComponentsInChain(3, Arrays.asList(0, 1)); result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], Matchers.is(test.v2()[i])); } // trailing single empty stream test = testEmptyComponentsInChain(3, Arrays.asList(2)); result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], Matchers.is(test.v2()[i])); } // trailing double empty stream test = testEmptyComponentsInChain(3, Arrays.asList(1, 2)); result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], Matchers.is(test.v2()[i])); } // middle single empty stream test = testEmptyComponentsInChain(3, Arrays.asList(1)); result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], 
Matchers.is(test.v2()[i])); } // leading and trailing empty streams test = testEmptyComponentsInChain(3, Arrays.asList(0, 2)); result = test.v1().readAllBytes(); assertThat(result.length, Matchers.is(test.v2().length)); for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(test.v2()[i])); + assertThat(result[i], Matchers.is(test.v2()[i])); } } @@ -217,6 +259,7 @@ public void testNullComponentTerminatesChain() throws Exception { sourceComponents[2] = new TestInputStream(b2); ChainingInputStream test = new ChainingInputStream() { int i = 0; + @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { chainComponents[i] = (TestInputStream) currentComponentIn; @@ -233,11 +276,7 @@ public boolean markSupported() { return false; } }; - byte[] b = test.readAllBytes(); - assertThat(b.length, Matchers.is(b1.length)); - for (int i = 0; i < b.length; i++) { - Assert.assertThat(b[i], Matchers.is(b1[i])); - } + assertThat(test.readAllBytes(), Matchers.equalTo(b1)); assertThat(chainComponents[0], Matchers.nullValue()); assertThat(chainComponents[1], Matchers.is(sourceComponents[0])); assertThat(chainComponents[1].closed.get(), Matchers.is(true)); @@ -247,13 +286,17 @@ public boolean markSupported() { public void testCallsForwardToCurrentComponent() throws Exception { InputStream mockCurrentIn = mock(InputStream.class); + when(mockCurrentIn.markSupported()).thenReturn(true); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - throw new IllegalStateException(); + if (currentComponentIn == null) { + return mockCurrentIn; + } else { + throw new IllegalStateException(); + } } }; - test.currentIn = mockCurrentIn; // verify "byte-wise read" is proxied to the current component stream when(mockCurrentIn.read()).thenReturn(randomInt(255)); test.read(); @@ -336,10 +379,7 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { } }; byte[] result = test.readAllBytes(); - assertThat(result.length, Matchers.is(b.length)); - for (int i = 0; i < result.length; i++) { - Assert.assertThat(result[i], Matchers.is(b[i])); - } + assertThat(result, Matchers.equalTo(b)); } public void testMark() throws Exception { @@ -483,6 +523,7 @@ private Tuple testEmptyComponentsInChain(int compon } return new Tuple<>(new ChainingInputStream() { int i = 0; + @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { if (i < sourceComponents.length) { @@ -499,13 +540,27 @@ public boolean markSupported() { }, result); } - private ChainingInputStream newEmptyStream() { - return new ChainingInputStream() { - @Override - InputStream nextComponent(InputStream currentElementIn) throws IOException { - return null; - } - }; + private ChainingInputStream newEmptyStream(boolean hasEmptyComponents) { + if (hasEmptyComponents) { + final Iterator iterator = Arrays.asList( + randomArray(1, 5, ByteArrayInputStream[]::new, () -> new ByteArrayInputStream(new byte[0]))).iterator(); + return new ChainingInputStream() { + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (iterator.hasNext()) { + return iterator.next(); + } else { + return null; + } + } + }; + } else { + return new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentElementIn) throws IOException { + return null; + } + }; + } } static class TestInputStream extends InputStream { From dfeea8311c3c16942836f3ad6b3d712e04aca163 Mon 
Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 27 Dec 2019 19:45:52 +0200 Subject: [PATCH 043/142] Almost WIP --- .../encrypted/ChainingInputStream.java | 3 +- .../encrypted/ChainingInputStreamTests.java | 498 +++++++++++++++++- 2 files changed, 474 insertions(+), 27 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 2a406ca793dd4..ff4b4ec8dc18b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -289,7 +289,8 @@ public void mark(int readlimit) { /** * Repositions this stream to the position at the time the - * {@code mark} method was last called on this chaining input stream. + * {@code mark} method was last called on this chaining input stream, + * or at the beginning if the {@code mark} method was never called. * Subsequent read calls will return the same bytes in the same * order since the point of the {@code mark} call. Naturally, * {@code mark} can be invoked at any moment, even after a diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 79bf36492ef11..6095579ce519b 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -9,20 +9,24 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import org.junit.Assert; import org.mockito.Mockito; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -46,7 +50,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { int nSkip1 = randomInt(b1.length - prefix); long nSkip = test.skip(nSkip1); assertThat((int)nSkip, Matchers.is(nSkip1)); - int nSkip2 = randomInt(b1.length - prefix - nSkip1 + randomIntBetween(1, 8)); + int nSkip2 = b1.length - prefix - nSkip1 + randomIntBetween(1, 8); // skip more bytes than the component has nSkip = test.skip(nSkip2); assertThat((int) nSkip, Matchers.is(b1.length - prefix - nSkip1)); @@ -247,6 +251,10 @@ public void testEmptyInputStreamComponents() throws Exception { for (int i = 0; i < result.length; i++) { assertThat(result[i], Matchers.is(test.v2()[i])); } + // all streams are empty + test = testEmptyComponentsInChain(3, Arrays.asList(0, 1, 2)); + result = test.v1().readAllBytes(); + assertThat(result.length, Matchers.is(0)); } public void testNullComponentTerminatesChain() 
throws Exception { @@ -382,9 +390,10 @@ InputStream nextComponent(InputStream currentElementIn) throws IOException { assertThat(result, Matchers.equalTo(b)); } - public void testMark() throws Exception { + public void testMarkAtBeginning() throws Exception { InputStream mockIn = mock(InputStream.class); when(mockIn.markSupported()).thenReturn(true); + when(mockIn.read()).thenAnswer(invocationOnMock -> randomInt(255)); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { @@ -395,51 +404,488 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } } }; - int readLimit = randomInt(63); + assertThat(test.currentIn, Matchers.nullValue()); // mark at the beginning - test.mark(readLimit); assertThat(test.markIn, Matchers.nullValue()); - // mark intermediary position - when(mockIn.read()).thenReturn(randomInt(255)); + test.mark(randomInt(63)); + assertThat(test.markIn, Matchers.nullValue()); + // another mark is a no-op + test.mark(randomInt(63)); + assertThat(test.markIn, Matchers.nullValue()); + // read does not change the marK test.read(); assertThat(test.currentIn, Matchers.is(mockIn)); - test.mark(readLimit); - assertThat(test.markIn, Matchers.is(mockIn)); - verify(mockIn).mark(Mockito.eq(readLimit)); - // mark end position + // mark reference is still unchanged + assertThat(test.markIn, Matchers.nullValue()); + // read reaches end when(mockIn.read()).thenReturn(-1); test.read(); assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); - verify(mockIn, never()).close(); + verify(mockIn).close(); + // mark reference is still unchanged + assertThat(test.markIn, Matchers.nullValue()); + } + + public void testMarkAtEnding() throws Exception { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + when(mockIn.read()).thenAnswer(invocationOnMock -> randomFrom(-1, randomInt(255))); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn == null) { + return mockIn; + } else { + return null; + } + } + }; + // read all bytes + while (test.read() != -1) {} + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is null (beginning) + assertThat(test.markIn, Matchers.nullValue()); + test.mark(randomInt(255)); + assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // another mark is a no-op + test.mark(randomInt(255)); + assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + } + + public void testSingleMarkAnywhere() throws Exception { + Supplier mockInputStreamSupplier = () -> { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + try { + when(mockIn.read()).thenAnswer(invocationOnMock -> randomFrom(-1, randomInt(1))); + when(mockIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). 
+ thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + if (randomBoolean()) { + return -1; + } else { + // partial read return + return randomIntBetween(1, len); + } + } + }); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return mockIn; + }; + AtomicBoolean chainingInputStreamEOF = new AtomicBoolean(false); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (chainingInputStreamEOF.get()) { + return null; + } else { + return mockInputStreamSupplier.get(); + } + } + }; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.readNBytes(randomInt(63)); + } + InputStream currentIn = test.currentIn; + int readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(currentIn).mark(Mockito.eq(readLimit)); + // mark again, same position + readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(currentIn).mark(Mockito.eq(readLimit)); + // read more (possibly moving on to a new component) + test.readNBytes(randomInt(63)); + // mark does not budge + assertThat(test.markIn, Matchers.is(currentIn)); + // read until the end + chainingInputStreamEOF.set(true); + test.readAllBytes(); + // current component is at the end + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is still put + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.markIn, never()).close(); + // but close also closes the mark + test.close(); + verify(test.markIn).close(); + } + + public void testMarkOverwritesPreviousMark() throws Exception { + AtomicBoolean chainingInputStreamEOF = new AtomicBoolean(false); + Supplier mockInputStreamSupplier = () -> { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + try { + // single byte read never returns "-1" so it never advances component + when(mockIn.read()).thenAnswer(invocationOnMock -> randomInt(255)); + when(mockIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). 
+ thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + if (randomBoolean()) { + return -1; + } else { + // partial read return + return randomIntBetween(1, len); + } + } + }); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return mockIn; + }; + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (chainingInputStreamEOF.get()) { + return null; + } else { + return mockInputStreamSupplier.get(); + } + } + }; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.readNBytes(randomInt(63)); + } + InputStream currentIn = test.currentIn; + int readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.markIn).mark(Mockito.eq(readLimit)); + // read more within the same component + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.read(); + } + // mark does not budge + assertThat(test.markIn, Matchers.is(currentIn)); + // mark again + readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(currentIn, never()).close(); + verify(currentIn).mark(Mockito.eq(readLimit)); + // read more while switching the component + for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + test.readNBytes(randomInt(63)); + } + // mark does not budge + assertThat(test.markIn, Matchers.is(currentIn)); + // mark again readLimit = randomInt(63); test.mark(readLimit); + assertThat(test.markIn, Matchers.is(test.currentIn)); + // previous mark closed + verify(currentIn).close(); + verify(test.markIn).mark(Mockito.eq(readLimit)); + InputStream markIn = test.markIn; + // read until the end + chainingInputStreamEOF.set(true); + test.readAllBytes(); + // current component is at the end + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is still put + assertThat(test.markIn, Matchers.is(markIn)); + verify(test.markIn, never()).close(); + // mark at the end + readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + verify(markIn).close(); + } + + public void testResetAtBeginning() throws Exception { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + when(mockIn.read()).thenAnswer(invocationOnMock -> randomInt(255)); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn == null) { + return mockIn; + } else { + return null; + } + } + }; + assertThat(test.currentIn, Matchers.nullValue()); + assertThat(test.markIn, Matchers.nullValue()); + if (randomBoolean()) { + // mark at the beginning + test.mark(randomInt(63)); + assertThat(test.markIn, Matchers.nullValue()); + } + // reset immediately + test.reset(); + assertThat(test.currentIn, Matchers.nullValue()); + // read does not change the marK + test.read(); + assertThat(test.currentIn, Matchers.is(mockIn)); + // mark reference is still unchanged + assertThat(test.markIn, Matchers.nullValue()); + // reset back to beginning + test.reset(); verify(mockIn).close(); + assertThat(test.currentIn, Matchers.nullValue()); + // read reaches end + 
when(mockIn.read()).thenReturn(-1); + test.read(); + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark reference is still unchanged + assertThat(test.markIn, Matchers.nullValue()); + // reset back to beginning + test.reset(); + assertThat(test.currentIn, Matchers.nullValue()); + } + + public void testResetAtEnding() throws Exception { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + when(mockIn.read()).thenAnswer(invocationOnMock -> randomFrom(-1, randomInt(255))); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (currentComponentIn == null) { + return mockIn; + } else { + return null; + } + } + }; + // read all bytes + while (test.read() != -1) {} + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is null (beginning) + assertThat(test.markIn, Matchers.nullValue()); + test.mark(randomInt(255)); + assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // reset + test.reset(); + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + assertThat(test.read(), Matchers.is(-1)); + // another mark is a no-op + test.mark(randomInt(255)); assertThat(test.markIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + assertThat(test.read(), Matchers.is(-1)); } - public void testReset() throws Exception { - InputStream mockMarkIn = mock(InputStream.class); - when(mockMarkIn.markSupported()).thenReturn(true); - InputStream mockCurrentIn = mock(InputStream.class); - when(mockCurrentIn.markSupported()).thenReturn(true); + public void testResetForSingleMarkAnywhere() throws Exception { + Supplier mockInputStreamSupplier = () -> { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + try { + // single byte read never returns "-1" so it never advances component + when(mockIn.read()).thenAnswer(invocationOnMock -> randomInt(255)); + when(mockIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). 
+ thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + if (randomBoolean()) { + return -1; + } else { + // partial read return + return randomIntBetween(1, len); + } + } + }); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return mockIn; + }; + AtomicBoolean chainingInputStreamEOF = new AtomicBoolean(false); + AtomicReference nextComponentArg = new AtomicReference<>(); ChainingInputStream test = new ChainingInputStream() { @Override InputStream nextComponent(InputStream currentComponentIn) throws IOException { - return null; + if (nextComponentArg.get() != null) { + Assert.assertThat(currentComponentIn, Matchers.is(nextComponentArg.get())); + nextComponentArg.set(null); + } + if (chainingInputStreamEOF.get()) { + return null; + } else { + return mockInputStreamSupplier.get(); + } } }; - test.currentIn = mockCurrentIn; - test.markIn = mockMarkIn; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.readNBytes(randomInt(63)); + } + InputStream currentIn = test.currentIn; + int readLimit = randomInt(63); + test.mark(readLimit); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(currentIn).mark(Mockito.eq(readLimit)); + // read more without moving to a new component + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.read(); + } + test.reset(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, never()).close(); + verify(test.currentIn).reset(); + // read more, moving on to a new component + for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + test.readNBytes(randomInt(63)); + } + // mark does not budge + assertThat(test.markIn, Matchers.is(currentIn)); + assertThat(test.currentIn, Matchers.not(currentIn)); + InputStream lastCurrentIn = test.currentIn; + // reset back + test.reset(); + verify(lastCurrentIn).close(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, times(2)).reset(); + // assert the "nextComponent" argument + nextComponentArg.set(currentIn); + // read more, moving on to a new component + for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + test.readNBytes(randomInt(63)); + } + // read until the end + chainingInputStreamEOF.set(true); + test.readAllBytes(); + // current component is at the end + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is still put + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.markIn, never()).close(); + // reset when stream is at the end + test.reset(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, times(3)).reset(); + // assert the "nextComponent" argument + nextComponentArg.set(currentIn); + // read more to verify that current component is passed as nextComponent argument + test.readAllBytes(); + } + + public void testResetForDoubleMarkAnywhere() throws Exception { + Supplier mockInputStreamSupplier = () -> { + InputStream mockIn = mock(InputStream.class); + when(mockIn.markSupported()).thenReturn(true); + try { + // single byte read never returns "-1" so it never advances component + when(mockIn.read()).thenAnswer(invocationOnMock -> randomInt(255)); + 
when(mockIn.read(org.mockito.Matchers.any(), org.mockito.Matchers.anyInt(), org.mockito.Matchers.anyInt())). + thenAnswer(invocationOnMock -> { + final int len = (int) invocationOnMock.getArguments()[2]; + if (len == 0) { + return 0; + } else { + if (randomBoolean()) { + return -1; + } else { + // partial read return + return randomIntBetween(1, len); + } + } + }); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return mockIn; + }; + AtomicBoolean chainingInputStreamEOF = new AtomicBoolean(false); + AtomicReference nextComponentArg = new AtomicReference<>(); + ChainingInputStream test = new ChainingInputStream() { + @Override + InputStream nextComponent(InputStream currentComponentIn) throws IOException { + if (nextComponentArg.get() != null) { + Assert.assertThat(currentComponentIn, Matchers.is(nextComponentArg.get())); + nextComponentArg.set(null); + } + if (chainingInputStreamEOF.get()) { + return null; + } else { + return mockInputStreamSupplier.get(); + } + } + }; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.readNBytes(randomInt(63)); + } + InputStream currentIn = test.currentIn; + int readLimit = randomInt(63); + // first mark + test.mark(readLimit); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(currentIn).mark(Mockito.eq(readLimit)); + // read more without moving to a new component + for (int i = 0; i < randomIntBetween(4, 16); i++) { + test.read(); + } + test.reset(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, never()).close(); + verify(test.currentIn).reset(); + // read more, moving on to a new component + for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + test.readNBytes(randomInt(63)); + } + // mark does not budge + assertThat(test.markIn, Matchers.is(currentIn)); + assertThat(test.currentIn, Matchers.not(currentIn)); + InputStream lastCurrentIn = test.currentIn; + // reset back + test.reset(); + verify(lastCurrentIn).close(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, times(2)).reset(); + // assert the "nextComponent" argument + nextComponentArg.set(currentIn); + // read more, moving on to a new component + for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + test.readNBytes(randomInt(63)); + } + // read until the end + chainingInputStreamEOF.set(true); + test.readAllBytes(); + // current component is at the end + assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); + // mark is still put + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.markIn, never()).close(); + // reset when stream is at the end test.reset(); - assertThat(test.currentIn, Matchers.is(mockMarkIn)); - assertThat(test.markIn, Matchers.is(mockMarkIn)); - verify(mockMarkIn).reset(); - when(mockCurrentIn.read()).thenReturn(-1); - verify(mockMarkIn, never()).close(); - verify(mockCurrentIn).close(); + assertThat(test.currentIn, Matchers.is(currentIn)); + assertThat(test.markIn, Matchers.is(currentIn)); + verify(test.currentIn, times(3)).reset(); + // assert the "nextComponent" argument + nextComponentArg.set(currentIn); + // read more to verify that current component is passed as nextComponent argument + test.readAllBytes(); } - public void testMarkAfterReset() throws Exception { + public 
void testMarkAfterResetNoMock() throws Exception { int len = randomIntBetween(8, 15); byte[] b = randomByteArrayOfLength(len); for (int p = 0; p <= len; p++) { From da29e2fb5501679fcdc7530cbe5d4f1dcff3539f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 28 Dec 2019 10:58:20 +0200 Subject: [PATCH 044/142] Review complete --- .../encrypted/ChainingInputStreamTests.java | 84 +++++++++++-------- 1 file changed, 49 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 6095579ce519b..17f69dd461ce3 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -740,6 +740,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { for (int i = 0; i < randomIntBetween(4, 16); i++) { test.read(); } + // first reset test.reset(); assertThat(test.currentIn, Matchers.is(currentIn)); assertThat(test.markIn, Matchers.is(currentIn)); @@ -753,7 +754,7 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { assertThat(test.markIn, Matchers.is(currentIn)); assertThat(test.currentIn, Matchers.not(currentIn)); InputStream lastCurrentIn = test.currentIn; - // reset back + // second reset test.reset(); verify(lastCurrentIn).close(); assertThat(test.currentIn, Matchers.is(currentIn)); @@ -837,52 +838,65 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { assertThat(test.currentIn, Matchers.is(currentIn)); assertThat(test.markIn, Matchers.is(currentIn)); verify(currentIn).mark(Mockito.eq(readLimit)); - // read more without moving to a new component - for (int i = 0; i < randomIntBetween(4, 16); i++) { - test.read(); - } - test.reset(); - assertThat(test.currentIn, Matchers.is(currentIn)); - assertThat(test.markIn, Matchers.is(currentIn)); - verify(test.currentIn, never()).close(); - verify(test.currentIn).reset(); - // read more, moving on to a new component - for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + // possibly skips over several components + for (int i = 0; i < randomIntBetween(1, 2); i++) { test.readNBytes(randomInt(63)); } - // mark does not budge - assertThat(test.markIn, Matchers.is(currentIn)); - assertThat(test.currentIn, Matchers.not(currentIn)); InputStream lastCurrentIn = test.currentIn; - // reset back + // second mark + readLimit = randomInt(63); + test.mark(readLimit); + if (lastCurrentIn != currentIn) { + verify(currentIn).close(); + } + assertThat(test.currentIn, Matchers.is(lastCurrentIn)); + assertThat(test.markIn, Matchers.is(lastCurrentIn)); + verify(lastCurrentIn).mark(Mockito.eq(readLimit)); + currentIn = lastCurrentIn; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(1, 2); i++) { + test.readNBytes(randomInt(63)); + } + lastCurrentIn = test.currentIn; + // reset test.reset(); - verify(lastCurrentIn).close(); assertThat(test.currentIn, Matchers.is(currentIn)); assertThat(test.markIn, Matchers.is(currentIn)); - verify(test.currentIn, times(2)).reset(); - // assert the "nextComponent" argument + if (lastCurrentIn != currentIn) { + verify(lastCurrentIn).close(); + } + verify(currentIn).reset(); + // 
assert the "nextComponet" arg is the current component nextComponentArg.set(currentIn); - // read more, moving on to a new component - for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { + // possibly skips over several components + for (int i = 0; i < randomIntBetween(4, 16); i++) { test.readNBytes(randomInt(63)); } - // read until the end - chainingInputStreamEOF.set(true); - test.readAllBytes(); - // current component is at the end - assertThat(test.currentIn, Matchers.is(ChainingInputStream.EXHAUSTED_MARKER)); - // mark is still put - assertThat(test.markIn, Matchers.is(currentIn)); - verify(test.markIn, never()).close(); - // reset when stream is at the end + lastCurrentIn = test.currentIn; + // third mark after reset + readLimit = randomInt(63); + test.mark(readLimit); + if (lastCurrentIn != currentIn) { + verify(currentIn).close(); + } + assertThat(test.currentIn, Matchers.is(lastCurrentIn)); + assertThat(test.markIn, Matchers.is(lastCurrentIn)); + verify(lastCurrentIn).mark(Mockito.eq(readLimit)); + nextComponentArg.set(lastCurrentIn); + currentIn = lastCurrentIn; + // possibly skips over several components + for (int i = 0; i < randomIntBetween(1, 2); i++) { + test.readNBytes(randomInt(63)); + } + lastCurrentIn = test.currentIn; + // reset after mark after reset test.reset(); assertThat(test.currentIn, Matchers.is(currentIn)); assertThat(test.markIn, Matchers.is(currentIn)); - verify(test.currentIn, times(3)).reset(); - // assert the "nextComponent" argument - nextComponentArg.set(currentIn); - // read more to verify that current component is passed as nextComponent argument - test.readAllBytes(); + if (lastCurrentIn != currentIn) { + verify(lastCurrentIn).close(); + } + verify(currentIn).reset(); } public void testMarkAfterResetNoMock() throws Exception { From bed1e5164952b5ee5491f0fa8d9760518e5c09b0 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 29 Dec 2019 14:19:06 +0200 Subject: [PATCH 045/142] Merge fallout --- .../repositories/encrypted/EncryptedRepository.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 840a64074c143..da4fd63c71c49 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -170,7 +170,7 @@ public InputStream readBlob(String blobName) throws IOException { public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { SecretKey dataEncryptionKey = keyGenerator.generateKey(); int nonce = secureRandom.nextInt(); - long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionSize(blobSize, PACKET_LENGTH_IN_BYTES); + long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); From 5e7269bcfb87e271b8fef0ec891b8d6201eb438b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 
02:48:15 -0500 Subject: [PATCH 046/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/EncryptionPacketsInputStream.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index e6292277db01f..1a1a30577f083 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -113,7 +113,8 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { // there are no more packets return null; } - // mark source input stream at packet boundary + // If the enclosing stream has a mark set, + // then apply it to the source input stream when we reach a packet boundary if (markSourceOnNextPacket != -1) { source.mark(markSourceOnNextPacket); markSourceOnNextPacket = -1; From 14ee4aad9f4863527e97984bbb9e27c9db3f9d38 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 10:23:28 +0200 Subject: [PATCH 047/142] IV position --- .../repositories/encrypted/EncryptionPacketsInputStream.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 1a1a30577f083..73810dba980b1 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -98,6 +98,7 @@ public EncryptionPacketsInputStream(InputStream source, SecretKey secretKey, int } this.packetLength = packetLength; this.packetIv = ByteBuffer.allocate(EncryptedRepository.GCM_IV_LENGTH_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + // nonce takes the first 4 bytes of the IV this.packetIv.putInt(0, nonce); this.encryptedPacketLength = packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; @@ -121,7 +122,9 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { } // create the new packet InputStream encryptionInputStream = new PrefixInputStream(source, packetLength, false); - packetIv.putLong(4, counter++); + // the counter takes up the last 8 bytes of the packet IV (12 byte wide) + // the first 4 bytes are used by the nonce (which is the same for every packet IV) + packetIv.putLong(Integer.BYTES, counter++); if (counter == EncryptedRepository.PACKET_START_COUNTER) { // counter wrap around throw new IOException("Maximum packet count limit exceeded"); From 7ee63c0c15e631238d714ae17003ca5a2716c11f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 03:26:40 -0500 Subject: [PATCH 048/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java Co-Authored-By: Tim Vernum --- 
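A note on the IV layout touched by PATCH 047 above: the 12-byte GCM packet IV is described there as a 4-byte nonce (fixed for the whole blob) followed by an 8-byte per-packet counter, both written little-endian. The following minimal sketch only illustrates that layout; the class and method names are assumptions, not taken from the plugin:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    class PacketIvSketch {
        // Builds one 12-byte packet IV: bytes 0..3 hold the nonce, bytes 4..11 hold
        // the packet counter; both are written little-endian, as in the hunk above.
        static byte[] packetIv(int nonce, long packetCounter) {
            ByteBuffer iv = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);
            iv.putInt(0, nonce);
            iv.putLong(Integer.BYTES, packetCounter);
            return iv.array();
        }
    }

The same offsets (0 and Integer.BYTES) are the ones used by the patch when it advances the counter for each packet.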
.../elasticsearch/repositories/encrypted/PrefixInputStream.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 7f20bdc3d4dac..3d81d4b202a32 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -94,6 +94,7 @@ public int available() throws IOException { @Override public void mark(int readlimit) { + // mark and reset are not supported } @Override From 74f38b2a17631cbc68e63e327ff4a7edf4d685d0 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 10:33:57 +0200 Subject: [PATCH 049/142] move mark supported before mark/reset implementations --- .../repositories/encrypted/PrefixInputStream.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 3d81d4b202a32..250140ccaaa63 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -92,6 +92,11 @@ public int available() throws IOException { return Math.min(length - position, in.available()); } + @Override + public boolean markSupported() { + return false; + } + @Override public void mark(int readlimit) { // mark and reset are not supported @@ -102,11 +107,6 @@ public void reset() throws IOException { throw new IOException("mark/reset not supported"); } - @Override - public boolean markSupported() { - return false; - } - @Override public void close() throws IOException { if (closed) { From fee2d795c8db9027cfaed15190b3138666c74445 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 10:40:12 +0200 Subject: [PATCH 050/142] Package-protected instead of protected for final classes --- .../encrypted/BufferOnMarkInputStream.java | 35 +++++++++---------- .../encrypted/CountingInputStream.java | 6 ++-- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index d7f508ceaed10..1221d3267d0c8 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -30,8 +30,7 @@ */ public final class BufferOnMarkInputStream extends FilterInputStream { - // all protected for tests - protected final int bufferSize; + final int bufferSize; // package-protected for tests /** * The array used to store the bytes to be replayed upon a reset call. 
* The buffer portion that stores valid bytes, which must be returned by the read calls after a reset call, @@ -42,29 +41,29 @@ public final class BufferOnMarkInputStream extends FilterInputStream { * are equal. The buffer is full if it stores {@code bufferSize} elements. * To avoid mixing up the two states, the actual allocated size of the array is {@code bufferSize + 1}. */ - protected byte[] ringBuffer; + byte[] ringBuffer; // package-protected for tests /** * The inclusive start offset of the bytes that must be replayed after a reset call. */ - protected int head; + int head; // package-protected for tests /** * The exclusive end offset of the bytes that must be replayed after a reset call. */ - protected int tail; + int tail; // package-protected for tests /** * The current offset of the next byte to be returned from the buffer for the reads following a reset. * This is defined only when {@code resetCalled} is {@code true}. */ - protected int position; + int position; // package-protected for tests /** * {@code true} when the result of a read or skip from the underlying stream must also be stored in the buffer */ - protected boolean markCalled; + boolean markCalled; // package-protected for tests /** * {@code true} when the returned bytes must come from the buffer and not from the underlying stream */ - protected boolean resetCalled; - protected boolean closed; + boolean resetCalled; // package-protected for tests + boolean closed; // package-protected for tests /** * Creates a {@code BufferOnMarkInputStream} that buffers a maximum of {@code bufferSize} elements @@ -236,6 +235,15 @@ public int available() throws IOException { return bytesAvailable; } + /** + * Tests if this input stream supports the {@code mark} and + * {@code reset} methods. This always returns {@code true}. + */ + @Override + public boolean markSupported() { + return true; + } + /** * Marks the current position in this input stream. A subsequent call to * the {@code reset} method repositions this stream at the last marked @@ -286,15 +294,6 @@ public void mark(int readlimit) { } } - /** - * Tests if this input stream supports the {@code mark} and - * {@code reset} methods. This always returns {@code true}. - */ - @Override - public boolean markSupported() { - return true; - } - /** * Repositions this stream to the position at the time the * {@code mark} method was last called on this input stream. 
diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index 6a9e7df9b75e1..de9692a0f6191 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -24,9 +24,9 @@ */ public final class CountingInputStream extends FilterInputStream { - protected long count; // protected for tests - protected long mark; // protected for tests - protected boolean closed; // protected for tests + long count; // package-protected for tests + long mark; // package-protected for tests + boolean closed; // package-protected for tests private final boolean closeSource; /** From eda1fe00ce6f5cf9c6bc0f8c46f828181e74c3e3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 11:31:11 +0200 Subject: [PATCH 051/142] RemainingPrefixByteCount --- .../encrypted/PrefixInputStream.java | 62 ++++++++++++------- 1 file changed, 41 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 250140ccaaa63..e0e795f29e3ca 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -15,27 +15,43 @@ * A {@code PrefixInputStream} wraps another input stream and exposes * only the first bytes of it. Reading from the wrapping * {@code PrefixInputStream} consumes the underlying stream. The stream - * is exhausted when {@code length} bytes have been read or the underlying - * stream is exhausted. + * is exhausted when {@code prefixLength} bytes have been read, or the underlying + * stream is exhausted before that. *
<p>
- * If the {@code closeSource} constructor argument is {@code true}, closing this - * stream will also close the underlying input stream. Any subsequent {@code read}, - * {@code skip} and {@code available} calls will throw {@code IOException}s. + * Only if the {@code closeSource} constructor argument is {@code true}, the + * closing of this stream will also close the underlying input stream. + * Any subsequent {@code read}, {@code skip} and {@code available} calls + * will throw {@code IOException}s. */ public final class PrefixInputStream extends FilterInputStream { - private final int length; - private int position; + /** + * The length in bytes of the prefix. + * This is the maximum number of bytes that can be read from this stream, + * but fewer bytes can be read if the wrapped source stream itself contains fewer bytes + */ + private final int prefixLength; + /** + * The current count of bytes read from this stream. + * This starts of as {@code 0} and is always smaller or equal to {@code prefixLength}. + */ + private int count; + /** + * whether closing this stream must also close the underlying stream + */ private boolean closeSource; + /** + * flag signalling if this stream has been closed + */ private boolean closed; - public PrefixInputStream(InputStream in, int length, boolean closeSource) { + public PrefixInputStream(InputStream in, int prefixLength, boolean closeSource) { super(Objects.requireNonNull(in)); - if (length < 0) { - throw new IllegalArgumentException("The length constructor argument must be a positive value"); + if (prefixLength < 0) { + throw new IllegalArgumentException("The prefixLength constructor argument must be a positive integer"); } - this.length = length; - this.position = 0; + this.prefixLength = prefixLength; + this.count = 0; this.closeSource = closeSource; this.closed = false; } @@ -43,14 +59,14 @@ public PrefixInputStream(InputStream in, int length, boolean closeSource) { @Override public int read() throws IOException { ensureOpen(); - if (position >= length) { + if (remainingPrefixByteCount() <= 0) { return -1; } int byteVal = in.read(); if (byteVal == -1) { return -1; } - position++; + count++; return byteVal; } @@ -61,35 +77,35 @@ public int read(byte[] b, int off, int len) throws IOException { if (len == 0) { return 0; } - if (position >= length) { + if (remainingPrefixByteCount() <= 0) { return -1; } - int readSize = Math.min(len, length - position); + int readSize = Math.min(len, remainingPrefixByteCount()); int bytesRead = in.read(b, off, readSize); if (bytesRead == -1) { return -1; } - position += bytesRead; + count += bytesRead; return bytesRead; } @Override public long skip(long n) throws IOException { ensureOpen(); - if (n <= 0 || position >= length) { + if (n <= 0 || remainingPrefixByteCount() <= 0) { return 0; } - long bytesToSkip = Math.min(n, length - position); + long bytesToSkip = Math.min(n, remainingPrefixByteCount()); assert bytesToSkip > 0; long bytesSkipped = in.skip(bytesToSkip); - position += bytesSkipped; + count += bytesSkipped; return bytesSkipped; } @Override public int available() throws IOException { ensureOpen(); - return Math.min(length - position, in.available()); + return Math.min(remainingPrefixByteCount(), in.available()); } @Override @@ -118,6 +134,10 @@ public void close() throws IOException { } } + private int remainingPrefixByteCount() { + return prefixLength - count; + } + private void ensureOpen() throws IOException { if (closed) { throw new IOException("Stream has been closed"); From 
e85aefe7212fb95dde0a1b30fd4a725158019945 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 07:27:51 -0500 Subject: [PATCH 052/142] Update x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java Co-Authored-By: Tim Vernum --- .../repositories/encrypted/DecryptionPacketsInputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index a0a6d983b1de0..ec92940361221 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -59,7 +59,7 @@ public final class DecryptionPacketsInputStream extends ChainingInputStream { /** * Computes and returns the length of the plaintext given the {@code ciphertextLength} and the {@code packetLength} * used during encryption. - * Each ciphertext packet is prepended by the Initilization Vector and appended the Authentication Tag. + * Each ciphertext packet is prepended by the Initilization Vector and has the Authentication Tag appended. * Decryption is 1:1, and the ciphertext is not padded, but stripping away the IV and the AT amounts to a shorter * plaintext compared to the ciphertext. * From 8a0773aba5f8a9436e993a847fc3156e31040295 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 16:26:03 +0200 Subject: [PATCH 053/142] no iv instance variable --- .../DecryptionPacketsInputStream.java | 58 ++++++++++++------- .../EncryptionPacketsInputStream.java | 2 +- 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index ec92940361221..d3152061ed401 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -22,6 +22,9 @@ import java.security.NoSuchAlgorithmException; import java.util.Objects; +import static org.elasticsearch.repositories.encrypted.EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; +import static org.elasticsearch.repositories.encrypted.EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; + /** * A {@code DecryptionPacketsInputStream} wraps an encrypted input stream and decrypts * its contents. 
This is designed (and tested) to decrypt only the encryption format that @@ -50,8 +53,7 @@ public final class DecryptionPacketsInputStream extends ChainingInputStream { private final SecretKey secretKey; private final int nonce; private final int packetLength; - private final byte[] packet; - private final byte[] iv; + private final byte[] packetBuffer; private boolean hasNext; private long counter; @@ -66,13 +68,11 @@ public final class DecryptionPacketsInputStream extends ChainingInputStream { * @see EncryptionPacketsInputStream#getEncryptionLength(long, int) */ public static long getDecryptionLength(long ciphertextLength, int packetLength) { - long encryptedPacketLength = - packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES; + long encryptedPacketLength = packetLength + GCM_TAG_LENGTH_IN_BYTES + GCM_IV_LENGTH_IN_BYTES; long completePackets = ciphertextLength / encryptedPacketLength; long decryptedSize = completePackets * packetLength; if (ciphertextLength % encryptedPacketLength != 0) { - decryptedSize += (ciphertextLength % encryptedPacketLength) - EncryptedRepository.GCM_IV_LENGTH_IN_BYTES - - EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES; + decryptedSize += (ciphertextLength % encryptedPacketLength) - GCM_IV_LENGTH_IN_BYTES - GCM_TAG_LENGTH_IN_BYTES; } return decryptedSize; } @@ -85,8 +85,7 @@ public DecryptionPacketsInputStream(InputStream source, SecretKey secretKey, int throw new IllegalArgumentException("Invalid packet length [" + packetLength + "]"); } this.packetLength = packetLength; - this.packet = new byte[packetLength + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES]; - this.iv = new byte[EncryptedRepository.GCM_IV_LENGTH_IN_BYTES]; + this.packetBuffer = new byte[packetLength + GCM_TAG_LENGTH_IN_BYTES]; this.hasNext = true; this.counter = EncryptedRepository.PACKET_START_COUNTER; } @@ -100,14 +99,14 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { return null; } PrefixInputStream packetInputStream = new PrefixInputStream(source, - packetLength + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES + EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES, + packetLength + GCM_IV_LENGTH_IN_BYTES + GCM_TAG_LENGTH_IN_BYTES, false); int currentPacketLength = decrypt(packetInputStream); // only the last packet is shorter, so this must be the last packet if (currentPacketLength != packetLength) { hasNext = false; } - return new ByteArrayInputStream(packet, 0, currentPacketLength); + return new ByteArrayInputStream(packetBuffer, 0, currentPacketLength); } @Override @@ -125,28 +124,43 @@ public void reset() throws IOException { } private int decrypt(PrefixInputStream packetInputStream) throws IOException { - if (packetInputStream.read(iv) != iv.length) { - throw new IOException("Error while reading the heading IV of the packet"); + // read only the IV prefix into the packet buffer + int ivLength = packetInputStream.readNBytes(packetBuffer, 0, GCM_IV_LENGTH_IN_BYTES); + if (ivLength != GCM_IV_LENGTH_IN_BYTES) { + throw new IOException("Packet heading IV error. Unexpected length [" + ivLength + "]."); + } + // extract the nonce and the counter from the packet IV + ByteBuffer ivBuffer = ByteBuffer.wrap(packetBuffer, 0, GCM_IV_LENGTH_IN_BYTES).order(ByteOrder.LITTLE_ENDIAN); + int packetIvNonce = ivBuffer.getInt(0); + long packetIvCounter = ivBuffer.getLong(Integer.BYTES); + if (packetIvNonce != nonce) { + throw new IOException("Packet nonce mismatch. 
Expecting [" + nonce + "], but got [" + packetIvNonce + "]."); + } + if (packetIvCounter != counter) { + throw new IOException("Packet counter mismatch. Expecting [" + counter + "], but got [" + packetIvCounter + "]."); } - ByteBuffer ivBuffer = ByteBuffer.wrap(iv).order(ByteOrder.LITTLE_ENDIAN); - if (ivBuffer.getInt(0) != nonce || ivBuffer.getLong(4) != counter++) { - throw new IOException("Invalid packet IV"); + // counter increment for the subsequent packet + counter++; + // counter wrap around + if (counter == EncryptedRepository.PACKET_START_COUNTER) { + throw new IOException("Maximum packet count limit exceeded"); } - int packetLength = packetInputStream.read(packet); - if (packetLength < EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES) { - throw new IOException("Error while reading the packet"); + Cipher packetCipher = getPacketDecryptionCipher(packetBuffer); + // read the rest of the packet, reusing the packetBuffer + int packetLength = packetInputStream.readNBytes(packetBuffer, 0, packetBuffer.length); + if (packetLength < GCM_TAG_LENGTH_IN_BYTES) { + throw new IOException("Encrypted packet is too short"); } - Cipher packetCipher = getPacketDecryptionCipher(iv); try { // in-place decryption - return packetCipher.doFinal(packet, 0, packetLength, packet); + return packetCipher.doFinal(packetBuffer, 0, packetLength, packetBuffer); } catch (ShortBufferException | IllegalBlockSizeException | BadPaddingException e) { throw new IOException(e); } } - private Cipher getPacketDecryptionCipher(byte[] packetIv) throws IOException { - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packetIv); + private Cipher getPacketDecryptionCipher(byte[] packet) throws IOException { + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packet, 0, GCM_IV_LENGTH_IN_BYTES); try { Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 73810dba980b1..a45569c2f9632 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -125,8 +125,8 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { // the counter takes up the last 8 bytes of the packet IV (12 byte wide) // the first 4 bytes are used by the nonce (which is the same for every packet IV) packetIv.putLong(Integer.BYTES, counter++); + // counter wrap around if (counter == EncryptedRepository.PACKET_START_COUNTER) { - // counter wrap around throw new IOException("Maximum packet count limit exceeded"); } Cipher packetCipher = getPacketEncryptionCipher(secretKey, packetIv.array()); From 07d7ac86fefc9e0b1303e4a52b0881813e2377ba Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 16:47:43 +0200 Subject: [PATCH 054/142] Nit --- .../repositories/encrypted/DecryptionPacketsInputStream.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index d3152061ed401..ad30c624ddb45 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -145,6 +145,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { if (counter == EncryptedRepository.PACKET_START_COUNTER) { throw new IOException("Maximum packet count limit exceeded"); } + // cipher used to decrypt only the current packetInputStream Cipher packetCipher = getPacketDecryptionCipher(packetBuffer); // read the rest of the packet, reusing the packetBuffer int packetLength = packetInputStream.readNBytes(packetBuffer, 0, packetBuffer.length); @@ -152,10 +153,10 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { throw new IOException("Encrypted packet is too short"); } try { - // in-place decryption + // in-place decryption of the whole packet return packetCipher.doFinal(packetBuffer, 0, packetLength, packetBuffer); } catch (ShortBufferException | IllegalBlockSizeException | BadPaddingException e) { - throw new IOException(e); + throw new IOException("Exception during packet ", e); } } From fd109144d88a98ea4ca0512ff77a20ed8209daa6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 16:53:15 +0200 Subject: [PATCH 055/142] Exception messages --- .../encrypted/DecryptionPacketsInputStream.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index ad30c624ddb45..8e8ebc7d9253d 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -153,10 +153,10 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { throw new IOException("Encrypted packet is too short"); } try { - // in-place decryption of the whole packet + // in-place decryption of the whole packet and return decrypted length return packetCipher.doFinal(packetBuffer, 0, packetLength, packetBuffer); } catch (ShortBufferException | IllegalBlockSizeException | BadPaddingException e) { - throw new IOException("Exception during packet ", e); + throw new IOException("Exception during packet decryption", e); } } @@ -167,7 +167,7 @@ private Cipher getPacketDecryptionCipher(byte[] packet) throws IOException { packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); return packetCipher; } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { - throw new IOException(e); + throw new IOException("Exception during packet cipher initialisation", e); } } } From 2e41d4f73a8ede35d7ee6a17f515103dfbd89a5b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 18:04:53 +0200 Subject: [PATCH 056/142] Fix tests with exception 
names --- .../encrypted/DecryptionPacketsInputStreamTests.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java index 82bd551925833..a53fe39c9d049 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java @@ -75,7 +75,7 @@ public void testFailureEncryptAndDecryptWrongNonce() throws Exception { IOException e = expectThrows(IOException.class, () -> { in.readAllBytes(); }); - assertThat(e.getMessage(), Matchers.is("Invalid packet IV")); + assertThat(e.getMessage(), Matchers.startsWith("Packet nonce mismatch.")); } } @@ -98,7 +98,7 @@ public void testFailureEncryptAndDecryptWrongKey() throws Exception { IOException e = expectThrows(IOException.class, () -> { in.readAllBytes(); }); - assertThat(e.getMessage(), Matchers.is("javax.crypto.AEADBadTagException: Tag mismatch!")); + assertThat(e.getMessage(), Matchers.is("Exception during packet decryption")); } } @@ -126,7 +126,7 @@ public void testFailureEncryptAndDecryptAlteredCiphertext() throws Exception { IOException e = expectThrows(IOException.class, () -> { in.readAllBytes(); }); - assertThat(e.getMessage(), Matchers.is("javax.crypto.AEADBadTagException: Tag mismatch!")); + assertThat(e.getMessage(), Matchers.is("Exception during packet decryption")); } // flip bit back encryptedBytes[i] ^= (1 << j); @@ -158,7 +158,11 @@ public void testFailureEncryptAndDecryptAlteredCiphertextIV() throws Exception { IOException e = expectThrows(IOException.class, () -> { in.readAllBytes(); }); - assertThat(e.getMessage(), Matchers.is("Invalid packet IV")); + if (j < Integer.BYTES) { + assertThat(e.getMessage(), Matchers.startsWith("Packet nonce mismatch")); + } else { + assertThat(e.getMessage(), Matchers.startsWith("Packet counter mismatch")); + } } // flip bit back encryptedBytes[i + j] ^= (1 << k); From 97f591791877f64e37d5b1bdfcc88a7f6daff627 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 21:42:48 +0200 Subject: [PATCH 057/142] Test for reader of fewer bytes --- .../DecryptionPacketsInputStreamTests.java | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java index a53fe39c9d049..f39e6e86057d3 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java @@ -13,6 +13,7 @@ import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; import java.io.ByteArrayInputStream; +import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.security.SecureRandom; @@ -43,7 +44,7 @@ public void testSuccessEncryptAndDecryptLargePacketLength() throws Exception { } public void 
testSuccessEncryptAndDecryptTypicalPacketLength() throws Exception { - int len = 512 + Randomness.get().nextInt(512); + int len = 1024 + Randomness.get().nextInt(512); byte[] plainBytes = new byte[len]; Randomness.get().nextBytes(plainBytes); SecretKey secretKey = generateSecretKey(); @@ -180,16 +181,28 @@ private void testEncryptAndDecryptSuccess(byte[] plainBytes, SecretKey secretKey } assertThat((long) encryptedBytes.length, Matchers.is(EncryptionPacketsInputStream.getEncryptionLength(len, packetLen))); byte[] decryptedBytes; - try (InputStream in = new DecryptionPacketsInputStream(new ByteArrayInputStream(encryptedBytes), secretKey, nonce, - packetLen)) { + try (InputStream in = new DecryptionPacketsInputStream(new ReadLessFilterInputStream(new ByteArrayInputStream(encryptedBytes)), + secretKey, nonce, packetLen)) { decryptedBytes = in.readAllBytes(); } assertThat(decryptedBytes.length, Matchers.is(len)); assertThat((long) decryptedBytes.length, Matchers.is(DecryptionPacketsInputStream.getDecryptionLength(encryptedBytes.length, packetLen))); - for (int i = 0; i < len; i++) { - assertThat(decryptedBytes[i], Matchers.is(plainBytes[i])); - } + assertThat(decryptedBytes, Matchers.equalTo(plainBytes)); + } + } + + // input stream that reads less bytes than asked to, testing that packet-wide reads don't rely on `read` calls for memory buffers which + // always return the same number of bytes they are asked to + private static class ReadLessFilterInputStream extends FilterInputStream { + + protected ReadLessFilterInputStream(InputStream in) { + super(in); + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + return super.read(b, off, randomIntBetween(0, len)); } } From 4fd6dcc8b43b907060036c3bb36247d9e5ffa1a5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 30 Dec 2019 22:00:06 +0200 Subject: [PATCH 058/142] Adjust counting input stream docs --- .../repositories/encrypted/CountingInputStream.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index de9692a0f6191..38c536d2832e4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -13,10 +13,12 @@ /** * A {@code CountingInputStream} wraps another input stream and counts the number of bytes * that have been read or skipped. - * Bytes replayed following a {@code reset} call are not counted multiple times, i.e. only - * the bytes that are produced in a single pass, without resets, by the wrapped stream are counted. + *
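(Editorial aside on the ReadLessFilterInputStream test just added, not part of the patch: a single read(byte[], int, int) call may legally return fewer bytes than requested, which is why the decryption code reads whole packets with readNBytes rather than a single read. A small standalone illustration, using a hypothetical wrapper that caps every read at 3 bytes:)

import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ShortReadDemo {
    // hypothetical stream that never returns more than 3 bytes per read call
    static class ShortReadInputStream extends FilterInputStream {
        ShortReadInputStream(InputStream in) {
            super(in);
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            return super.read(b, off, Math.min(len, 3));
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] data = new byte[32];
        InputStream shortReads = new ShortReadInputStream(new ByteArrayInputStream(data));
        byte[] buf = new byte[32];
        System.out.println(shortReads.read(buf, 0, buf.length));       // 3: a single read may come up short
        shortReads = new ShortReadInputStream(new ByteArrayInputStream(data));
        System.out.println(shortReads.readNBytes(buf, 0, buf.length)); // 32: readNBytes loops until len bytes or EOF
    }
}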
<p>
* This input stream does no buffering on its own and only supports {@code mark} and - * {@code reset} if the wrapped stream supports it. + * {@code reset} if the underlying wrapped stream supports it. + *
<p>
+ * If the stream supports {@code mark} and {@code reset} the byte count is also reset to the + * value that it had on the last {@code mark} call, thereby not counting the same bytes twice. *
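(Editorial aside, not part of the patch: a minimal counting wrapper illustrating the semantics described just above — on reset the count rolls back to its value at the matching mark, so replayed bytes are not counted twice. This is an illustrative sketch, not the CountingInputStream from this series, and it only handles the read and mark/reset paths:)

import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

class SimpleCountingInputStream extends FilterInputStream {
    private long count;
    private long markCount = -1;

    SimpleCountingInputStream(InputStream in) {
        super(in);
    }

    @Override
    public int read() throws IOException {
        int b = in.read();
        if (b != -1) {
            count++;
        }
        return b;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        int n = in.read(b, off, len);
        if (n > 0) {
            count += n;
        }
        return n;
    }

    @Override
    public synchronized void mark(int readlimit) {
        in.mark(readlimit);
        markCount = count;
    }

    @Override
    public synchronized void reset() throws IOException {
        in.reset();
        count = markCount; // replayed bytes are not counted a second time
    }

    long getCount() {
        return count;
    }

    public static void main(String[] args) throws IOException {
        SimpleCountingInputStream counting = new SimpleCountingInputStream(new ByteArrayInputStream(new byte[16]));
        counting.read(new byte[4], 0, 4);
        counting.mark(16);
        counting.read(new byte[8], 0, 8);
        counting.reset();
        System.out.println(counting.getCount()); // prints 4: the 8 replayed bytes were rolled back
    }
}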
<p>
* If the {@code closeSource} constructor argument is {@code true}, closing this * stream will also close the wrapped input stream. Apart from closing the wrapped From 3d1daf45db9d041a7b409908710ac9d8572cc9f5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 31 Dec 2019 18:32:29 +0200 Subject: [PATCH 059/142] RingBuffer --- .../encrypted/BufferOnMarkInputStream.java | 268 +++++++++--------- .../BufferOnMarkInputStreamTests.java | 239 +++++++--------- 2 files changed, 231 insertions(+), 276 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 1221d3267d0c8..d114dd0274ea6 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -30,31 +30,7 @@ */ public final class BufferOnMarkInputStream extends FilterInputStream { - final int bufferSize; // package-protected for tests - /** - * The array used to store the bytes to be replayed upon a reset call. - * The buffer portion that stores valid bytes, which must be returned by the read calls after a reset call, - * is demarcated by a {@code head} (inclusive) and a {@code tail} offset (exclusive). The offsets wrap around, - * i.e. if the {@code tail} offset is smaller than the {@code head} offset, then the portion of valid bytes - * is that from the {@code head} offset until the end of the buffer array and from the start of the array - * until the {@code tail} offset. The buffer is empty when both the {@code head} and the {@code tail} offsets - * are equal. The buffer is full if it stores {@code bufferSize} elements. - * To avoid mixing up the two states, the actual allocated size of the array is {@code bufferSize + 1}. - */ - byte[] ringBuffer; // package-protected for tests - /** - * The inclusive start offset of the bytes that must be replayed after a reset call. - */ - int head; // package-protected for tests - /** - * The exclusive end offset of the bytes that must be replayed after a reset call. - */ - int tail; // package-protected for tests - /** - * The current offset of the next byte to be returned from the buffer for the reads following a reset. - * This is defined only when {@code resetCalled} is {@code true}. 
- */ - int position; // package-protected for tests + final RingBuffer ringBuffer; // package-protected for tests /** * {@code true} when the result of a read or skip from the underlying stream must also be stored in the buffer */ @@ -75,13 +51,7 @@ public final class BufferOnMarkInputStream extends FilterInputStream { */ public BufferOnMarkInputStream(InputStream in, int bufferSize) { super(Objects.requireNonNull(in)); - if (bufferSize <= 0) { - throw new IllegalArgumentException("The buffersize constructor argument must be a strictly positive value"); - } - this.bufferSize = bufferSize; - // the ring buffer is lazily allocated upon the first mark call - this.ringBuffer = null; - this.head = this.tail = this.position = -1; + this.ringBuffer = new RingBuffer(bufferSize); this.markCalled = this.resetCalled = false; this.closed = false; } @@ -120,7 +90,7 @@ public int read(byte[] b, int off, int len) throws IOException { } // firstly try reading any buffered bytes in case this read call is part of a rewind following a reset call if (resetCalled) { - int bytesRead = readFromBuffer(b, off, len); + int bytesRead = ringBuffer.read(b, off, len); if (bytesRead == 0) { // rewinding is complete, no more bytes to replay resetCalled = false; @@ -134,12 +104,12 @@ public int read(byte[] b, int off, int len) throws IOException { } // if mark has been previously called, buffer all the read bytes if (markCalled) { - if (bytesRead > getRemainingBufferCapacity()) { + if (bytesRead > ringBuffer.getAvailableToWriteByteCount()) { // could not fully write to buffer, invalidate mark markCalled = false; - head = tail = position = 0; + ringBuffer.clear(); } else { - writeToBuffer(b, off, bytesRead); + ringBuffer.write(b, off, bytesRead); } } return bytesRead; @@ -225,11 +195,7 @@ public int available() throws IOException { ensureOpen(); int bytesAvailable = 0; if (resetCalled) { - if (position <= tail) { - bytesAvailable += tail - position; - } else { - bytesAvailable += ringBuffer.length - position + tail; - } + bytesAvailable += ringBuffer.getAvailableToReadByteCount(); } bytesAvailable += in.available(); return bytesAvailable; @@ -269,8 +235,9 @@ public boolean markSupported() { public void mark(int readlimit) { // readlimit is otherwise ignored but this defensively fails if the caller is expecting to be able to mark/reset more than this // instance can accommodate in the ring mark buffer - if (readlimit > bufferSize) { - throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + bufferSize + "]"); + if (readlimit > ringBuffer.getBufferSize()) { + throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + + ringBuffer.getBufferSize() + "]"); } else if (readlimit < 0) { throw new IllegalArgumentException("Readlimit value [" + readlimit + "] cannot be negative"); } @@ -278,19 +245,11 @@ public void mark(int readlimit) { return; } markCalled = true; - // lazily allocate the mark ring buffer - if (ringBuffer == null) { - // "+ 1" for the full-buffer sentinel free element - ringBuffer = new byte[bufferSize + 1]; - head = tail = position = 0; + if (resetCalled) { + // mark while replaying after a reset + ringBuffer.mark(); } else { - if (resetCalled) { - // mark after reset - head = position; - } else { - // discard any leftovers in buffer - head = tail = position = 0; - } + ringBuffer.clear(); } } @@ -313,7 +272,7 @@ public void reset() throws IOException { throw new IOException("Mark not called or has been invalidated"); } 
resetCalled = true; - position = head; + ringBuffer.reset(); } /** @@ -325,7 +284,6 @@ public void reset() throws IOException { public void close() throws IOException { if (false == closed) { closed = true; - ringBuffer = null; in.close(); } } @@ -335,101 +293,145 @@ public void close() throws IOException { * This is the same as the {@code bufferSize} constructor argument. */ public int getMaxMarkReadlimit() { - return bufferSize; + return ringBuffer.getBufferSize(); } - private int readFromBuffer(byte[] b, int off, int len) { - if (position == tail) { - return 0; - } - final int readLength; - if (position <= tail) { - readLength = Math.min(len, tail - position); - } else { - readLength = Math.min(len, ringBuffer.length - position); - } - System.arraycopy(ringBuffer, position, b, off, readLength); - position += readLength; - if (position == ringBuffer.length) { - position = 0; + private void ensureOpen() throws IOException { + if (closed) { + throw new IOException("Stream has been closed"); } - return readLength; } - private void writeToBuffer(byte[] b, int off, int len) { - while (len > 0) { - final int writeLength; - if (head <= tail) { - writeLength = Math.min(len, ringBuffer.length - tail - (head == 0 ? 1 : 0)); - } else { - writeLength = Math.min(len, head - tail - 1); - } - if (writeLength <= 0) { - throw new IllegalStateException("No space left in the mark buffer"); - } - System.arraycopy(b, off, ringBuffer, tail, writeLength); - tail += writeLength; - off += writeLength; - len -= writeLength; - if (tail == ringBuffer.length) { - tail = 0; - // tail wrap-around overwrites head - if (head == 0) { - throw new IllegalStateException("Possible overflow of the mark buffer"); - } - } - } + // only for tests + protected InputStream getWrapped() { + return in; } - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream has been closed"); + static class RingBuffer { + + final int bufferSize; // package-protected for tests + /** + * The array used to store the bytes to be replayed upon a reset call. + * The buffer portion that stores valid bytes, which must be returned by the read calls after a reset call, + * is demarcated by a {@code head} (inclusive) and a {@code tail} offset (exclusive). The offsets wrap around, + * i.e. if the {@code tail} offset is smaller than the {@code head} offset, then the portion of valid bytes + * is that from the {@code head} offset until the end of the buffer array and from the start of the array + * until the {@code tail} offset. The buffer is empty when both the {@code head} and the {@code tail} offsets + * are equal. The buffer is full if it stores {@code bufferSize} elements. + * To avoid mixing up the two states, the actual allocated size of the array is {@code bufferSize + 1}. + */ + byte[] buffer; // package-protected for tests + /** + * The inclusive start offset of the bytes that must be replayed after a reset call. + */ + int head; // package-protected for tests + /** + * The exclusive end offset of the bytes that must be replayed after a reset call. + */ + int tail; // package-protected for tests + /** + * The current offset of the next byte to be returned from the buffer for the reads following a reset. + * This is defined only when {@code resetCalled} is {@code true}. 
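(Editorial aside, not part of the patch: the backing array is allocated with one spare element because head == tail is reserved to mean "empty"; without that sentinel slot a completely full buffer would be indistinguishable from an empty one. A tiny standalone illustration of the capacity arithmetic that getAvailableToWriteByteCount below uses:)

public class RingBufferSentinelDemo {
    // mirrors getAvailableToWriteByteCount for an already-allocated array of length bufferSize + 1
    static int availableToWrite(int arrayLength, int head, int tail) {
        if (head == tail) {
            return arrayLength - 1;
        } else if (head < tail) {
            return arrayLength - tail + head - 1;
        } else {
            return head - tail - 1;
        }
    }

    public static void main(String[] args) {
        int bufferSize = 4;
        int arrayLength = bufferSize + 1; // one slot is always kept free
        System.out.println(availableToWrite(arrayLength, 0, 0)); // 4: head == tail means empty
        System.out.println(availableToWrite(arrayLength, 0, 4)); // 0: full, yet head != tail
    }
}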
+ */ + int position; // package-protected for tests + + RingBuffer(int bufferSize) { + if (bufferSize <= 0) { + throw new IllegalArgumentException("The buffersize constructor argument must be a strictly positive value"); + } + this.bufferSize = bufferSize; } - } - // protected for tests - protected int getRemainingBufferCapacity() { - if (ringBuffer == null) { + int getBufferSize() { return bufferSize; } - if (head == tail) { - return ringBuffer.length - 1; - } else if (head < tail) { - return ringBuffer.length - tail + head - 1; - } else { - return head - tail - 1; + + void reset() { + position = head; } - } - //protected for tests - protected int getRemainingBufferToRead() { - if (ringBuffer == null) { - return 0; + void mark() { + head = position; } - if (head <= tail) { - return tail - position; - } else if (position >= head) { - return ringBuffer.length - position + tail; - } else { - return tail - position; + + void clear() { + head = position = tail = 0; } - } - // protected for tests - protected int getCurrentBufferCount() { - if (ringBuffer == null) { - return 0; + int read(byte[] b, int off, int len) { + if (position == tail) { + return 0; + } + final int readLength; + if (position <= tail) { + readLength = Math.min(len, tail - position); + } else { + readLength = Math.min(len, buffer.length - position); + } + System.arraycopy(buffer, position, b, off, readLength); + position += readLength; + if (position == buffer.length) { + position = 0; + } + return readLength; } - if (head <= tail) { - return tail - head; - } else { - return ringBuffer.length - head + tail; + + void write(byte[] b, int off, int len) { + if (buffer == null && len > 0) { + // "+ 1" for the full-buffer sentinel free element + buffer = new byte[bufferSize + 1]; + head = position = tail = 0; + } + while (len > 0) { + final int writeLength; + if (head <= tail) { + writeLength = Math.min(len, buffer.length - tail - (head == 0 ? 
1 : 0)); + } else { + writeLength = Math.min(len, head - tail - 1); + } + if (writeLength <= 0) { + throw new IllegalStateException("No space left in the mark buffer"); + } + System.arraycopy(b, off, buffer, tail, writeLength); + tail += writeLength; + off += writeLength; + len -= writeLength; + if (tail == buffer.length) { + tail = 0; + // tail wrap-around overwrites head + if (head == 0) { + throw new IllegalStateException("Possible overflow of the mark buffer"); + } + } + } + } + + int getAvailableToWriteByteCount() { + if (buffer == null) { + return bufferSize; + } + if (head == tail) { + return buffer.length - 1; + } else if (head < tail) { + return buffer.length - tail + head - 1; + } else { + return head - tail - 1; + } + } + + int getAvailableToReadByteCount() { + if (buffer == null) { + return 0; + } + if (head <= tail) { + return tail - position; + } else if (position >= head) { + return buffer.length - position + tail; + } else { + return tail - position; + } } - } - // only for tests - protected InputStream getWrapped() { - return in; } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 867d2072e9ae4..107ef29a34313 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -101,9 +101,8 @@ public void testCloseRejectsSuccessiveCalls() throws Exception { assertThat(e.getMessage(), Matchers.is("Stream has been closed")); int bytesReadAfter = bytesRead.get(); assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); } public void testBufferingUponMark() throws Exception { @@ -111,9 +110,8 @@ public void testBufferingUponMark() throws Exception { Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); // read without mark assertThat(test.read(), Matchers.not(-1)); int readLen = 1 + Randomness.get().nextInt(8); @@ -124,9 +122,8 @@ public void testBufferingUponMark() throws Exception { } assertThat(readLen, Matchers.not(0)); // assert no buffering - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); // mark 
test.mark(1 + Randomness.get().nextInt(bufferSize)); // read one byte @@ -136,9 +133,8 @@ public void testBufferingUponMark() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); // assert byte is buffered - assertThat(test.getCurrentBufferCount(), Matchers.is(1)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - 1)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(1)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1)); assertThat(test.resetCalled, Matchers.is(false)); // read more bytes, up to buffer size bytes bytesReadBefore = bytesRead.get(); @@ -152,9 +148,8 @@ public void testBufferingUponMark() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert byte is buffered - assertThat(test.getCurrentBufferCount(), Matchers.is(1 + readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - 1 - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(1 + readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1 - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1 + readLen)); assertThat(test.resetCalled, Matchers.is(false)); } @@ -163,11 +158,10 @@ public void testInvalidateMark() throws Exception { Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + test.mark(randomIntBetween(1, bufferSize)); // read all bytes to fill the mark buffer int bytesReadBefore = bytesRead.get(); int readLen = bufferSize; @@ -180,9 +174,8 @@ public void testInvalidateMark() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert byte is buffered - assertThat(test.getCurrentBufferCount(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(0)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(bufferSize)); assertThat(test.resetCalled, Matchers.is(false)); // read another one byte bytesReadBefore = bytesRead.get(); @@ -191,9 +184,8 @@ public void testInvalidateMark() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); // assert mark is invalidated - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + 
assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(false)); // read more bytes bytesReadBefore = bytesRead.get(); @@ -207,9 +199,8 @@ public void testInvalidateMark() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert byte is NOT buffered - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(false)); // assert reset does not work any more IOException e = expectThrows(IOException.class, () -> { @@ -239,9 +230,8 @@ public void testConsumeBufferUponReset() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert buffer is populated - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.markCalled, Matchers.is(true)); // reset test.reset(); @@ -258,9 +248,8 @@ public void testConsumeBufferUponReset() throws Exception { // assert bytes are replayed from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); // assert buffer is consumed - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); assertThat(test.markCalled, Matchers.is(true)); assertThat(test.resetCalled, Matchers.is(true)); } @@ -286,16 +275,14 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert buffer is populated - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.markCalled, Matchers.is(true)); // reset test.reset(); assertThat(test.resetCalled, Matchers.is(true)); - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); // read again, from buffer this time 
bytesReadBefore = bytesRead.get(); int readLen2 = readLen; @@ -308,9 +295,8 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert bytes are replayed from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); // assert buffer is consumed - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(true)); assertThat(test.resetCalled, Matchers.is(true)); // read on, from the stream, until the mark buffer is full @@ -324,8 +310,7 @@ public void testInvalidateMarkAfterReset() throws Exception { bytesReadAfter = bytesRead.get(); // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3)); - assertThat(test.getCurrentBufferCount(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(true)); if (readLen3 > 0) { assertThat(test.resetCalled, Matchers.is(false)); @@ -344,9 +329,8 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen4)); // assert mark reset - assertThat(test.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.markCalled, Matchers.is(false)); // assert reset does not work anymore IOException e = expectThrows(IOException.class, () -> { @@ -376,17 +360,15 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert buffer is populated - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.markCalled, Matchers.is(true)); assertThat(test.resetCalled, Matchers.is(false)); // reset test.reset(); assertThat(test.resetCalled, Matchers.is(true)); - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); for (int readLen2 = 1; readLen2 <= readLen; readLen2++) { Tuple mockSourceTuple2 = getMockInfiniteInputStream(); BufferOnMarkInputStream cloneTest = new BufferOnMarkInputStream(mockSourceTuple.v2(), 
bufferSize); @@ -403,16 +385,14 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert bytes are replayed from the buffer, and not read from the stream assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); // assert buffer is consumed - assertThat(cloneTest.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(cloneTest.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(cloneTest.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); assertThat(cloneTest.markCalled, Matchers.is(true)); assertThat(cloneTest.resetCalled, Matchers.is(true)); // mark cloneTest.mark(1 + Randomness.get().nextInt(bufferSize)); - assertThat(cloneTest.getCurrentBufferCount(), Matchers.is(readLen - readLen2)); - assertThat(cloneTest.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen + readLen2)); - assertThat(cloneTest.getRemainingBufferToRead(), Matchers.is(readLen - readLen2)); + assertThat(cloneTest.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen + readLen2)); + assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); assertThat(cloneTest.markCalled, Matchers.is(true)); assertThat(cloneTest.resetCalled, Matchers.is(true)); // read until the buffer is filled @@ -432,9 +412,8 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert bytes are replayed from the buffer, and not read from the stream assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(0)); // assert buffer is consumed completely - assertThat(cloneTest3.getCurrentBufferCount(), Matchers.is(readLen - readLen2)); - assertThat(cloneTest3.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen + readLen2)); - assertThat(cloneTest3.getRemainingBufferToRead(), Matchers.is(readLen - readLen2 - readLen3)); + assertThat(cloneTest3.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen + readLen2)); + assertThat(cloneTest3.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2 - readLen3)); assertThat(cloneTest3.markCalled, Matchers.is(true)); assertThat(cloneTest3.resetCalled, Matchers.is(true)); } @@ -455,8 +434,7 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert bytes are PARTLY replayed, PARTLY read from the stream assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3 + readLen2 - readLen)); // assert buffer is appended and fully replayed - assertThat(cloneTest3.getCurrentBufferCount(), Matchers.is(readLen3)); - assertThat(cloneTest3.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen3)); + assertThat(cloneTest3.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen3)); assertThat(cloneTest3.markCalled, Matchers.is(true)); assertThat(cloneTest3.resetCalled, Matchers.is(false)); } @@ -484,15 +462,13 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert buffer is populated - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + 
assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.markCalled, Matchers.is(true)); // reset test.reset(); - assertThat(test.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(test.getRemainingBufferCapacity(), Matchers.is(bufferSize - readLen)); - assertThat(test.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); + assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.markCalled, Matchers.is(true)); assertThat(test.resetCalled, Matchers.is(true)); for (int readLen2 = readLen + 1; readLen2 <= bufferSize; readLen2++) { @@ -507,15 +483,13 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { // assert bytes are PARTLY replayed, PARTLY read from the stream assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(read2.length - readLen)); // assert buffer is appended and fully replayed - assertThat(test2.getCurrentBufferCount(), Matchers.is(read2.length)); - assertThat(test2.getRemainingBufferCapacity(), Matchers.is(bufferSize - read2.length)); + assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - read2.length)); assertThat(test2.markCalled, Matchers.is(true)); assertThat(test2.resetCalled, Matchers.is(false)); // mark test2.mark(1 + Randomness.get().nextInt(bufferSize)); - assertThat(test2.getCurrentBufferCount(), Matchers.is(0)); - assertThat(test2.getRemainingBufferCapacity(), Matchers.is(bufferSize)); - assertThat(test2.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); + assertThat(test2.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test2.markCalled, Matchers.is(true)); assertThat(test2.resetCalled, Matchers.is(false)); } @@ -526,19 +500,15 @@ public void testNoMockSimpleMarkResetAtBeginning() throws Exception { for (int mark = 1; mark <= length; mark++) { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), mark)) { in.mark(mark); - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(mark)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(mark)); byte[] test1 = in.readNBytes(mark); assertArray(0, test1); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); in.reset(); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); byte[] test2 = in.readNBytes(mark); assertArray(0, test2); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); } } } @@ -576,18 +546,14 @@ public void testNoMockSimpleMarkResetEverywhere() throws Exception { // skip first offset bytes in.readNBytes(offset); in.mark(mark); - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(mark)); + 
assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(mark)); byte[] test1 = in.readNBytes(mark); assertArray(offset, test1); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); in.reset(); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); byte[] test2 = in.readNBytes(mark); - assertThat(in.getCurrentBufferCount(), Matchers.is(mark)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); assertArray(offset, test2); } } @@ -634,30 +600,25 @@ public void testNoMockDoubleMarkEverywhere() throws Exception { try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), length)) { in.readNBytes(offset); - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); // first mark in.mark(length - offset); - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); byte[] test = in.readNBytes(readLen); assertArray(offset, test); - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); // reset to first in.reset(); - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); // advance before/after the first read length test = in.readNBytes(markLen); - assertThat(in.getCurrentBufferCount(), Matchers.is(Math.max(readLen, markLen))); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - Math.max(readLen, markLen))); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - Math.max(readLen, markLen))); if (markLen <= readLen) { - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen)); } else { assertThat(in.resetCalled, Matchers.is(false)); } @@ -665,37 +626,31 @@ public void testNoMockDoubleMarkEverywhere() throws Exception { // second mark in.mark(length - offset - markLen); if (markLen <= readLen) { - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); - assertThat(in.getRemainingBufferCapacity(), 
Matchers.is(length - readLen + markLen)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen + markLen)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen)); } else { - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); } for (int readLen2 = 1; readLen2 <= length - offset - markLen; readLen2++) { byte[] test2 = in.readNBytes(readLen2); if (markLen + readLen2 <= readLen) { - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen + markLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen + markLen)); assertThat(in.resetCalled, Matchers.is(true)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen - readLen2)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen - readLen2)); } else { - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen2)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen2)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen2)); assertThat(in.resetCalled, Matchers.is(false)); } assertArray(offset + markLen, test2); in.reset(); assertThat(in.resetCalled, Matchers.is(true)); if (markLen + readLen2 <= readLen) { - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen - markLen)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen + markLen)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen - markLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen + markLen)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen)); } else { - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen2)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(length - readLen2)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen2)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen2)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen2)); } } } @@ -712,13 +667,11 @@ public void testNoMockMarkWithoutReset() throws Exception { while (offset < testArray.length) { int readLen = Math.min(1 + Randomness.get().nextInt(maxMark), testArray.length - offset); in.mark(Randomness.get().nextInt(readLen)); - assertThat(in.getCurrentBufferCount(), Matchers.is(0)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(maxMark)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(0)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(maxMark)); + assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); byte[] test = in.readNBytes(readLen); - assertThat(in.getCurrentBufferCount(), Matchers.is(readLen)); - assertThat(in.getRemainingBufferCapacity(), Matchers.is(maxMark - readLen)); - assertThat(in.getRemainingBufferToRead(), Matchers.is(readLen)); + assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(maxMark - readLen)); + 
assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertArray(offset, test); offset += readLen; } @@ -755,15 +708,15 @@ private BufferOnMarkInputStream cloneBufferOnMarkStream(BufferOnMarkInputStream int origOffset = ((NoMarkByteArrayInputStream) orig.getWrapped()).getPos(); int origLen = ((NoMarkByteArrayInputStream) orig.getWrapped()).getCount(); BufferOnMarkInputStream cloneStream = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, - origOffset, origLen - origOffset), orig.bufferSize); - if (orig.ringBuffer != null) { - cloneStream.ringBuffer = Arrays.copyOf(orig.ringBuffer, orig.ringBuffer.length); + origOffset, origLen - origOffset), orig.ringBuffer.getBufferSize()); + if (orig.ringBuffer.buffer != null) { + cloneStream.ringBuffer.buffer = Arrays.copyOf(orig.ringBuffer.buffer, orig.ringBuffer.buffer.length); } else { - cloneStream.ringBuffer = null; + cloneStream.ringBuffer.buffer = null; } - cloneStream.head = orig.head; - cloneStream.tail = orig.tail; - cloneStream.position = orig.position; + cloneStream.ringBuffer.head = orig.ringBuffer.head; + cloneStream.ringBuffer.tail = orig.ringBuffer.tail; + cloneStream.ringBuffer.position = orig.ringBuffer.position; cloneStream.markCalled = orig.markCalled; cloneStream.resetCalled = orig.resetCalled; cloneStream.closed = orig.closed; @@ -771,14 +724,14 @@ private BufferOnMarkInputStream cloneBufferOnMarkStream(BufferOnMarkInputStream } private void cloneBufferOnMarkStream(BufferOnMarkInputStream clone, BufferOnMarkInputStream orig) { - if (orig.ringBuffer != null) { - clone.ringBuffer = Arrays.copyOf(orig.ringBuffer, orig.ringBuffer.length); + if (orig.ringBuffer.buffer != null) { + clone.ringBuffer.buffer = Arrays.copyOf(orig.ringBuffer.buffer, orig.ringBuffer.buffer.length); } else { - clone.ringBuffer = null; + clone.ringBuffer.buffer = null; } - clone.head = orig.head; - clone.tail = orig.tail; - clone.position = orig.position; + clone.ringBuffer.head = orig.ringBuffer.head; + clone.ringBuffer.tail = orig.ringBuffer.tail; + clone.ringBuffer.position = orig.ringBuffer.position; clone.markCalled = orig.markCalled; clone.resetCalled = orig.resetCalled; clone.closed = orig.closed; From 4fcd49d0d922aa9f2b6793dbb9e132d1d3aab9ac Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 31 Dec 2019 21:35:53 +0200 Subject: [PATCH 060/142] WIP --- .../encrypted/BufferOnMarkInputStream.java | 64 ++++++++++++++----- .../DecryptionPacketsInputStreamTests.java | 9 ++- 2 files changed, 55 insertions(+), 18 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index d114dd0274ea6..3dec732bbeab3 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -15,7 +15,7 @@ * All the bytes read or skipped following a {@link #mark(int)} call are also stored in a fixed-size internal array * so they can be replayed following a {@link #reset()} call. The size of the internal buffer is specified at construction * time. It is an error (throws {@code IllegalArgumentException}) to specify a larger {@code readlimit} value as an argument - * to a mark call. + * to a {@code mark} call. *
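For illustration, the readlimit bound described above plays out as follows; this is a minimal sketch, assuming the usual java.io imports, with a made-up ByteArrayInputStream source and made-up sizes:

    InputStream source = new ByteArrayInputStream(new byte[64]);
    BufferOnMarkInputStream in = new BufferOnMarkInputStream(source, 32);
    in.mark(32);   // allowed: the readlimit equals the configured buffer size
    in.mark(33);   // throws IllegalArgumentException: the readlimit exceeds the buffer size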

* Unlike the {@link java.io.BufferedInputStream} this only buffers upon a {@link #mark(int)} call, * i.e. if {@code mark} is never called this is equivalent to a bare pass-through {@link FilterInputStream}. @@ -28,8 +28,12 @@ *

* This is NOT thread-safe, multiple threads sharing a single instance must synchronize access. */ -public final class BufferOnMarkInputStream extends FilterInputStream { +public final class BufferOnMarkInputStream extends InputStream { + final InputStream source; + /** + * the buffer used to store the bytes following a mark (which are replayed on reset) + */ final RingBuffer ringBuffer; // package-protected for tests /** * {@code true} when the result of a read or skip from the underlying stream must also be stored in the buffer @@ -43,14 +47,14 @@ public final class BufferOnMarkInputStream extends FilterInputStream { /** * Creates a {@code BufferOnMarkInputStream} that buffers a maximum of {@code bufferSize} elements - * from the wrapped input stream {@code in} in order to support {@code mark} and {@code reset}. - * The {@code bufferSize} is the maximum value for the mark readlimit. + * from the wrapped input stream {@code source} in order to support {@code mark} and {@code reset}. + * The {@code bufferSize} is the maximum value for the {@code mark} readlimit argument. * - * @param in the underlying input buffer + * @param source the underlying input buffer * @param bufferSize the number of bytes that can be stored after a call to mark */ - public BufferOnMarkInputStream(InputStream in, int bufferSize) { - super(Objects.requireNonNull(in)); + public BufferOnMarkInputStream(InputStream source, int bufferSize) { + this.source = source; this.ringBuffer = new RingBuffer(bufferSize); this.markCalled = this.resetCalled = false; this.closed = false; @@ -98,7 +102,7 @@ public int read(byte[] b, int off, int len) throws IOException { return bytesRead; } } - int bytesRead = in.read(b, off, len); + int bytesRead = source.read(b, off, len); if (bytesRead <= 0) { return bytesRead; } @@ -164,7 +168,7 @@ public long skip(long n) throws IOException { if (resetCalled) { throw new IllegalStateException("Reset cannot be called without a preceding mark invocation"); } - return in.skip(n); + return source.skip(n); } long remaining = n; int size = (int)Math.min(2048, remaining); @@ -197,7 +201,7 @@ public int available() throws IOException { if (resetCalled) { bytesAvailable += ringBuffer.getAvailableToReadByteCount(); } - bytesAvailable += in.available(); + bytesAvailable += source.available(); return bytesAvailable; } @@ -284,7 +288,7 @@ public void reset() throws IOException { public void close() throws IOException { if (false == closed) { closed = true; - in.close(); + source.close(); } } @@ -304,7 +308,7 @@ private void ensureOpen() throws IOException { // only for tests protected InputStream getWrapped() { - return in; + return source; } static class RingBuffer { @@ -358,30 +362,58 @@ void clear() { head = position = tail = 0; } + /** + * Reads up to {@code len} bytes from the ring buffer and places them in the {@code b} array starting at offset {@code off}. + * This advances the internal pointer of the ring buffer so that a subsequent call will return the following bytes, not the + * same ones (see {@link #reset()}). + * Exactly {@code len} bytes are read and placed in the array, but no more than {@link #getAvailableToReadByteCount()}; i.e. + * if {@code len} is greater than the value returned by {@link #getAvailableToReadByteCount()} this reads all the remaining + * available bytes (which could be {@code 0}). + * This returns the exact count of bytes read (the minimum of {@code len} and the value of {@code #getAvailableToReadByteCount}). 
+ * + * @param b the array where to place the bytes read + * @param off the offset in the array where to start placing the bytes read (i.e. first byte is stored at b[off]) + * @param len the maximum number of bytes to read + * @return the number of bytes actually read + */ int read(byte[] b, int off, int len) { - if (position == tail) { + Objects.requireNonNull(b); + Objects.checkFromIndexSize(off, len, b.length); + if (position == tail || len == 0) { return 0; } + // the number of bytes to read final int readLength; if (position <= tail) { readLength = Math.min(len, tail - position); } else { + // the ring buffer contains elements that wrap around the end of the array readLength = Math.min(len, buffer.length - position); } System.arraycopy(buffer, position, b, off, readLength); + // update the internal pointer with the bytes read position += readLength; if (position == buffer.length) { + // pointer wrap around position = 0; + // also read the remaining bytes after the wrap around + return readLength + read(b, off + readLength, len - readLength); } return readLength; } void write(byte[] b, int off, int len) { + Objects.requireNonNull(b); + Objects.checkFromIndexSize(off, len, b.length); + // allocate internal buffer lazily if (buffer == null && len > 0) { - // "+ 1" for the full-buffer sentinel free element + // "+ 1" for the full-buffer sentinel element buffer = new byte[bufferSize + 1]; head = position = tail = 0; } + if (len > getAvailableToWriteByteCount()) { + throw new IllegalArgumentException("Not enough remaining space in the ring buffer"); + } while (len > 0) { final int writeLength; if (head <= tail) { @@ -390,7 +422,7 @@ void write(byte[] b, int off, int len) { writeLength = Math.min(len, head - tail - 1); } if (writeLength <= 0) { - throw new IllegalStateException("No space left in the mark buffer"); + throw new IllegalStateException("No space left in the ring buffer"); } System.arraycopy(b, off, buffer, tail, writeLength); tail += writeLength; @@ -400,7 +432,7 @@ void write(byte[] b, int off, int len) { tail = 0; // tail wrap-around overwrites head if (head == 0) { - throw new IllegalStateException("Possible overflow of the mark buffer"); + throw new IllegalStateException("Possible overflow of the ring buffer"); } } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java index f39e6e86057d3..f34bedcfe1658 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStreamTests.java @@ -188,7 +188,9 @@ private void testEncryptAndDecryptSuccess(byte[] plainBytes, SecretKey secretKey assertThat(decryptedBytes.length, Matchers.is(len)); assertThat((long) decryptedBytes.length, Matchers.is(DecryptionPacketsInputStream.getDecryptionLength(encryptedBytes.length, packetLen))); - assertThat(decryptedBytes, Matchers.equalTo(plainBytes)); + for (int i = 0; i < len; i++) { + assertThat(decryptedBytes[i], Matchers.is(plainBytes[i])); + } } } @@ -202,7 +204,10 @@ protected ReadLessFilterInputStream(InputStream in) { @Override public int read(byte[] b, int off, int len) throws IOException { - return super.read(b, off, randomIntBetween(0, len)); + if (len == 0) { + return 0; + } + 
return super.read(b, off, randomIntBetween(1, len)); } } From 9ef136e0529afc2b8e09a7969553a9b05143330d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 31 Dec 2019 22:01:23 +0200 Subject: [PATCH 061/142] WIP --- .../encrypted/BufferOnMarkInputStream.java | 15 +++++++- .../encrypted/CountingInputStream.java | 38 ++++++++++++------- .../encrypted/PrefixInputStream.java | 18 ++++----- 3 files changed, 46 insertions(+), 25 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 3dec732bbeab3..5b4f1d173f409 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -363,10 +363,10 @@ void clear() { } /** - * Reads up to {@code len} bytes from the ring buffer and places them in the {@code b} array starting at offset {@code off}. + * Copies up to {@code len} bytes from the ring buffer and places them in the {@code b} array starting at offset {@code off}. * This advances the internal pointer of the ring buffer so that a subsequent call will return the following bytes, not the * same ones (see {@link #reset()}). - * Exactly {@code len} bytes are read and placed in the array, but no more than {@link #getAvailableToReadByteCount()}; i.e. + * Exactly {@code len} bytes are copied from the ring buffer, but no more than {@link #getAvailableToReadByteCount()}; i.e. * if {@code len} is greater than the value returned by {@link #getAvailableToReadByteCount()} this reads all the remaining * available bytes (which could be {@code 0}). * This returns the exact count of bytes read (the minimum of {@code len} and the value of {@code #getAvailableToReadByteCount}). @@ -402,6 +402,17 @@ int read(byte[] b, int off, int len) { return readLength; } + /** + * Copies exactly {@code len} bytes from the array {@code b}, starting at offset {@code off}, into the ring buffer. + * The bytes are appended after the ones written in the same way by a previous call, and are available to + * {@link #read(byte[], int, int)} immediately. + * This throws {@code IllegalArgumentException} if the ring buffer does not have enough space left. + * To get the available capacity left call {@link #getAvailableToWriteByteCount()}. 
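To make the read/write contract above concrete, here is a small sketch of how the package-private RingBuffer behaves; it assumes a caller inside the org.elasticsearch.repositories.encrypted package, and the capacity and byte values are illustrative:

    BufferOnMarkInputStream.RingBuffer ring = new BufferOnMarkInputStream.RingBuffer(8);
    byte[] out = new byte[8];
    ring.write(new byte[] { 1, 2, 3, 4, 5 }, 0, 5);  // 5 bytes stored, 3 bytes of write capacity left
    int n = ring.read(out, 0, 3);                    // n == 3, copies bytes 1, 2, 3
    ring.reset();                                    // rewind the read position to the last mark (here, the start)
    n = ring.read(out, 0, 8);                        // n == 5, replays bytes 1..5 (the minimum of len and the available count)
    ring.mark();                                     // discards the bytes already read, freeing the full 8 bytes of capacity
    ring.write(new byte[9], 0, 9);                   // throws IllegalArgumentException: more than the remaining capacity

The extra sentinel element mentioned in the allocation comment is what lets a completely full buffer be told apart from an empty one, since both would otherwise end up with head == tail.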
+ * + * @param b the array from which to copy the bytes into the ring buffer + * @param off the offset of the first element to copy + * @param len the number of elements to copy + */ void write(byte[] b, int off, int len) { Objects.requireNonNull(b); Objects.checkFromIndexSize(off, len, b.length); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java index 38c536d2832e4..91245beaa8707 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/CountingInputStream.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.repositories.encrypted; -import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Objects; @@ -24,25 +23,26 @@ * stream will also close the wrapped input stream. Apart from closing the wrapped * stream in this case, the {@code close} method does nothing else. */ -public final class CountingInputStream extends FilterInputStream { +public final class CountingInputStream extends InputStream { + private final InputStream source; + private final boolean closeSource; long count; // package-protected for tests long mark; // package-protected for tests boolean closed; // package-protected for tests - private final boolean closeSource; /** * Wraps another input stream, counting the number of bytes read. * - * @param in the input stream to be wrapped + * @param source the input stream to be wrapped * @param closeSource {@code true} if closing this stream will also close the wrapped stream */ - public CountingInputStream(InputStream in, boolean closeSource) { - super(Objects.requireNonNull(in)); + public CountingInputStream(InputStream source, boolean closeSource) { + this.source = Objects.requireNonNull(source); + this.closeSource = closeSource; this.count = 0L; this.mark = -1L; this.closed = false; - this.closeSource = closeSource; } /** Returns the number of bytes read. 
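A minimal usage sketch for the refactored CountingInputStream; the wrapped ByteArrayInputStream, the read lengths and the closeSource value are illustrative:

    CountingInputStream counting = new CountingInputStream(new ByteArrayInputStream(new byte[16]), false);
    counting.readNBytes(6);
    long count = counting.getCount();  // 6
    counting.skip(2);
    count = counting.getCount();       // 8, skipped bytes are counted as well
    counting.close();                  // the wrapped stream stays open because closeSource == false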
*/ @@ -52,7 +52,7 @@ public long getCount() { @Override public int read() throws IOException { - int result = in.read(); + int result = source.read(); if (result != -1) { count++; } @@ -61,7 +61,7 @@ public int read() throws IOException { @Override public int read(byte[] b, int off, int len) throws IOException { - int result = in.read(b, off, len); + int result = source.read(b, off, len); if (result != -1) { count += result; } @@ -70,27 +70,37 @@ public int read(byte[] b, int off, int len) throws IOException { @Override public long skip(long n) throws IOException { - long result = in.skip(n); + long result = source.skip(n); count += result; return result; } + @Override + public int available() throws IOException { + return source.available(); + } + + @Override + public boolean markSupported() { + return source.markSupported(); + } + @Override public synchronized void mark(int readlimit) { - in.mark(readlimit); + source.mark(readlimit); mark = count; } @Override public synchronized void reset() throws IOException { - if (false == in.markSupported()) { + if (false == source.markSupported()) { throw new IOException("Mark not supported"); } if (mark == -1L) { throw new IOException("Mark not set"); } count = mark; - in.reset(); + source.reset(); } @Override @@ -98,7 +108,7 @@ public void close() throws IOException { if (false == closed) { closed = true; if (closeSource) { - in.close(); + source.close(); } } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index e0e795f29e3ca..08fa708d40542 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -6,7 +6,6 @@ package org.elasticsearch.repositories.encrypted; -import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Objects; @@ -23,8 +22,9 @@ * Any subsequent {@code read}, {@code skip} and {@code available} calls * will throw {@code IOException}s. */ -public final class PrefixInputStream extends FilterInputStream { +public final class PrefixInputStream extends InputStream { + private final InputStream source; /** * The length in bytes of the prefix. 
* This is the maximum number of bytes that can be read from this stream, @@ -45,11 +45,11 @@ public final class PrefixInputStream extends FilterInputStream { */ private boolean closed; - public PrefixInputStream(InputStream in, int prefixLength, boolean closeSource) { - super(Objects.requireNonNull(in)); + public PrefixInputStream(InputStream source, int prefixLength, boolean closeSource) { if (prefixLength < 0) { throw new IllegalArgumentException("The prefixLength constructor argument must be a positive integer"); } + this.source = source; this.prefixLength = prefixLength; this.count = 0; this.closeSource = closeSource; @@ -62,7 +62,7 @@ public int read() throws IOException { if (remainingPrefixByteCount() <= 0) { return -1; } - int byteVal = in.read(); + int byteVal = source.read(); if (byteVal == -1) { return -1; } @@ -81,7 +81,7 @@ public int read(byte[] b, int off, int len) throws IOException { return -1; } int readSize = Math.min(len, remainingPrefixByteCount()); - int bytesRead = in.read(b, off, readSize); + int bytesRead = source.read(b, off, readSize); if (bytesRead == -1) { return -1; } @@ -97,7 +97,7 @@ public long skip(long n) throws IOException { } long bytesToSkip = Math.min(n, remainingPrefixByteCount()); assert bytesToSkip > 0; - long bytesSkipped = in.skip(bytesToSkip); + long bytesSkipped = source.skip(bytesToSkip); count += bytesSkipped; return bytesSkipped; } @@ -105,7 +105,7 @@ public long skip(long n) throws IOException { @Override public int available() throws IOException { ensureOpen(); - return Math.min(remainingPrefixByteCount(), in.available()); + return Math.min(remainingPrefixByteCount(), source.available()); } @Override @@ -130,7 +130,7 @@ public void close() throws IOException { } closed = true; if (closeSource) { - in.close(); + source.close(); } } From cb966b2e1f9b18a386859d2ab9c6ba97db86a987 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 1 Jan 2020 19:20:52 +0200 Subject: [PATCH 062/142] More javadoc to the ring buffer inner --- .../encrypted/BufferOnMarkInputStream.java | 174 ++++++++++++------ .../encrypted/PrefixInputStream.java | 3 + .../BufferOnMarkInputStreamTests.java | 90 ++++----- 3 files changed, 168 insertions(+), 99 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java index 5b4f1d173f409..f568dcc4a67d2 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStream.java @@ -11,7 +11,7 @@ import java.util.Objects; /** - * A {@code BufferOnMarkInputStream} adds the mark and reset functionality to another input stream. + * A {@code BufferOnMarkInputStream} adds the {@code mark} and {@code reset} functionality to another input stream. * All the bytes read or skipped following a {@link #mark(int)} call are also stored in a fixed-size internal array * so they can be replayed following a {@link #reset()} call. The size of the internal buffer is specified at construction * time. 
It is an error (throws {@code IllegalArgumentException}) to specify a larger {@code readlimit} value as an argument @@ -30,19 +30,32 @@ */ public final class BufferOnMarkInputStream extends InputStream { + /** + * the underlying input stream supplying the actual bytes to read + */ final InputStream source; /** - * the buffer used to store the bytes following a mark (which are replayed on reset) + * The fixed capacity buffer used to store the bytes following a {@code mark} call on the input stream, + * and which are then replayed after the {@code reset} call. + * The buffer permits appending bytes which can then be read, possibly multiple times, by also + * supporting the mark and reset operations on its own. + * Reading will not discard the bytes just read. Subsequent reads will return the + * next bytes, but the bytes can be replayed by reading after calling {@code reset}. + * The {@code mark} operation is used to adjust the position of the reset return position to the current + * read position and also discard the bytes read before. */ final RingBuffer ringBuffer; // package-protected for tests /** - * {@code true} when the result of a read or skip from the underlying stream must also be stored in the buffer + * {@code true} when the result of a read or a skip from the underlying source stream must also be stored in the buffer + */ + boolean storeToBuffer; // package-protected for tests + /** + * {@code true} when the returned bytes must come from the buffer and not from the underlying source stream */ - boolean markCalled; // package-protected for tests + boolean replayFromBuffer; // package-protected for tests /** - * {@code true} when the returned bytes must come from the buffer and not from the underlying stream + * {@code true} when this stream is closed and any further calls throw IOExceptions */ - boolean resetCalled; // package-protected for tests boolean closed; // package-protected for tests /** @@ -56,7 +69,7 @@ public final class BufferOnMarkInputStream extends InputStream { public BufferOnMarkInputStream(InputStream source, int bufferSize) { this.source = source; this.ringBuffer = new RingBuffer(bufferSize); - this.markCalled = this.resetCalled = false; + this.storeToBuffer = this.replayFromBuffer = false; this.closed = false; } @@ -64,9 +77,9 @@ public BufferOnMarkInputStream(InputStream source, int bufferSize) { * Reads up to {@code len} bytes of data into an array of bytes from this * input stream. If {@code len} is zero, then no bytes are read and {@code 0} * is returned; otherwise, there is an attempt to read at least one byte. - * The read will return buffered bytes, which have been returned in a previous - * call as well, if the contents of the stream must be replayed following a - * reset call; otherwise it forwards the call to the underlying stream. + * If the contents of the stream must be replayed following a {@code reset} + * call, the call will return buffered bytes which have been returned in a previous + * call. Otherwise it forwards the read call to the underlying source input stream. 
* If no byte is available because there are no more bytes to replay following * a reset (if a reset was called) and the underlying stream is exhausted, the * value {@code -1} is returned; otherwise, at least one byte is read and stored @@ -93,11 +106,11 @@ public int read(byte[] b, int off, int len) throws IOException { return 0; } // firstly try reading any buffered bytes in case this read call is part of a rewind following a reset call - if (resetCalled) { + if (replayFromBuffer) { int bytesRead = ringBuffer.read(b, off, len); if (bytesRead == 0) { // rewinding is complete, no more bytes to replay - resetCalled = false; + replayFromBuffer = false; } else { return bytesRead; } @@ -107,10 +120,12 @@ public int read(byte[] b, int off, int len) throws IOException { return bytesRead; } // if mark has been previously called, buffer all the read bytes - if (markCalled) { + if (storeToBuffer) { if (bytesRead > ringBuffer.getAvailableToWriteByteCount()) { - // could not fully write to buffer, invalidate mark - markCalled = false; + // can not fully write to buffer + // invalidate mark + storeToBuffer = false; + // empty buffer ringBuffer.clear(); } else { ringBuffer.write(b, off, bytesRead); @@ -164,16 +179,20 @@ public long skip(long n) throws IOException { if (n <= 0) { return 0; } - if (false == markCalled) { - if (resetCalled) { + if (false == storeToBuffer) { + // integrity check of the replayFromBuffer state variable + if (replayFromBuffer) { throw new IllegalStateException("Reset cannot be called without a preceding mark invocation"); } + // if mark has not been called, no storing to the buffer is required return source.skip(n); } long remaining = n; int size = (int)Math.min(2048, remaining); byte[] skipBuffer = new byte[size]; while (remaining > 0) { + // skipping translates to a read so that the skipped bytes are stored in the buffer, + // so they can possibly be replayed after a reset int bytesRead = read(skipBuffer, 0, (int)Math.min(size, remaining)); if (bytesRead < 0) { break; @@ -198,7 +217,7 @@ public long skip(long n) throws IOException { public int available() throws IOException { ensureOpen(); int bytesAvailable = 0; - if (resetCalled) { + if (replayFromBuffer) { bytesAvailable += ringBuffer.getAvailableToReadByteCount(); } bytesAvailable += source.available(); @@ -206,8 +225,8 @@ public int available() throws IOException { } /** - * Tests if this input stream supports the {@code mark} and - * {@code reset} methods. This always returns {@code true}. + * Tests if this input stream supports the {@code mark} and {@code reset} methods. + * This always returns {@code true}. */ @Override public boolean markSupported() { @@ -217,7 +236,9 @@ public boolean markSupported() { /** * Marks the current position in this input stream. A subsequent call to * the {@code reset} method repositions this stream at the last marked - * position so that subsequent reads re-read the same bytes. + * position so that subsequent reads re-read the same bytes. The bytes + * read or skipped following a {@code mark} call will be buffered internally + * and any previously buffered bytes are discarded. *
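Because skipped bytes are buffered as well, a skip between mark and reset can be replayed just like a read; a minimal sketch with illustrative stream contents and sizes:

    BufferOnMarkInputStream in = new BufferOnMarkInputStream(new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6 }), 4);
    in.mark(4);
    in.skip(3);                        // the skipped bytes 1, 2, 3 are stored in the internal buffer
    in.reset();                        // rewind to the mark
    byte[] replay = in.readNBytes(3);  // returns 1, 2, 3 even though they were previously skipped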

* The {@code readlimit} arguments tells this input stream to * allow that many bytes to be read before the mark position can be @@ -238,7 +259,7 @@ public boolean markSupported() { @Override public void mark(int readlimit) { // readlimit is otherwise ignored but this defensively fails if the caller is expecting to be able to mark/reset more than this - // instance can accommodate in the ring mark buffer + // instance can accommodate in the fixed ring buffer if (readlimit > ringBuffer.getBufferSize()) { throw new IllegalArgumentException("Readlimit value [" + readlimit + "] exceeds the maximum value of [" + ringBuffer.getBufferSize() + "]"); @@ -248,34 +269,38 @@ public void mark(int readlimit) { if (closed) { return; } - markCalled = true; - if (resetCalled) { - // mark while replaying after a reset + // signal that further read or skipped bytes must be stored to the buffer + storeToBuffer = true; + if (replayFromBuffer) { + // the mark operation while replaying after a reset + // this only discards the previously buffered bytes before the current position + // as well as updates the mark position in the buffer ringBuffer.mark(); } else { + // any previously stored bytes are discarded because mark only has to retain bytes from this position on ringBuffer.clear(); } } /** - * Repositions this stream to the position at the time the - * {@code mark} method was last called on this input stream. - * Subsequent read calls will return the same bytes in the same - * order since the point of the {@code mark} call. Naturally, - * {@code mark} can be invoked at any moment, even after a - * {@code reset}. + * Repositions this stream to the position at the time the {@code mark} method was last called on this input stream. + * It throws an {@code IOException} if {@code mark} has not yet been called on this instance. + * Internally, this resets the buffer to the last mark position and signals that further reads (and skips) + * on this input stream must return bytes from the buffer and not from the underlying source stream. * - * @throws IOException if the stream has been closed or the number of bytes - * read since the last mark call exceeded {@link #getMaxMarkReadlimit()} - * @see java.io.InputStream#mark(int) + * @throws IOException if the stream has been closed or the number of bytes + * read since the last mark call exceeded {@link #getMaxMarkReadlimit()} + * @see java.io.InputStream#mark(int) */ @Override public void reset() throws IOException { ensureOpen(); - if (false == markCalled) { + if (false == storeToBuffer) { throw new IOException("Mark not called or has been invalidated"); } - resetCalled = true; + // signal that further reads/skips must be satisfied from the buffer and not from the underlying source stream + replayFromBuffer = true; + // position the buffer's read pointer back to the last mark position ringBuffer.reset(); } @@ -294,7 +319,8 @@ public void close() throws IOException { /** * Returns the maximum value for the {@code readlimit} argument of the {@link #mark(int)} method. - * This is the same as the {@code bufferSize} constructor argument. + * This is the value of the {@code bufferSize} constructor argument and represents the maximum number + * of bytes that can be internally buffered (so they can be replayed after the reset call). 
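Putting the mark/reset contract together, a short usage sketch; the wrapped ByteArrayInputStream, the buffer size and the read lengths are illustrative:

    byte[] data = { 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21 };
    BufferOnMarkInputStream in = new BufferOnMarkInputStream(new ByteArrayInputStream(data), 8);
    in.readNBytes(4);                    // 10..13, plain pass-through, nothing is buffered yet
    in.mark(8);                          // buffer the next (at most 8) bytes read or skipped
    byte[] first = in.readNBytes(6);     // 14..19, also copied into the internal buffer
    in.reset();                          // rewind to the mark
    byte[] replayed = in.readNBytes(6);  // 14..19 again, this time served from the buffer
    // if more than 8 bytes had been read between mark() and reset(), the mark would have been
    // invalidated and reset() would then throw an IOException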
*/ public int getMaxMarkReadlimit() { return ringBuffer.getBufferSize(); @@ -306,39 +332,48 @@ private void ensureOpen() throws IOException { } } - // only for tests - protected InputStream getWrapped() { - return source; - } - + /** + * This buffer is used to store all the bytes read or skipped after the last {@link BufferOnMarkInputStream#mark(int)} + * invocation. + *

+ * The latest bytes written to the ring buffer are appended following the previous ones. + * Reading back the bytes advances an internal pointer so that subsequent read calls return subsequent bytes. + * However, read bytes are not discarded. The same bytes can be re-read following the {@link #reset()} invocation. + * {@link #reset()} permits re-reading the bytes since the last {@link #mark()}} call, or since the buffer instance + * has been created or the {@link #clear()} method has been invoked. + * Calling {@link #mark()} will discard all bytes read before, and calling {@link #clear()} will discard all the + * bytes (new bytes must be written otherwise reading will return {@code 0} bytes). + */ static class RingBuffer { - final int bufferSize; // package-protected for tests + /** + * This holds the size of the buffer which is lazily allocated on the first {@link #write(byte[], int, int)} invocation + */ + private final int bufferSize; /** * The array used to store the bytes to be replayed upon a reset call. - * The buffer portion that stores valid bytes, which must be returned by the read calls after a reset call, - * is demarcated by a {@code head} (inclusive) and a {@code tail} offset (exclusive). The offsets wrap around, - * i.e. if the {@code tail} offset is smaller than the {@code head} offset, then the portion of valid bytes - * is that from the {@code head} offset until the end of the buffer array and from the start of the array - * until the {@code tail} offset. The buffer is empty when both the {@code head} and the {@code tail} offsets - * are equal. The buffer is full if it stores {@code bufferSize} elements. - * To avoid mixing up the two states, the actual allocated size of the array is {@code bufferSize + 1}. */ byte[] buffer; // package-protected for tests /** - * The inclusive start offset of the bytes that must be replayed after a reset call. + * The start offset (inclusive) for the bytes that must be re-read after a reset call. This offset is advanced + * by invoking {@link #mark()} */ int head; // package-protected for tests /** - * The exclusive end offset of the bytes that must be replayed after a reset call. + * The end offset (exclusive) for the bytes that must be re-read after a reset call. This offset is advanced + * by writing to the ring buffer. */ int tail; // package-protected for tests /** - * The current offset of the next byte to be returned from the buffer for the reads following a reset. - * This is defined only when {@code resetCalled} is {@code true}. + * The offset of the bytes to return on the next read call. This offset is advanced by reading from the ring buffer. */ int position; // package-protected for tests + /** + * Creates a new ring buffer instance that can store a maximum of {@code bufferSize} bytes. + * More bytes are stored by writing to the ring buffer, and bytes are discarded from the buffer by the + * {@code mark} and {@code reset} method invocations. + */ RingBuffer(int bufferSize) { if (bufferSize <= 0) { throw new IllegalArgumentException("The buffersize constructor argument must be a strictly positive value"); @@ -346,18 +381,35 @@ static class RingBuffer { this.bufferSize = bufferSize; } + /** + * Returns the maximum number of bytes that this buffer can store. + */ int getBufferSize() { return bufferSize; } + /** + * Rewind back to the read position of the last {@link #mark()} or {@link #reset()}. 
The next + * {@link RingBuffer#read(byte[], int, int)} call will return the same bytes that the read + * call after the last {@link #mark()} did. + */ void reset() { position = head; } + /** + * Mark the current read position. Any previously read bytes are discarded from the ring buffer, + * i.e. they cannot be re-read, but this frees up space for writing other bytes. + * All the following {@link RingBuffer#read(byte[], int, int)} calls will revert back to this position. + */ void mark() { head = position; } + /** + * Empties out the ring buffer, discarding all the bytes written to it, i.e. any following read calls don't + * return any bytes. + */ void clear() { head = position = tail = 0; } @@ -449,6 +501,12 @@ void write(byte[] b, int off, int len) { } } + /** + * Returns the number of bytes that can be written to this ring buffer before it becomes full + * and will not accept further writes. Be advised that reading (see {@link #read(byte[], int, int)}) + * does not free up space because bytes can be re-read multiple times (see {@link #reset()}); + * ring buffer space can be reclaimed by calling {@link #mark()} or {@link #clear()} + */ int getAvailableToWriteByteCount() { if (buffer == null) { return bufferSize; @@ -462,6 +520,14 @@ int getAvailableToWriteByteCount() { } } + /** + * Returns the number of bytes that can be read from this ring buffer before it becomes empty + * and all subsequent {@link #read(byte[], int, int)} calls will return {@code 0}. Writing + * more bytes (see {@link #write(byte[], int, int)}) will obviously increase the number of + * bytes available to read. Calling {@link #reset()} will also increase the available byte + * count because the following reads will go over again the same bytes since the last + * {@code mark} call. + */ int getAvailableToReadByteCount() { if (buffer == null) { return 0; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java index 08fa708d40542..873ffc319e176 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PrefixInputStream.java @@ -24,6 +24,9 @@ */ public final class PrefixInputStream extends InputStream { + /** + * The underlying stream of which only a prefix is returned + */ private final InputStream source; /** * The length in bytes of the prefix. 
diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index 107ef29a34313..bb005abd85f08 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -135,7 +135,7 @@ public void testBufferingUponMark() throws Exception { // assert byte is buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1)); - assertThat(test.resetCalled, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // read more bytes, up to buffer size bytes bytesReadBefore = bytesRead.get(); readLen = 1 + Randomness.get().nextInt(bufferSize - 1); @@ -150,7 +150,7 @@ public void testBufferingUponMark() throws Exception { // assert byte is buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1 - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1 + readLen)); - assertThat(test.resetCalled, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); } public void testInvalidateMark() throws Exception { @@ -176,7 +176,7 @@ public void testInvalidateMark() throws Exception { // assert byte is buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(bufferSize)); - assertThat(test.resetCalled, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // read another one byte bytesReadBefore = bytesRead.get(); assertThat(test.read(), Matchers.not(-1)); @@ -186,7 +186,7 @@ public void testInvalidateMark() throws Exception { // assert mark is invalidated assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - assertThat(test.markCalled, Matchers.is(false)); + assertThat(test.storeToBuffer, Matchers.is(false)); // read more bytes bytesReadBefore = bytesRead.get(); readLen = 1 + Randomness.get().nextInt(2 * bufferSize); @@ -201,7 +201,7 @@ public void testInvalidateMark() throws Exception { // assert byte is NOT buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - assertThat(test.markCalled, Matchers.is(false)); + assertThat(test.storeToBuffer, Matchers.is(false)); // assert reset does not work any more IOException e = expectThrows(IOException.class, () -> { test.reset(); @@ -232,10 +232,10 @@ public void testConsumeBufferUponReset() throws Exception { // assert buffer is populated assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); - assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); // reset test.reset(); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); // read again, from buffer this time bytesReadBefore = bytesRead.get(); int readLen2 
= 1 + Randomness.get().nextInt(readLen); @@ -250,8 +250,8 @@ public void testConsumeBufferUponReset() throws Exception { // assert buffer is consumed assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); - assertThat(test.markCalled, Matchers.is(true)); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); } public void testInvalidateMarkAfterReset() throws Exception { @@ -277,10 +277,10 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert buffer is populated assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); - assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); // reset test.reset(); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); // read again, from buffer this time @@ -297,8 +297,8 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert buffer is consumed assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - assertThat(test.markCalled, Matchers.is(true)); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); // read on, from the stream, until the mark buffer is full bytesReadBefore = bytesRead.get(); int readLen3 = bufferSize - readLen; @@ -311,11 +311,11 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3)); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); - assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); if (readLen3 > 0) { - assertThat(test.resetCalled, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); } else { - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); } // read more bytes bytesReadBefore = bytesRead.get(); @@ -331,7 +331,7 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert mark reset assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - assertThat(test.markCalled, Matchers.is(false)); + assertThat(test.storeToBuffer, Matchers.is(false)); // assert reset does not work anymore IOException e = expectThrows(IOException.class, () -> { test.reset(); @@ -362,11 +362,11 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert buffer is populated assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); - assertThat(test.markCalled, Matchers.is(true)); - assertThat(test.resetCalled, Matchers.is(false)); + 
assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // reset test.reset(); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); for (int readLen2 = 1; readLen2 <= readLen; readLen2++) { @@ -387,14 +387,14 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert buffer is consumed assertThat(cloneTest.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); - assertThat(cloneTest.markCalled, Matchers.is(true)); - assertThat(cloneTest.resetCalled, Matchers.is(true)); + assertThat(cloneTest.storeToBuffer, Matchers.is(true)); + assertThat(cloneTest.replayFromBuffer, Matchers.is(true)); // mark cloneTest.mark(1 + Randomness.get().nextInt(bufferSize)); assertThat(cloneTest.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen + readLen2)); assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); - assertThat(cloneTest.markCalled, Matchers.is(true)); - assertThat(cloneTest.resetCalled, Matchers.is(true)); + assertThat(cloneTest.storeToBuffer, Matchers.is(true)); + assertThat(cloneTest.replayFromBuffer, Matchers.is(true)); // read until the buffer is filled for (int readLen3 = 1; readLen3 <= readLen - readLen2; readLen3++) { Tuple mockSourceTuple3 = getMockInfiniteInputStream(); @@ -414,8 +414,8 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // assert buffer is consumed completely assertThat(cloneTest3.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen + readLen2)); assertThat(cloneTest3.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2 - readLen3)); - assertThat(cloneTest3.markCalled, Matchers.is(true)); - assertThat(cloneTest3.resetCalled, Matchers.is(true)); + assertThat(cloneTest3.storeToBuffer, Matchers.is(true)); + assertThat(cloneTest3.replayFromBuffer, Matchers.is(true)); } // read beyond the buffer can supply, but not more than it can accommodate for (int readLen3 = readLen - readLen2 + 1; readLen3 <= bufferSize - readLen2; readLen3++) { @@ -435,8 +435,8 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3 + readLen2 - readLen)); // assert buffer is appended and fully replayed assertThat(cloneTest3.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen3)); - assertThat(cloneTest3.markCalled, Matchers.is(true)); - assertThat(cloneTest3.resetCalled, Matchers.is(false)); + assertThat(cloneTest3.storeToBuffer, Matchers.is(true)); + assertThat(cloneTest3.replayFromBuffer, Matchers.is(false)); } } } @@ -464,13 +464,13 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { // assert buffer is populated assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); - assertThat(test.markCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); // reset test.reset(); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); 
assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); - assertThat(test.markCalled, Matchers.is(true)); - assertThat(test.resetCalled, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(true)); for (int readLen2 = readLen + 1; readLen2 <= bufferSize; readLen2++) { Tuple mockSourceTuple2 = getMockInfiniteInputStream(); BufferOnMarkInputStream test2 = new BufferOnMarkInputStream(mockSourceTuple2.v2(), bufferSize); @@ -484,14 +484,14 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(read2.length - readLen)); // assert buffer is appended and fully replayed assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - read2.length)); - assertThat(test2.markCalled, Matchers.is(true)); - assertThat(test2.resetCalled, Matchers.is(false)); + assertThat(test2.storeToBuffer, Matchers.is(true)); + assertThat(test2.replayFromBuffer, Matchers.is(false)); // mark test2.mark(1 + Randomness.get().nextInt(bufferSize)); assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test2.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - assertThat(test2.markCalled, Matchers.is(true)); - assertThat(test2.resetCalled, Matchers.is(false)); + assertThat(test2.storeToBuffer, Matchers.is(true)); + assertThat(test2.replayFromBuffer, Matchers.is(false)); } } @@ -620,7 +620,7 @@ public void testNoMockDoubleMarkEverywhere() throws Exception { if (markLen <= readLen) { assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen)); } else { - assertThat(in.resetCalled, Matchers.is(false)); + assertThat(in.replayFromBuffer, Matchers.is(false)); } assertArray(offset, test); // second mark @@ -636,15 +636,15 @@ public void testNoMockDoubleMarkEverywhere() throws Exception { byte[] test2 = in.readNBytes(readLen2); if (markLen + readLen2 <= readLen) { assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen + markLen)); - assertThat(in.resetCalled, Matchers.is(true)); + assertThat(in.replayFromBuffer, Matchers.is(true)); assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen - readLen2)); } else { assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen2)); - assertThat(in.resetCalled, Matchers.is(false)); + assertThat(in.replayFromBuffer, Matchers.is(false)); } assertArray(offset + markLen, test2); in.reset(); - assertThat(in.resetCalled, Matchers.is(true)); + assertThat(in.replayFromBuffer, Matchers.is(true)); if (markLen + readLen2 <= readLen) { assertThat(in.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(length - readLen + markLen)); assertThat(in.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - markLen)); @@ -705,8 +705,8 @@ private void testMarkResetMarkStep(BufferOnMarkInputStream stream, int offset, i } private BufferOnMarkInputStream cloneBufferOnMarkStream(BufferOnMarkInputStream orig) { - int origOffset = ((NoMarkByteArrayInputStream) orig.getWrapped()).getPos(); - int origLen = ((NoMarkByteArrayInputStream) orig.getWrapped()).getCount(); + int origOffset = ((NoMarkByteArrayInputStream) orig.source).getPos(); + int origLen = ((NoMarkByteArrayInputStream) orig.source).getCount(); BufferOnMarkInputStream cloneStream = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, origOffset, origLen - origOffset), 
orig.ringBuffer.getBufferSize()); if (orig.ringBuffer.buffer != null) { @@ -717,8 +717,8 @@ private BufferOnMarkInputStream cloneBufferOnMarkStream(BufferOnMarkInputStream cloneStream.ringBuffer.head = orig.ringBuffer.head; cloneStream.ringBuffer.tail = orig.ringBuffer.tail; cloneStream.ringBuffer.position = orig.ringBuffer.position; - cloneStream.markCalled = orig.markCalled; - cloneStream.resetCalled = orig.resetCalled; + cloneStream.storeToBuffer = orig.storeToBuffer; + cloneStream.replayFromBuffer = orig.replayFromBuffer; cloneStream.closed = orig.closed; return cloneStream; } @@ -732,8 +732,8 @@ private void cloneBufferOnMarkStream(BufferOnMarkInputStream clone, BufferOnMark clone.ringBuffer.head = orig.ringBuffer.head; clone.ringBuffer.tail = orig.ringBuffer.tail; clone.ringBuffer.position = orig.ringBuffer.position; - clone.markCalled = orig.markCalled; - clone.resetCalled = orig.resetCalled; + clone.storeToBuffer = orig.storeToBuffer; + clone.replayFromBuffer = orig.replayFromBuffer; clone.closed = orig.closed; } From 0f9f77c11e826a810c4c6f89f753583e4bbb8729 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 1 Jan 2020 22:29:34 +0200 Subject: [PATCH 063/142] Small test polishing --- .../BufferOnMarkInputStreamTests.java | 127 +++++++++++------- 1 file changed, 79 insertions(+), 48 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java index bb005abd85f08..9f7da7ea55db5 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/BufferOnMarkInputStreamTests.java @@ -39,7 +39,7 @@ public void testResetWithoutMarkFails() throws Exception { Tuple mockSourceTuple = getMockInfiniteInputStream(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), 1 + Randomness.get().nextInt(1024)); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.readNBytes(randomFrom(0, randomInt(31))); IOException e = expectThrows(IOException.class, () -> { test.reset(); }); @@ -48,29 +48,29 @@ public void testResetWithoutMarkFails() throws Exception { public void testMarkAndBufferReadLimitsCheck() throws Exception { Tuple mockSourceTuple = getMockInfiniteInputStream(); - int bufferSize = 1 + Randomness.get().nextInt(1024); + int bufferSize = randomIntBetween(1, 1024); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); assertThat(test.getMaxMarkReadlimit(), Matchers.is(bufferSize)); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); - int wrongReadLimit = bufferSize + 1 + Randomness.get().nextInt(8); + test.readNBytes(randomFrom(0, randomInt(32))); + int wrongLargeReadLimit = bufferSize + randomIntBetween(1, 8); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - test.mark(wrongReadLimit); + test.mark(wrongLargeReadLimit); }); - assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongReadLimit + "] exceeds the maximum value of [" + assertThat(e.getMessage(), Matchers.is("Readlimit value [" + wrongLargeReadLimit + "] exceeds the maximum value of [" + bufferSize + "]")); e = expectThrows(IllegalArgumentException.class, () -> { - 
test.mark(-1 - Randomness.get().nextInt(2)); + test.mark(-1 - randomInt(1)); }); assertThat(e.getMessage(), Matchers.containsString("cannot be negative")); e = expectThrows(IllegalArgumentException.class, () -> { - new BufferOnMarkInputStream(mock(InputStream.class), 0 - Randomness.get().nextInt(2)); + new BufferOnMarkInputStream(mock(InputStream.class), 0 - randomInt(1)); }); assertThat(e.getMessage(), Matchers.is("The buffersize constructor argument must be a strictly positive value")); } public void testCloseRejectsSuccessiveCalls() throws Exception { - int bufferSize = 3 + Randomness.get().nextInt(32); + int bufferSize = 3 + Randomness.get().nextInt(128); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); @@ -106,39 +106,45 @@ public void testCloseRejectsSuccessiveCalls() throws Exception { } public void testBufferingUponMark() throws Exception { - int bufferSize = 3 + Randomness.get().nextInt(32); + int bufferSize = randomIntBetween(3, 128); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); - // read without mark + // read without mark, should be a simple pass-through with the same byte count + int bytesReadBefore = bytesRead.get(); assertThat(test.read(), Matchers.not(-1)); - int readLen = 1 + Randomness.get().nextInt(8); + int bytesReadAfter = bytesRead.get(); + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); + int readLen = randomIntBetween(1, 8); + bytesReadBefore = bytesRead.get(); if (randomBoolean()) { test.readNBytes(readLen); } else { skipNBytes(test, readLen); } - assertThat(readLen, Matchers.not(0)); + bytesReadAfter = bytesRead.get(); + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); // assert no buffering assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + test.mark(randomIntBetween(1, bufferSize)); // read one byte - int bytesReadBefore = bytesRead.get(); + bytesReadBefore = bytesRead.get(); assertThat(test.read(), Matchers.not(-1)); - int bytesReadAfter = bytesRead.get(); + bytesReadAfter = bytesRead.get(); // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); // assert byte is buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1)); + assertThat(test.storeToBuffer, Matchers.is(true)); assertThat(test.replayFromBuffer, Matchers.is(false)); // read more bytes, up to buffer size bytes + readLen = randomIntBetween(1, bufferSize - 1); bytesReadBefore = bytesRead.get(); - readLen = 1 + Randomness.get().nextInt(bufferSize - 1); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -151,19 +157,23 @@ public void testBufferingUponMark() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - 1 - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(1 + readLen)); 
assertThat(test.replayFromBuffer, Matchers.is(false)); + assertThat(test.storeToBuffer, Matchers.is(true)); } - public void testInvalidateMark() throws Exception { - int bufferSize = 3 + Randomness.get().nextInt(32); + public void testMarkInvalidation() throws Exception { + int bufferSize = randomIntBetween(3, 128); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); + assertThat(test.storeToBuffer, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // mark test.mark(randomIntBetween(1, bufferSize)); // read all bytes to fill the mark buffer int bytesReadBefore = bytesRead.get(); + // read enough to populate the full buffer space int readLen = bufferSize; if (randomBoolean()) { test.readNBytes(readLen); @@ -177,19 +187,21 @@ public void testInvalidateMark() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(bufferSize)); assertThat(test.replayFromBuffer, Matchers.is(false)); + assertThat(test.storeToBuffer, Matchers.is(true)); // read another one byte bytesReadBefore = bytesRead.get(); assertThat(test.read(), Matchers.not(-1)); bytesReadAfter = bytesRead.get(); // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(1)); - // assert mark is invalidated + // assert mark is invalidated and no buffering is further performed assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); + assertThat(test.replayFromBuffer, Matchers.is(false)); assertThat(test.storeToBuffer, Matchers.is(false)); // read more bytes bytesReadBefore = bytesRead.get(); - readLen = 1 + Randomness.get().nextInt(2 * bufferSize); + readLen = randomIntBetween(1, 2 * bufferSize); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -198,10 +210,11 @@ public void testInvalidateMark() throws Exception { bytesReadAfter = bytesRead.get(); // assert byte is "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen)); - // assert byte is NOT buffered + // assert byte again is NOT buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.storeToBuffer, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // assert reset does not work any more IOException e = expectThrows(IOException.class, () -> { test.reset(); @@ -210,17 +223,17 @@ public void testInvalidateMark() throws Exception { } public void testConsumeBufferUponReset() throws Exception { - int bufferSize = 3 + Randomness.get().nextInt(128); + int bufferSize = randomIntBetween(3, 128); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.readNBytes(randomFrom(0, randomInt(32))); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + 
test.mark(randomIntBetween(1, bufferSize)); // read less than bufferSize bytes int bytesReadBefore = bytesRead.get(); - int readLen = 1 + Randomness.get().nextInt(bufferSize); + int readLen = randomIntBetween(1, bufferSize); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -233,12 +246,14 @@ public void testConsumeBufferUponReset() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // reset test.reset(); assertThat(test.replayFromBuffer, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); // read again, from buffer this time bytesReadBefore = bytesRead.get(); - int readLen2 = 1 + Randomness.get().nextInt(readLen); + int readLen2 = randomIntBetween(1, readLen); if (randomBoolean()) { test.readNBytes(readLen2); } else { @@ -255,17 +270,17 @@ public void testConsumeBufferUponReset() throws Exception { } public void testInvalidateMarkAfterReset() throws Exception { - int bufferSize = 3 + Randomness.get().nextInt(128); + int bufferSize = randomIntBetween(3, 128); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.readNBytes(randomFrom(0, randomInt(32))); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + test.mark(randomIntBetween(1, bufferSize)); // read less than bufferSize bytes int bytesReadBefore = bytesRead.get(); - int readLen = 1 + Randomness.get().nextInt(bufferSize); + int readLen = randomIntBetween(1, bufferSize); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -278,13 +293,18 @@ public void testInvalidateMarkAfterReset() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // reset test.reset(); + // assert signal for replay from buffer is toggled assertThat(test.replayFromBuffer, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); + // assert bytes are still buffered assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); // read again, from buffer this time bytesReadBefore = bytesRead.get(); + // read all bytes from the buffer int readLen2 = readLen; if (randomBoolean()) { test.readNBytes(readLen2); @@ -301,6 +321,7 @@ public void testInvalidateMarkAfterReset() throws Exception { assertThat(test.replayFromBuffer, Matchers.is(true)); // read on, from the stream, until the mark buffer is full bytesReadBefore = bytesRead.get(); + // read the remaining bytes to fill the buffer int readLen3 = bufferSize - readLen; if (randomBoolean()) { test.readNBytes(readLen3); @@ -311,6 +332,7 @@ public void testInvalidateMarkAfterReset() throws Exception { // assert bytes are "read" and not returned from the buffer assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3)); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(0)); + 
assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen3)); assertThat(test.storeToBuffer, Matchers.is(true)); if (readLen3 > 0) { assertThat(test.replayFromBuffer, Matchers.is(false)); @@ -319,7 +341,7 @@ public void testInvalidateMarkAfterReset() throws Exception { } // read more bytes bytesReadBefore = bytesRead.get(); - int readLen4 = 1 + Randomness.get().nextInt(2 * bufferSize); + int readLen4 = randomIntBetween(1, 2 * bufferSize); if (randomBoolean()) { test.readNBytes(readLen4); } else { @@ -332,6 +354,7 @@ public void testInvalidateMarkAfterReset() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test.storeToBuffer, Matchers.is(false)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // assert reset does not work anymore IOException e = expectThrows(IOException.class, () -> { test.reset(); @@ -340,17 +363,17 @@ public void testInvalidateMarkAfterReset() throws Exception { } public void testMarkAfterResetWhileReplayingBuffer() throws Exception { - int bufferSize = 8 + Randomness.get().nextInt(8); + int bufferSize = randomIntBetween(8, 16); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.readNBytes(randomFrom(0, randomInt(32))); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + test.mark(randomIntBetween(1, bufferSize)); // read less than bufferSize bytes int bytesReadBefore = bytesRead.get(); - int readLen = 1 + Randomness.get().nextInt(bufferSize); + int readLen = randomIntBetween(1, bufferSize); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -367,8 +390,10 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { // reset test.reset(); assertThat(test.replayFromBuffer, Matchers.is(true)); + assertThat(test.storeToBuffer, Matchers.is(true)); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); + // read bytes after reset for (int readLen2 = 1; readLen2 <= readLen; readLen2++) { Tuple mockSourceTuple2 = getMockInfiniteInputStream(); BufferOnMarkInputStream cloneTest = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); @@ -389,8 +414,8 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); assertThat(cloneTest.storeToBuffer, Matchers.is(true)); assertThat(cloneTest.replayFromBuffer, Matchers.is(true)); - // mark - cloneTest.mark(1 + Randomness.get().nextInt(bufferSize)); + // mark inside the buffer after reset + cloneTest.mark(randomIntBetween(1, bufferSize)); assertThat(cloneTest.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen + readLen2)); assertThat(cloneTest.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen - readLen2)); assertThat(cloneTest.storeToBuffer, Matchers.is(true)); @@ -435,6 +460,7 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen3 + readLen2 - readLen)); // assert buffer is appended and fully replayed 
assertThat(cloneTest3.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen3)); + assertThat(cloneTest3.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen3 + readLen2 - readLen)); assertThat(cloneTest3.storeToBuffer, Matchers.is(true)); assertThat(cloneTest3.replayFromBuffer, Matchers.is(false)); } @@ -442,17 +468,17 @@ public void testMarkAfterResetWhileReplayingBuffer() throws Exception { } public void testMarkAfterResetAfterReplayingBuffer() throws Exception { - int bufferSize = 8 + Randomness.get().nextInt(8); + int bufferSize = randomIntBetween(8, 16); Tuple mockSourceTuple = getMockInfiniteInputStream(); AtomicInteger bytesRead = mockSourceTuple.v1(); BufferOnMarkInputStream test = new BufferOnMarkInputStream(mockSourceTuple.v2(), bufferSize); // maybe read some bytes - test.readNBytes(randomFrom(0, Randomness.get().nextInt(32))); + test.readNBytes(randomFrom(0, randomInt(32))); // mark - test.mark(1 + Randomness.get().nextInt(bufferSize)); + test.mark(randomIntBetween(1, bufferSize)); // read less than bufferSize bytes int bytesReadBefore = bytesRead.get(); - int readLen = 1 + Randomness.get().nextInt(bufferSize); + int readLen = randomIntBetween(1, bufferSize); if (randomBoolean()) { test.readNBytes(readLen); } else { @@ -465,6 +491,7 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); assertThat(test.ringBuffer.getAvailableToReadByteCount(), Matchers.is(readLen)); assertThat(test.storeToBuffer, Matchers.is(true)); + assertThat(test.replayFromBuffer, Matchers.is(false)); // reset test.reset(); assertThat(test.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen)); @@ -478,16 +505,20 @@ public void testMarkAfterResetAfterReplayingBuffer() throws Exception { AtomicInteger bytesRead2 = mockSourceTuple2.v1(); // read again, more than before bytesReadBefore = bytesRead2.get(); - byte[] read2 = test2.readNBytes(readLen2); + if (randomBoolean()) { + test2.readNBytes(readLen2); + } else { + skipNBytes(test2, readLen2); + } bytesReadAfter = bytesRead2.get(); // assert bytes are PARTLY replayed, PARTLY read from the stream - assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(read2.length - readLen)); + assertThat(bytesReadAfter - bytesReadBefore, Matchers.is(readLen2 - readLen)); // assert buffer is appended and fully replayed - assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - read2.length)); + assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize - readLen2)); assertThat(test2.storeToBuffer, Matchers.is(true)); assertThat(test2.replayFromBuffer, Matchers.is(false)); // mark - test2.mark(1 + Randomness.get().nextInt(bufferSize)); + test2.mark(randomIntBetween(1, bufferSize)); assertThat(test2.ringBuffer.getAvailableToWriteByteCount(), Matchers.is(bufferSize)); assertThat(test2.ringBuffer.getAvailableToReadByteCount(), Matchers.is(0)); assertThat(test2.storeToBuffer, Matchers.is(true)); @@ -678,8 +709,8 @@ public void testNoMockMarkWithoutReset() throws Exception { } public void testNoMockThreeMarkResetMarkSteps() throws Exception { - int length = 8 + Randomness.get().nextInt(8); - int stepLen = 4 + Randomness.get().nextInt(4); + int length = randomIntBetween(8, 16); + int stepLen = randomIntBetween(4, 8); BufferOnMarkInputStream in = new BufferOnMarkInputStream(new NoMarkByteArrayInputStream(testArray, 0, length), stepLen); testMarkResetMarkStep(in, 
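Note: the mark/reset tests above all exercise the same contract: mark() starts copying subsequent reads into the ring buffer, reset() replays them, and overrunning the remaining buffer capacity invalidates the mark so that reset() fails. A plain usage sketch of that contract, illustrative only and not part of the test suite, relying solely on the public API used in these tests:

    import org.elasticsearch.repositories.encrypted.BufferOnMarkInputStream;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.util.Arrays;

    public class BufferOnMarkReplayExample {
        public static void main(String[] args) throws IOException {
            byte[] source = "0123456789".getBytes();
            // a 4-byte ring buffer bounds how much can be replayed after reset()
            try (BufferOnMarkInputStream in = new BufferOnMarkInputStream(new ByteArrayInputStream(source), 4)) {
                in.read(); // '0' is read before mark() and therefore never buffered
                in.mark(4); // start buffering, at most 4 bytes
                byte[] first = in.readNBytes(3); // "123", served from the source and copied into the buffer
                in.reset();
                byte[] replayed = in.readNBytes(3); // "123" again, this time served from the buffer
                System.out.println(Arrays.equals(first, replayed)); // true
                // only one more byte fits in the buffer, so this read overruns it and invalidates the mark
                in.readNBytes(5);
                try {
                    in.reset();
                } catch (IOException e) {
                    System.out.println("mark invalidated: " + e.getMessage());
                }
            }
        }
    }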
0, length, stepLen, 2); } From 3cf85b4ccc8116030775855eb089d5abc9374fdb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 5 Jan 2020 17:10:59 +0200 Subject: [PATCH 064/142] Encryption without CMS --- .../encrypted/BlobEncryptionMetadata.java | 17 +- .../DecryptionPacketsInputStream.java | 2 +- .../encrypted/EncryptedRepository.java | 186 ++++++++++-------- .../encrypted/EncryptedRepositoryPlugin.java | 39 +++- .../EncryptionPacketsInputStream.java | 2 +- .../EncryptionPacketsInputStreamTests.java | 2 +- 6 files changed, 145 insertions(+), 103 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index 95e7a471d5d44..c357f26b32d91 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -11,12 +11,13 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -public final class BlobEncryptionMetadata { +public final class BlobEncryptionMetadata implements Writeable { private final byte[] dataEncryptionKeyMaterial; private final int nonce; @@ -50,13 +51,11 @@ public BlobEncryptionMetadata(InputStream inputStream) throws IOException { } } - public void write(OutputStream outputStream) throws IOException { - try (StreamOutput out = new OutputStreamStreamOutput(outputStream)) { - out.setVersion(Version.CURRENT); - out.writeByteArray(this.dataEncryptionKeyMaterial); - out.writeInt(this.nonce); - out.writeInt(this.packetLengthInBytes); - } + @Override + public void writeTo(StreamOutput out) throws IOException { + out.setVersion(Version.CURRENT); + out.writeByteArray(this.dataEncryptionKeyMaterial); + out.writeInt(this.nonce); + out.writeInt(this.packetLengthInBytes); } - } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 8e8ebc7d9253d..8fd86f96a128f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -163,7 +163,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { private Cipher getPacketDecryptionCipher(byte[] packet) throws IOException { GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packet, 0, GCM_IV_LENGTH_IN_BYTES); try { - Cipher packetCipher = Cipher.getInstance(EncryptedRepository.GCM_ENCRYPTION_SCHEME); + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DEK_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); return packetCipher; } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { diff --git 
a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index da4fd63c71c49..d80a0afa893a4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,20 +6,6 @@ package org.elasticsearch.repositories.encrypted; -import org.bouncycastle.asn1.ASN1ObjectIdentifier; -import org.bouncycastle.asn1.cms.CMSObjectIdentifiers; -import org.bouncycastle.cms.CMSAlgorithm; -import org.bouncycastle.cms.CMSEnvelopedData; -import org.bouncycastle.cms.CMSEnvelopedDataGenerator; -import org.bouncycastle.cms.CMSException; -import org.bouncycastle.cms.CMSTypedData; -import org.bouncycastle.cms.PasswordRecipientId; -import org.bouncycastle.cms.PasswordRecipientInfoGenerator; -import org.bouncycastle.cms.RecipientInformation; -import org.bouncycastle.cms.RecipientInformationStore; -import org.bouncycastle.cms.jcajce.JceCMSContentEncryptorBuilder; -import org.bouncycastle.cms.jcajce.JcePasswordEnvelopedRecipient; -import org.bouncycastle.cms.jcajce.JcePasswordRecipientInfoGenerator; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.BlobContainer; @@ -29,16 +15,25 @@ import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import javax.crypto.BadPaddingException; +import javax.crypto.Cipher; +import javax.crypto.IllegalBlockSizeException; import javax.crypto.KeyGenerator; +import javax.crypto.NoSuchPaddingException; import javax.crypto.SecretKey; +import javax.crypto.spec.GCMParameterSpec; import javax.crypto.spec.SecretKeySpec; import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.List; @@ -49,7 +44,10 @@ public class EncryptedRepository extends BlobStoreRepository { static final int GCM_TAG_LENGTH_IN_BYTES = 16; static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; - static final String GCM_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; + static final String DEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; + static final String KEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; + static final int DEK_KEY_SIZE_IN_BITS = 256; + static final String RAND_ALGO = "SHA1PRNG"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB @@ -59,23 +57,24 @@ public class EncryptedRepository extends BlobStoreRepository { private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata-"; private final BlobStoreRepository 
delegatedRepository; - private final char[] masterPassword; - private final KeyGenerator keyGenerator; + private final KeyGenerator dataEncryptionKeyGenerator; + private final SecretKey keyEncryptionKey; private final SecureRandom secureRandom; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService - , BlobStoreRepository delegatedRepository, char[] materPassword) throws NoSuchAlgorithmException { + , BlobStoreRepository delegatedRepository, SecretKey keyEncryptionKey) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; - this.masterPassword = materPassword; - this.keyGenerator = KeyGenerator.getInstance("AES"); - this.keyGenerator.init(256, SecureRandom.getInstance("SHA1PRNG")); - this.secureRandom = SecureRandom.getInstance("SHA1PRNG"); + this.dataEncryptionKeyGenerator = KeyGenerator.getInstance("AES"); + this.dataEncryptionKeyGenerator.init(DEK_KEY_SIZE_IN_BITS, SecureRandom.getInstance(RAND_ALGO)); + this.keyEncryptionKey = keyEncryptionKey; + this.secureRandom = SecureRandom.getInstance(RAND_ALGO); } @Override - protected BlobStore createBlobStore() throws Exception { - return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), keyGenerator, secureRandom, masterPassword); + protected BlobStore createBlobStore() { + return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, keyEncryptionKey, + secureRandom); } @Override @@ -99,21 +98,21 @@ protected void doClose() { private static class EncryptedBlobStoreDecorator implements BlobStore { private final BlobStore delegatedBlobStore; - private final KeyGenerator keyGenerator; + private final KeyGenerator dataEncryptionKeyGenerator; + private final SecretKey keyEncryptionKey; private final SecureRandom secureRandom; - private final char[] masterPassword; - EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator keyGenerator, SecureRandom secureRandom, - char[] masterPassword) { + EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, + SecretKey keyEncryptionKey, SecureRandom secureRandom) { this.delegatedBlobStore = delegatedBlobStore; - this.keyGenerator = keyGenerator; + this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; + this.keyEncryptionKey = keyEncryptionKey; this.secureRandom = secureRandom; - this.masterPassword = masterPassword; } @Override public void close() throws IOException { - this.delegatedBlobStore.close(); + delegatedBlobStore.close(); } @Override @@ -123,9 +122,9 @@ public BlobContainer blobContainer(BlobPath path) { for (String pathComponent : path) { encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(pathComponent); } - return new EncryptedBlobContainerDecorator(this.delegatedBlobStore.blobContainer(path), - this.delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), this.keyGenerator, this.secureRandom, - this.masterPassword); + return new EncryptedBlobContainerDecorator(delegatedBlobStore.blobContainer(path), + delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, keyEncryptionKey, + secureRandom); } } @@ -133,17 +132,17 @@ private static class EncryptedBlobContainerDecorator implements BlobContainer { private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; - private final 
KeyGenerator keyGenerator; + private final KeyGenerator dataEncryptionKeyGenerator; + private final SecretKey keyEncryptionKey; private final SecureRandom secureRandom; - private final char[] masterPassword; EncryptedBlobContainerDecorator(BlobContainer delegatedBlobContainer, BlobContainer encryptionMetadataBlobContainer, - KeyGenerator keyGenerator, SecureRandom secureRandom, char[] masterPassword) { + KeyGenerator dataEncryptionKeyGenerator, SecretKey keyEncryptionKey, SecureRandom secureRandom) { this.delegatedBlobContainer = delegatedBlobContainer; this.encryptionMetadataBlobContainer = encryptionMetadataBlobContainer; - this.masterPassword = masterPassword; + this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; + this.keyEncryptionKey = keyEncryptionKey; this.secureRandom = secureRandom; - this.keyGenerator = keyGenerator; } @Override @@ -154,12 +153,7 @@ public BlobPath path() { @Override public InputStream readBlob(String blobName) throws IOException { BytesReference encryptedMetadataBytes = Streams.readFully(this.encryptionMetadataBlobContainer.readBlob(blobName)); - final BlobEncryptionMetadata metadata; - try { - metadata = decryptMetadata(BytesReference.toBytes(encryptedMetadataBytes)); - } catch (CMSException e) { - throw new IOException(e); - } + final BlobEncryptionMetadata metadata = decryptMetadata(BytesReference.toBytes(encryptedMetadataBytes), keyEncryptionKey); SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, metadata.getDataEncryptionKeyMaterial().length, "AES"); return new DecryptionPacketsInputStream(this.delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, @@ -168,58 +162,82 @@ public InputStream readBlob(String blobName) throws IOException { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { - SecretKey dataEncryptionKey = keyGenerator.generateKey(); + SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); int nonce = secureRandom.nextInt(); + // first write the encrypted blob long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } + // metadata required to decrypt back the encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); - final byte[] encryptedMetadata; - try { - encryptedMetadata = encryptMetadata(metadata); - } catch (CMSException e) { - throw new IOException(e); - } - try (InputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - this.encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); + byte[] encryptedMetadata = encryptMetadata(metadata, keyEncryptionKey, secureRandom); + // write the encrypted metadata + try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + this.encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, + false /* overwrite any blob with the same name because it cannot correspond to any other encrypted blob */); } } - private byte[] encryptMetadata(BlobEncryptionMetadata 
metadata) throws IOException, CMSException { - CMSEnvelopedDataGenerator envelopedDataGenerator = new CMSEnvelopedDataGenerator(); - PasswordRecipientInfoGenerator passwordRecipientInfoGenerator = new JcePasswordRecipientInfoGenerator(CMSAlgorithm.AES256_GCM - , masterPassword); - envelopedDataGenerator.addRecipientInfoGenerator(passwordRecipientInfoGenerator); - final CMSEnvelopedData envelopedData = envelopedDataGenerator.generate(new CMSTypedData() { - @Override - public ASN1ObjectIdentifier getContentType() { - return CMSObjectIdentifiers.data; + private byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, SecureRandom secureRandom) throws IOException { + // serialize metadata to byte[] + final byte[] plaintextMetadata; + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (StreamOutput out = new OutputStreamStreamOutput(baos)) { + metadata.writeTo(out); } - - @Override - public void write(OutputStream out) throws IOException, CMSException { - metadata.write(out); - } - - @Override - public Object getContent() { - return metadata; - } - }, new JceCMSContentEncryptorBuilder(CMSAlgorithm.AES256_GCM).build()); - return envelopedData.getEncoded(); + plaintextMetadata = baos.toByteArray(); + } + // create cipher for metadata encryption + byte[] iv = new byte[GCM_IV_LENGTH_IN_BYTES]; + secureRandom.nextBytes(iv); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); + final Cipher cipher; + try { + cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); + cipher.init(Cipher.ENCRYPT_MODE, keyEncryptionKey, gcmParameterSpec); + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { + throw new IOException("Exception while initializing KEK encryption cipher", e); + } + // encrypt metadata + final byte[] encryptedMetadata; + try { + encryptedMetadata = cipher.doFinal(plaintextMetadata); + } catch (IllegalBlockSizeException | BadPaddingException e) { + throw new IOException("Exception while encrypting metadata and DEK", e); + } + // concatenate iv and metadata cipher text + byte[] resultCiphertext = new byte[iv.length + encryptedMetadata.length]; + // prepend IV + System.arraycopy(iv, 0, resultCiphertext, 0, iv.length); + System.arraycopy(encryptedMetadata, 0, resultCiphertext, iv.length, encryptedMetadata.length); + return resultCiphertext; } - private BlobEncryptionMetadata decryptMetadata(byte[] metadata) throws CMSException, IOException { - final CMSEnvelopedData envelopedData = new CMSEnvelopedData(metadata); - RecipientInformationStore recipients = envelopedData.getRecipientInfos(); - RecipientInformation recipient = recipients.get(new PasswordRecipientId()); - if (recipient == null) { - throw new IllegalArgumentException(); + private BlobEncryptionMetadata decryptMetadata(byte[] encryptedMetadata, SecretKey keyEncryptionKey) throws IOException { + // first bytes are IV + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedMetadata, 0, + GCM_IV_LENGTH_IN_BYTES); + // initialize cipher + final Cipher cipher; + try { + cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); + cipher.init(Cipher.DECRYPT_MODE, keyEncryptionKey, gcmParameterSpec); + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { + throw new IOException("Exception while initializing KEK decryption cipher", e); + } + // decrypt metadata (use cipher) + 
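Note: the CMS envelope removed above is replaced by a plain JCE construction: a random 12-byte IV, AES/GCM with a 128-bit tag, and the IV prepended to the ciphertext so the decrypting side can recover it from the first bytes (the per-packet decryption cipher earlier in this patch uses the same layout). A self-contained sketch of that pattern, independent of the repository classes; the key and payload here are placeholders:

    import javax.crypto.Cipher;
    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;
    import javax.crypto.spec.GCMParameterSpec;
    import java.nio.charset.StandardCharsets;
    import java.security.SecureRandom;
    import java.util.Arrays;

    public class GcmIvPrefixExample {

        private static final int IV_LENGTH_IN_BYTES = 12;
        private static final int TAG_LENGTH_IN_BITS = 128;

        static byte[] encrypt(byte[] plaintext, SecretKey key, SecureRandom random) throws Exception {
            byte[] iv = new byte[IV_LENGTH_IN_BYTES];
            random.nextBytes(iv);
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(TAG_LENGTH_IN_BITS, iv));
            byte[] ciphertext = cipher.doFinal(plaintext);
            // prepend the IV so decryption can rebuild the GCMParameterSpec from the blob itself
            byte[] result = new byte[iv.length + ciphertext.length];
            System.arraycopy(iv, 0, result, 0, iv.length);
            System.arraycopy(ciphertext, 0, result, iv.length, ciphertext.length);
            return result;
        }

        static byte[] decrypt(byte[] ivAndCiphertext, SecretKey key) throws Exception {
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            // the first 12 bytes are the IV, the rest is ciphertext followed by the 16-byte GCM tag
            cipher.init(Cipher.DECRYPT_MODE, key, new GCMParameterSpec(TAG_LENGTH_IN_BITS, ivAndCiphertext, 0, IV_LENGTH_IN_BYTES));
            return cipher.doFinal(ivAndCiphertext, IV_LENGTH_IN_BYTES, ivAndCiphertext.length - IV_LENGTH_IN_BYTES);
        }

        public static void main(String[] args) throws Exception {
            KeyGenerator keyGen = KeyGenerator.getInstance("AES");
            keyGen.init(256);
            SecretKey key = keyGen.generateKey();
            byte[] metadata = "dek-bytes|nonce|packet-length".getBytes(StandardCharsets.UTF_8);
            byte[] wrapped = encrypt(metadata, key, new SecureRandom());
            byte[] unwrapped = decrypt(wrapped, key);
            System.out.println(Arrays.equals(metadata, unwrapped)); // true
        }
    }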
final byte[] decryptedMetadata; + try { + decryptedMetadata = cipher.doFinal(encryptedMetadata, GCM_IV_LENGTH_IN_BYTES, + encryptedMetadata.length - GCM_IV_LENGTH_IN_BYTES); + } catch (IllegalBlockSizeException | BadPaddingException e) { + throw new IOException("Exception while decrypting metadata and DEK", e); + } + try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(decryptedMetadata)) { + return new BlobEncryptionMetadata(decryptedMetadataInputStream); } - final byte[] decryptedMetadata = recipient.getContent(new JcePasswordEnvelopedRecipient(masterPassword)); - return new BlobEncryptionMetadata(new ByteArrayInputStream(decryptedMetadata)); } @Override diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 5b49fa2a5ac9f..135adb1ebbe91 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -20,6 +20,13 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import javax.crypto.SecretKey; +import javax.crypto.SecretKeyFactory; +import javax.crypto.spec.PBEKeySpec; +import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; +import java.security.NoSuchAlgorithmException; +import java.security.spec.InvalidKeySpecException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -28,15 +35,23 @@ public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { + static final String REPOSITORY_TYPE_NAME = "encrypted"; + static final String CIPHER_ALGO = "AES"; + static final int KEK_PBKDF2_ITER = 61616; // funny "uncommon" iter count larger than 60k + static final String KEK_PBKDF2_ALGO = "PBKDF2WithHmacSHA512"; + static final int KEK_KEY_SIZE_IN_BITS = 256; + static final String DEFAULT_KEK_SALT = "the AES key encryption key, which is generated from the repository password using " + + "PBKDF2, is never stored on disk, therefore the salt parameter of PBKDF2, used to protect against offline cracking of the key" + + " using rainbow tables is not required. 
A hardcoded salt value does not compromise security."; static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", "password", key -> SecureSetting.secureString(key, null)); static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); - static final String TYPE = "encrypted"; + static final Setting KEK_PBKDF2_SALT = new Setting<>("kek_salt", DEFAULT_KEK_SALT, Function.identity()); private final Map cachedRepositoryPasswords = new HashMap<>(); public EncryptedRepositoryPlugin(Settings settings) { - // cache the passwords for all encrypted repositories during *plugin* instantiation + // cache the passwords for all encrypted repositories during plugin instantiation // the keystore-based secure passwords are not readable on repository instantiation for (String repositoryName : ENCRYPTION_PASSWORD_SETTING.getNamespaces(settings)) { Setting encryptionPasswordSetting = ENCRYPTION_PASSWORD_SETTING @@ -46,6 +61,14 @@ public EncryptedRepositoryPlugin(Settings settings) { } } + SecretKey generateKeyEncryptionKey(char[] password, byte[] salt) throws NoSuchAlgorithmException, InvalidKeySpecException { + PBEKeySpec keySpec = new PBEKeySpec(password, salt, KEK_PBKDF2_ITER, KEK_KEY_SIZE_IN_BITS); + SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KEK_PBKDF2_ALGO); + SecretKey secretKey = keyFactory.generateSecret(keySpec); + SecretKeySpec secret = new SecretKeySpec(secretKey.getEncoded(), CIPHER_ALGO); + return secret; + } + @Override public List> getSettings() { return List.of(ENCRYPTION_PASSWORD_SETTING); @@ -54,7 +77,7 @@ public List> getSettings() { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry, final ClusterService clusterService) { - return Collections.singletonMap(TYPE, new Repository.Factory() { + return Collections.singletonMap(REPOSITORY_TYPE_NAME, new Repository.Factory() { @Override public Repository create(RepositoryMetaData metadata) { @@ -67,9 +90,9 @@ public Repository create(RepositoryMetaData metaData, Function Date: Sun, 5 Jan 2020 17:15:30 +0200 Subject: [PATCH 065/142] Javadoc --- .../repositories/encrypted/BlobEncryptionMetadata.java | 2 -- .../repositories/encrypted/EncryptedRepository.java | 3 ++- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index c357f26b32d91..fbc301306643f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -8,14 +8,12 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; public final class BlobEncryptionMetadata implements Writeable { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java 
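Note: generateKeyEncryptionKey above turns the per-repository password into an AES key via PBKDF2. A standalone sketch of that derivation using the constants introduced in this patch; the password and salt literals are placeholders, since the real password comes from the repository.encrypted.<name>.password secure setting and the plugin defaults to its fixed salt string:

    import javax.crypto.SecretKey;
    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.nio.charset.StandardCharsets;

    public class KekDerivationExample {
        public static void main(String[] args) throws Exception {
            char[] repositoryPassword = "placeholder-password".toCharArray(); // stands in for the secure setting
            byte[] salt = "placeholder-salt".getBytes(StandardCharsets.UTF_8); // stands in for the default salt string
            PBEKeySpec keySpec = new PBEKeySpec(repositoryPassword, salt, 61616, 256); // KEK_PBKDF2_ITER, KEK_KEY_SIZE_IN_BITS
            SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); // KEK_PBKDF2_ALGO
            SecretKey derived = factory.generateSecret(keySpec);
            // re-wrap the raw derived bytes as an AES key so it can be fed to the AES/GCM ciphers
            SecretKey keyEncryptionKey = new SecretKeySpec(derived.getEncoded(), "AES");
            keySpec.clearPassword();
            System.out.println(keyEncryptionKey.getAlgorithm() + " key, " + (keyEncryptionKey.getEncoded().length * 8) + " bits");
        }
    }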
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index d80a0afa893a4..22205c0676fb2 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -180,7 +180,8 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } } - private byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, SecureRandom secureRandom) throws IOException { + private byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, + SecureRandom secureRandom) throws IOException { // serialize metadata to byte[] final byte[] plaintextMetadata; try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { From 7d1b3cca04acfd40a6f877568aafac22a86ed26a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 5 Jan 2020 18:13:53 +0200 Subject: [PATCH 066/142] Snapshot works --- .../encrypted/BlobEncryptionMetadata.java | 23 +++- .../encrypted/EncryptedRepository.java | 124 +++++++++--------- .../encrypted/EncryptedRepositoryTests.java | 4 + 3 files changed, 88 insertions(+), 63 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index fbc301306643f..db4ff89f471ac 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -14,6 +14,8 @@ import java.io.IOException; import java.io.InputStream; +import java.util.Arrays; +import java.util.Objects; public final class BlobEncryptionMetadata implements Writeable { @@ -22,7 +24,7 @@ public final class BlobEncryptionMetadata implements Writeable { private final int packetLengthInBytes; public BlobEncryptionMetadata(byte[] dataEncryptionKeyMaterial, int nonce, int packetLengthInBytes) { - this.dataEncryptionKeyMaterial = dataEncryptionKeyMaterial; + this.dataEncryptionKeyMaterial = Objects.requireNonNull(dataEncryptionKeyMaterial); this.nonce = nonce; this.packetLengthInBytes = packetLengthInBytes; } @@ -51,9 +53,26 @@ public BlobEncryptionMetadata(InputStream inputStream) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - out.setVersion(Version.CURRENT); + Version.writeVersion(Version.CURRENT, out); out.writeByteArray(this.dataEncryptionKeyMaterial); out.writeInt(this.nonce); out.writeInt(this.packetLengthInBytes); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BlobEncryptionMetadata metadata = (BlobEncryptionMetadata) o; + return nonce == metadata.nonce && + packetLengthInBytes == metadata.packetLengthInBytes && + Arrays.equals(dataEncryptionKeyMaterial, metadata.dataEncryptionKeyMaterial); + } + + @Override + public int hashCode() { + int result = Objects.hash(nonce, packetLengthInBytes); + result = 31 * result + Arrays.hashCode(dataEncryptionKeyMaterial); + return 
result; + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 22205c0676fb2..22bbdf2d7c592 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -180,67 +180,6 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } } - private byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, - SecureRandom secureRandom) throws IOException { - // serialize metadata to byte[] - final byte[] plaintextMetadata; - try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { - try (StreamOutput out = new OutputStreamStreamOutput(baos)) { - metadata.writeTo(out); - } - plaintextMetadata = baos.toByteArray(); - } - // create cipher for metadata encryption - byte[] iv = new byte[GCM_IV_LENGTH_IN_BYTES]; - secureRandom.nextBytes(iv); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); - final Cipher cipher; - try { - cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); - cipher.init(Cipher.ENCRYPT_MODE, keyEncryptionKey, gcmParameterSpec); - } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { - throw new IOException("Exception while initializing KEK encryption cipher", e); - } - // encrypt metadata - final byte[] encryptedMetadata; - try { - encryptedMetadata = cipher.doFinal(plaintextMetadata); - } catch (IllegalBlockSizeException | BadPaddingException e) { - throw new IOException("Exception while encrypting metadata and DEK", e); - } - // concatenate iv and metadata cipher text - byte[] resultCiphertext = new byte[iv.length + encryptedMetadata.length]; - // prepend IV - System.arraycopy(iv, 0, resultCiphertext, 0, iv.length); - System.arraycopy(encryptedMetadata, 0, resultCiphertext, iv.length, encryptedMetadata.length); - return resultCiphertext; - } - - private BlobEncryptionMetadata decryptMetadata(byte[] encryptedMetadata, SecretKey keyEncryptionKey) throws IOException { - // first bytes are IV - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedMetadata, 0, - GCM_IV_LENGTH_IN_BYTES); - // initialize cipher - final Cipher cipher; - try { - cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); - cipher.init(Cipher.DECRYPT_MODE, keyEncryptionKey, gcmParameterSpec); - } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { - throw new IOException("Exception while initializing KEK decryption cipher", e); - } - // decrypt metadata (use cipher) - final byte[] decryptedMetadata; - try { - decryptedMetadata = cipher.doFinal(encryptedMetadata, GCM_IV_LENGTH_IN_BYTES, - encryptedMetadata.length - GCM_IV_LENGTH_IN_BYTES); - } catch (IllegalBlockSizeException | BadPaddingException e) { - throw new IOException("Exception while decrypting metadata and DEK", e); - } - try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(decryptedMetadata)) { - return new BlobEncryptionMetadata(decryptedMetadataInputStream); - } - } - @Override public void writeBlobAtomic(String blobName, InputStream inputStream, long 
blobSize, boolean failIfAlreadyExists) throws IOException { @@ -276,4 +215,67 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws } } + // protected for tests + protected static byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, + SecureRandom secureRandom) throws IOException { + // serialize metadata to byte[] + final byte[] plaintextMetadata; + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (StreamOutput out = new OutputStreamStreamOutput(baos)) { + metadata.writeTo(out); + } + plaintextMetadata = baos.toByteArray(); + } + // create cipher for metadata encryption + byte[] iv = new byte[GCM_IV_LENGTH_IN_BYTES]; + secureRandom.nextBytes(iv); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); + final Cipher cipher; + try { + cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); + cipher.init(Cipher.ENCRYPT_MODE, keyEncryptionKey, gcmParameterSpec); + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { + throw new IOException("Exception while initializing KEK encryption cipher", e); + } + // encrypt metadata + final byte[] encryptedMetadata; + try { + encryptedMetadata = cipher.doFinal(plaintextMetadata); + } catch (IllegalBlockSizeException | BadPaddingException e) { + throw new IOException("Exception while encrypting metadata and DEK", e); + } + // concatenate iv and metadata cipher text + byte[] resultCiphertext = new byte[iv.length + encryptedMetadata.length]; + // prepend IV + System.arraycopy(iv, 0, resultCiphertext, 0, iv.length); + System.arraycopy(encryptedMetadata, 0, resultCiphertext, iv.length, encryptedMetadata.length); + return resultCiphertext; + } + + // protected for tests + protected static BlobEncryptionMetadata decryptMetadata(byte[] encryptedMetadata, SecretKey keyEncryptionKey) throws IOException { + // first bytes are IV + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedMetadata, 0, + GCM_IV_LENGTH_IN_BYTES); + // initialize cipher + final Cipher cipher; + try { + cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); + cipher.init(Cipher.DECRYPT_MODE, keyEncryptionKey, gcmParameterSpec); + } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { + throw new IOException("Exception while initializing KEK decryption cipher", e); + } + // decrypt metadata (use cipher) + final byte[] decryptedMetadata; + try { + decryptedMetadata = cipher.doFinal(encryptedMetadata, GCM_IV_LENGTH_IN_BYTES, + encryptedMetadata.length - GCM_IV_LENGTH_IN_BYTES); + } catch (IllegalBlockSizeException | BadPaddingException e) { + throw new IOException("Exception while decrypting metadata and DEK", e); + } + try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(decryptedMetadata)) { + return new BlobEncryptionMetadata(decryptedMetadataInputStream); + } + } + } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java new file mode 100644 index 0000000000000..38d44a6ca8f18 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -0,0 +1,4 @@ +package org.elasticsearch.repositories.encrypted; + 
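Note: encryptMetadata and decryptMetadata are made protected static here explicitly so tests can reach them, while the new test class below is still an empty stub. One way a round-trip test could look, as an illustrative sketch under the assumption that it lives in the same package and extends ESTestCase; it only uses members visible in this patch and is not part of the commit:

    package org.elasticsearch.repositories.encrypted;

    import org.elasticsearch.test.ESTestCase;

    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;
    import java.security.SecureRandom;

    public class EncryptedMetadataRoundTripTests extends ESTestCase {

        public void testMetadataEncryptDecryptRoundTrip() throws Exception {
            SecureRandom secureRandom = new SecureRandom();
            // a throwaway 256-bit AES key standing in for the real key encryption key
            KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
            keyGenerator.init(256, secureRandom);
            SecretKey keyEncryptionKey = keyGenerator.generateKey();
            byte[] dekMaterial = new byte[32];
            secureRandom.nextBytes(dekMaterial);
            BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dekMaterial, secureRandom.nextInt(), 64 * 1024);
            byte[] encrypted = EncryptedRepository.encryptMetadata(metadata, keyEncryptionKey, secureRandom);
            BlobEncryptionMetadata decrypted = EncryptedRepository.decryptMetadata(encrypted, keyEncryptionKey);
            // equality relies on the equals()/hashCode() this commit adds to BlobEncryptionMetadata
            assertEquals(metadata, decrypted);
        }
    }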
+public class EncryptedRepositoryTests { +} From 31a2b3083a2be9265f5a2fac3b26239eab1250ed Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 6 Jan 2020 13:40:27 +0200 Subject: [PATCH 067/142] Remove bouncy castle dependency --- .../plugin/repository-encrypted/build.gradle | 20 ------------------- .../licenses/bc-fips-1.0.1.jar.sha1 | 1 - .../licenses/bc-fips-LICENSE.txt | 12 ----------- .../licenses/bc-fips-NOTICE.txt | 0 .../licenses/bcpkix-fips-1.0.3.jar.sha1 | 1 - .../licenses/bcpkix-fips-LICENSE.txt | 12 ----------- .../licenses/bcpkix-fips-NOTICE.txt | 0 .../encrypted/EncryptedRepositoryTests.java | 4 ---- 8 files changed, 50 deletions(-) delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt delete mode 100644 x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt delete mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index 08e04070d926c..5a2f82946f711 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -8,24 +8,4 @@ esplugin { extendedPlugins = ['x-pack-core'] } -thirdPartyAudit { - ignoreViolations ( - 'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator', - 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2', - ) -} - -dependencies { - compile "org.bouncycastle:bc-fips:1.0.1" - compile "org.bouncycastle:bcpkix-fips:1.0.3" -} - integTest.enabled = false diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 b/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 deleted file mode 100644 index c0612895533ca..0000000000000 --- a/x-pack/plugin/repository-encrypted/licenses/bc-fips-1.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ed8dd3144761eaa33b9c56f5e2bef85f1b731d6f diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt b/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt deleted file mode 100644 index 66ba32c7e58ca..0000000000000 --- a/x-pack/plugin/repository-encrypted/licenses/bc-fips-LICENSE.txt +++ /dev/null @@ -1,12 +0,0 @@ -Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. 
(https://www.bouncycastle.org) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt b/x-pack/plugin/repository-encrypted/licenses/bc-fips-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 deleted file mode 100644 index a5b07bac95422..0000000000000 --- a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-1.0.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -33c47b105777c9dcc8a08188186bd35401366bd1 diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt deleted file mode 100644 index 66ba32c7e58ca..0000000000000 --- a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-LICENSE.txt +++ /dev/null @@ -1,12 +0,0 @@ -Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt b/x-pack/plugin/repository-encrypted/licenses/bcpkix-fips-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java deleted file mode 100644 index 38d44a6ca8f18..0000000000000 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ /dev/null @@ -1,4 +0,0 @@ -package org.elasticsearch.repositories.encrypted; - -public class EncryptedRepositoryTests { -} From 591e48ce138a8f74553e926440b5f21c048d80ce Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 7 Jan 2020 14:31:00 +0200 Subject: [PATCH 068/142] Password based metadata encryptor --- .../encrypted/BlobEncryptionMetadata.java | 18 ++ .../encrypted/EncryptedRepository.java | 110 +++-------- .../encrypted/EncryptedRepositoryPlugin.java | 23 +-- .../encrypted/PasswordBasedEncryptor.java | 181 ++++++++++++++++++ 4 files changed, 234 insertions(+), 98 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index db4ff89f471ac..33a53ed0a240a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -8,10 +8,13 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; @@ -75,4 +78,19 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(dataEncryptionKeyMaterial); return result; } + + static byte[] serializeMetadataToByteArray(BlobEncryptionMetadata metadata) throws IOException { + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (StreamOutput out = new OutputStreamStreamOutput(baos)) { + metadata.writeTo(out); + } + return baos.toByteArray(); + } + } + + static BlobEncryptionMetadata deserializeMetadataFromByteArray(byte[] metadata) throws IOException { + try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(metadata)) { + return new BlobEncryptionMetadata(decryptedMetadataInputStream); + } + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 22bbdf2d7c592..1427475568eea 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ 
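Note: the two helpers added to BlobEncryptionMetadata above give it a symmetric byte[] form, which is what the new metadata encryptor consumes. A small same-package sketch of the round trip, illustrative and not part of the commit:

    // package org.elasticsearch.repositories.encrypted is assumed, since the helpers are package-private
    static boolean metadataRoundTripsCleanly(byte[] dekMaterial, int nonce, int packetLength) throws java.io.IOException {
        BlobEncryptionMetadata original = new BlobEncryptionMetadata(dekMaterial, nonce, packetLength);
        byte[] serialized = BlobEncryptionMetadata.serializeMetadataToByteArray(original);
        BlobEncryptionMetadata copy = BlobEncryptionMetadata.deserializeMetadataFromByteArray(serialized);
        return original.equals(copy); // equals() compares the DEK bytes, the nonce and the packet length
    }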
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -32,12 +32,14 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.GeneralSecurityException; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; public class EncryptedRepository extends BlobStoreRepository { @@ -47,7 +49,6 @@ public class EncryptedRepository extends BlobStoreRepository { static final String DEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final String KEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final int DEK_KEY_SIZE_IN_BITS = 256; - static final String RAND_ALGO = "SHA1PRNG"; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB @@ -58,22 +59,22 @@ public class EncryptedRepository extends BlobStoreRepository { private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; - private final SecretKey keyEncryptionKey; + private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService - , BlobStoreRepository delegatedRepository, SecretKey keyEncryptionKey) throws NoSuchAlgorithmException { + , BlobStoreRepository delegatedRepository, PasswordBasedEncryptor metadataEncryptor) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; - this.dataEncryptionKeyGenerator = KeyGenerator.getInstance("AES"); - this.dataEncryptionKeyGenerator.init(DEK_KEY_SIZE_IN_BITS, SecureRandom.getInstance(RAND_ALGO)); - this.keyEncryptionKey = keyEncryptionKey; - this.secureRandom = SecureRandom.getInstance(RAND_ALGO); + this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); + this.dataEncryptionKeyGenerator.init(DEK_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.metadataEncryptor = metadataEncryptor; + this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); } @Override protected BlobStore createBlobStore() { - return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, keyEncryptionKey, + return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom); } @@ -99,14 +100,14 @@ private static class EncryptedBlobStoreDecorator implements BlobStore { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; - private final SecretKey keyEncryptionKey; + private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - SecretKey keyEncryptionKey, SecureRandom secureRandom) { + PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = 
dataEncryptionKeyGenerator; - this.keyEncryptionKey = keyEncryptionKey; + this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; } @@ -123,7 +124,7 @@ public BlobContainer blobContainer(BlobPath path) { encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(pathComponent); } return new EncryptedBlobContainerDecorator(delegatedBlobStore.blobContainer(path), - delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, keyEncryptionKey, + delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom); } } @@ -133,15 +134,16 @@ private static class EncryptedBlobContainerDecorator implements BlobContainer { private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; private final KeyGenerator dataEncryptionKeyGenerator; - private final SecretKey keyEncryptionKey; + private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; EncryptedBlobContainerDecorator(BlobContainer delegatedBlobContainer, BlobContainer encryptionMetadataBlobContainer, - KeyGenerator dataEncryptionKeyGenerator, SecretKey keyEncryptionKey, SecureRandom secureRandom) { + KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryptor metadataEncryptor, + SecureRandom secureRandom) { this.delegatedBlobContainer = delegatedBlobContainer; this.encryptionMetadataBlobContainer = encryptionMetadataBlobContainer; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; - this.keyEncryptionKey = keyEncryptionKey; + this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; } @@ -153,7 +155,13 @@ public BlobPath path() { @Override public InputStream readBlob(String blobName) throws IOException { BytesReference encryptedMetadataBytes = Streams.readFully(this.encryptionMetadataBlobContainer.readBlob(blobName)); - final BlobEncryptionMetadata metadata = decryptMetadata(BytesReference.toBytes(encryptedMetadataBytes), keyEncryptionKey); + final byte[] decryptedMetadata; + try { + decryptedMetadata = metadataEncryptor.decrypt(BytesReference.toBytes(encryptedMetadataBytes)); + } catch (ExecutionException | GeneralSecurityException e) { + throw new IOException("Exception while decrypting metadata"); + } + final BlobEncryptionMetadata metadata = BlobEncryptionMetadata.deserializeMetadataFromByteArray(decryptedMetadata); SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, metadata.getDataEncryptionKeyMaterial().length, "AES"); return new DecryptionPacketsInputStream(this.delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, @@ -172,7 +180,12 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } // metadata required to decrypt back the encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); - byte[] encryptedMetadata = encryptMetadata(metadata, keyEncryptionKey, secureRandom); + final byte[] encryptedMetadata; + try { + encryptedMetadata = metadataEncryptor.encrypt(BlobEncryptionMetadata.serializeMetadataToByteArray(metadata)); + } catch (ExecutionException | GeneralSecurityException e) { + throw new IOException("Exception while encrypting metadata"); + } // write the encrypted metadata try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { this.encryptionMetadataBlobContainer.writeBlob(blobName, 
encryptedMetadataInputStream, encryptedMetadata.length, @@ -215,67 +228,4 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws } } - // protected for tests - protected static byte[] encryptMetadata(BlobEncryptionMetadata metadata, SecretKey keyEncryptionKey, - SecureRandom secureRandom) throws IOException { - // serialize metadata to byte[] - final byte[] plaintextMetadata; - try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { - try (StreamOutput out = new OutputStreamStreamOutput(baos)) { - metadata.writeTo(out); - } - plaintextMetadata = baos.toByteArray(); - } - // create cipher for metadata encryption - byte[] iv = new byte[GCM_IV_LENGTH_IN_BYTES]; - secureRandom.nextBytes(iv); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); - final Cipher cipher; - try { - cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); - cipher.init(Cipher.ENCRYPT_MODE, keyEncryptionKey, gcmParameterSpec); - } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { - throw new IOException("Exception while initializing KEK encryption cipher", e); - } - // encrypt metadata - final byte[] encryptedMetadata; - try { - encryptedMetadata = cipher.doFinal(plaintextMetadata); - } catch (IllegalBlockSizeException | BadPaddingException e) { - throw new IOException("Exception while encrypting metadata and DEK", e); - } - // concatenate iv and metadata cipher text - byte[] resultCiphertext = new byte[iv.length + encryptedMetadata.length]; - // prepend IV - System.arraycopy(iv, 0, resultCiphertext, 0, iv.length); - System.arraycopy(encryptedMetadata, 0, resultCiphertext, iv.length, encryptedMetadata.length); - return resultCiphertext; - } - - // protected for tests - protected static BlobEncryptionMetadata decryptMetadata(byte[] encryptedMetadata, SecretKey keyEncryptionKey) throws IOException { - // first bytes are IV - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedMetadata, 0, - GCM_IV_LENGTH_IN_BYTES); - // initialize cipher - final Cipher cipher; - try { - cipher = Cipher.getInstance(KEK_ENCRYPTION_SCHEME); - cipher.init(Cipher.DECRYPT_MODE, keyEncryptionKey, gcmParameterSpec); - } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidAlgorithmParameterException | InvalidKeyException e) { - throw new IOException("Exception while initializing KEK decryption cipher", e); - } - // decrypt metadata (use cipher) - final byte[] decryptedMetadata; - try { - decryptedMetadata = cipher.doFinal(encryptedMetadata, GCM_IV_LENGTH_IN_BYTES, - encryptedMetadata.length - GCM_IV_LENGTH_IN_BYTES); - } catch (IllegalBlockSizeException | BadPaddingException e) { - throw new IOException("Exception while decrypting metadata and DEK", e); - } - try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(decryptedMetadata)) { - return new BlobEncryptionMetadata(decryptedMetadataInputStream); - } - } - } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 135adb1ebbe91..f08afce5901c4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -26,6 +26,7 @@ import javax.crypto.spec.SecretKeySpec; import java.nio.charset.StandardCharsets; import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.security.spec.InvalidKeySpecException; import java.util.Collections; import java.util.HashMap; @@ -37,16 +38,10 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor static final String REPOSITORY_TYPE_NAME = "encrypted"; static final String CIPHER_ALGO = "AES"; - static final int KEK_PBKDF2_ITER = 61616; // funny "uncommon" iter count larger than 60k - static final String KEK_PBKDF2_ALGO = "PBKDF2WithHmacSHA512"; - static final int KEK_KEY_SIZE_IN_BITS = 256; - static final String DEFAULT_KEK_SALT = "the AES key encryption key, which is generated from the repository password using " + - "PBKDF2, is never stored on disk, therefore the salt parameter of PBKDF2, used to protect against offline cracking of the key" + - " using rainbow tables is not required. A hardcoded salt value does not compromise security."; + static final String RAND_ALGO = "SHA1PRNG"; static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", "password", key -> SecureSetting.secureString(key, null)); static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); - static final Setting KEK_PBKDF2_SALT = new Setting<>("kek_salt", DEFAULT_KEK_SALT, Function.identity()); private final Map cachedRepositoryPasswords = new HashMap<>(); @@ -61,14 +56,6 @@ public EncryptedRepositoryPlugin(Settings settings) { } } - SecretKey generateKeyEncryptionKey(char[] password, byte[] salt) throws NoSuchAlgorithmException, InvalidKeySpecException { - PBEKeySpec keySpec = new PBEKeySpec(password, salt, KEK_PBKDF2_ITER, KEK_KEY_SIZE_IN_BITS); - SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KEK_PBKDF2_ALGO); - SecretKey secretKey = keyFactory.generateSecret(keySpec); - SecretKeySpec secret = new SecretKeySpec(secretKey.getEncoded(), CIPHER_ALGO); - return secret; - } - @Override public List> getSettings() { return List.of(ENCRYPTION_PASSWORD_SETTING); @@ -105,10 +92,10 @@ public Repository create(RepositoryMetaData metaData, Function> keyBySaltCache; + private final AtomicReference> currentEncryptionKeySalt; + + public PasswordBasedEncryptor(char[] password, SecureRandom secureRandom) { + this.password = password; + this.secureRandom = secureRandom; + this.keyBySaltCache = CacheBuilder.>builder() + .setMaximumWeight(ENCRYPTION_KEY_CACHE_SIZE) + .build(); + byte[] randomEncryptionKeySaltBytes = new byte[SALT_LENGTH_IN_BYTES]; + secureRandom.nextBytes(randomEncryptionKeySaltBytes); + this.currentEncryptionKeySalt = new AtomicReference<>(new LimitedSupplier<>( + Base64.getEncoder().encodeToString(randomEncryptionKeySaltBytes), + ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY)); + } + + public byte[] encrypt(byte[] data) throws NoSuchPaddingException, NoSuchAlgorithmException, BadPaddingException, + IllegalBlockSizeException, ExecutionException, InvalidAlgorithmParameterException, InvalidKeyException { + Objects.requireNonNull(data); + // retrieve the encryption key + Tuple saltAndEncryptionKey = useEncryptionKey(); + // create the IV randomly + byte[] iv = new byte[IV_LENGTH_IN_BYTES]; + secureRandom.nextBytes(iv); + // create cipher for metadata encryption + GCMParameterSpec gcmParameterSpec = new 
GCMParameterSpec(TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); + Cipher cipher = Cipher.getInstance(CIPHER_ALGO + "/" + CIPHER_MODE + "/" + CIPHER_PADDING); + cipher.init(Cipher.ENCRYPT_MODE, saltAndEncryptionKey.v2(), gcmParameterSpec); + // encrypt + byte[] encryptedData = cipher.doFinal(data); + // concatenate key salt, iv and metadata cipher text + byte[] resultCiphertext = new byte[saltAndEncryptionKey.v1().length + iv.length + encryptedData.length]; + // prepend salt + System.arraycopy(saltAndEncryptionKey.v1(), 0, resultCiphertext, 0, saltAndEncryptionKey.v1().length); + System.arraycopy(iv, 0, resultCiphertext, saltAndEncryptionKey.v1().length, iv.length); + System.arraycopy(encryptedData, 0, resultCiphertext, saltAndEncryptionKey.v1().length + iv.length, encryptedData.length); + return resultCiphertext; + } + + public byte[] decrypt(byte[] encryptedData) throws ExecutionException, NoSuchPaddingException, NoSuchAlgorithmException, + InvalidAlgorithmParameterException, InvalidKeyException, BadPaddingException, IllegalBlockSizeException { + if (Objects.requireNonNull(encryptedData).length < SALT_LENGTH_IN_BYTES + IV_LENGTH_IN_BYTES + TAG_LENGTH_IN_BYTES) { + throw new IllegalArgumentException("Ciphertext too short"); + } + // extract the salt prepended to the ciphertext + byte[] salt = Arrays.copyOf(encryptedData, SALT_LENGTH_IN_BYTES); + // get the key associated with the salt + SecretKey decryptionKey = getKeyFromSalt(new String(Base64.getDecoder().decode(salt), + StandardCharsets.UTF_8)).v2(); + // construct and initialize the decryption cipher + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedData, SALT_LENGTH_IN_BYTES, + IV_LENGTH_IN_BYTES); + Cipher cipher = Cipher.getInstance(CIPHER_ALGO + "/" + CIPHER_MODE + "/" + CIPHER_PADDING); + cipher.init(Cipher.DECRYPT_MODE, decryptionKey, gcmParameterSpec); + // decrypt metadata (use cipher) + return cipher.doFinal(encryptedData, SALT_LENGTH_IN_BYTES + IV_LENGTH_IN_BYTES, + encryptedData.length - SALT_LENGTH_IN_BYTES - IV_LENGTH_IN_BYTES); + } + + private SecretKey generatePasswordBasedSecretKey(char[] password, byte[] salt) throws NoSuchAlgorithmException, + InvalidKeySpecException { + PBEKeySpec keySpec = new PBEKeySpec(password, salt, KDF_ITER, KEY_SIZE_IN_BITS); + SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KDF_ALGO); + SecretKey secretKey = keyFactory.generateSecret(keySpec); + SecretKeySpec secret = new SecretKeySpec(secretKey.getEncoded(), CIPHER_ALGO); + return secret; + } + + private Tuple getKeyFromSalt(String salt) throws ExecutionException { + return this.keyBySaltCache.computeIfAbsent(salt, (ignore) -> { + byte[] saltBytes = Base64.getDecoder().decode(salt); + SecretKey secretKey = generatePasswordBasedSecretKey(password, saltBytes); + return new Tuple<>(saltBytes, secretKey); + }); + } + + private void resetCurrentEncryptionKeySalt() { + byte[] randomEncryptionKeySaltBytes = new byte[SALT_LENGTH_IN_BYTES]; + secureRandom.nextBytes(randomEncryptionKeySaltBytes); + this.currentEncryptionKeySalt.set(new LimitedSupplier<>( + Base64.getEncoder().encodeToString(randomEncryptionKeySaltBytes), + ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY)); + } + + private Tuple useEncryptionKey() throws ExecutionException { + Optional encryptionKeySalt = this.currentEncryptionKeySalt.get().get(); + if (encryptionKeySalt.isPresent()) { + return getKeyFromSalt(encryptionKeySalt.get()); + } + // change the salt and generate a new encryption key + resetCurrentEncryptionKeySalt(); + // try again + 
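For orientation, the byte layout produced by encrypt() above is salt || IV || AES-GCM ciphertext (the GCM tag is appended to the ciphertext by the cipher itself), with the AES key derived from the repository password via PBKDF2 and cached per salt. The following is a minimal, self-contained sketch of that scheme using plain JCE classes only; the salt and IV lengths, iteration count and key size are illustrative placeholders rather than the exact constants of PasswordBasedEncryptor, and the per-salt key cache and salt rotation are omitted.

    import javax.crypto.Cipher;
    import javax.crypto.SecretKey;
    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.GCMParameterSpec;
    import javax.crypto.spec.PBEKeySpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.security.SecureRandom;
    import java.util.Arrays;

    public class PasswordBasedCryptoSketch {

        // illustrative parameters only; the plugin's constants may differ
        private static final int SALT_LEN = 16;
        private static final int IV_LEN = 12;
        private static final int TAG_LEN_BITS = 128;
        private static final int PBKDF2_ITERATIONS = 10_000;
        private static final int KEY_LEN_BITS = 256;

        static SecretKey deriveKey(char[] password, byte[] salt) throws Exception {
            PBEKeySpec spec = new PBEKeySpec(password, salt, PBKDF2_ITERATIONS, KEY_LEN_BITS);
            byte[] keyBytes = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512").generateSecret(spec).getEncoded();
            return new SecretKeySpec(keyBytes, "AES");
        }

        static byte[] encrypt(char[] password, byte[] plaintext) throws Exception {
            SecureRandom random = new SecureRandom();
            byte[] salt = new byte[SALT_LEN];
            byte[] iv = new byte[IV_LEN];
            random.nextBytes(salt);
            random.nextBytes(iv);
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.ENCRYPT_MODE, deriveKey(password, salt), new GCMParameterSpec(TAG_LEN_BITS, iv));
            byte[] ciphertext = cipher.doFinal(plaintext);
            // result layout: salt || iv || ciphertext-with-tag
            byte[] result = new byte[SALT_LEN + IV_LEN + ciphertext.length];
            System.arraycopy(salt, 0, result, 0, SALT_LEN);
            System.arraycopy(iv, 0, result, SALT_LEN, IV_LEN);
            System.arraycopy(ciphertext, 0, result, SALT_LEN + IV_LEN, ciphertext.length);
            return result;
        }

        static byte[] decrypt(char[] password, byte[] blob) throws Exception {
            byte[] salt = Arrays.copyOfRange(blob, 0, SALT_LEN);
            // GCMParameterSpec can reference the IV in place, mirroring the decrypt() above
            GCMParameterSpec gcmSpec = new GCMParameterSpec(TAG_LEN_BITS, blob, SALT_LEN, IV_LEN);
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.DECRYPT_MODE, deriveKey(password, salt), gcmSpec);
            return cipher.doFinal(blob, SALT_LEN + IV_LEN, blob.length - SALT_LEN - IV_LEN);
        }
    }

A round trip such as decrypt(password, encrypt(password, data)) should return the original bytes, which is essentially what the PasswordBasedEncryptorTests added a couple of commits further down assert for the empty input.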
return useEncryptionKey(); + } + + static class LimitedSupplier implements Supplier> { + + private final AtomicLong count; + + private final Optional value; + + private final long limit; + LimitedSupplier(T value, long limit) { + this.count = new AtomicLong(0L); + this.value = Optional.of(Objects.requireNonNull(value)); + this.limit = limit; + } + + @Override + public Optional get() { + if (count.get() <= limit && count.incrementAndGet() <= limit) { + return value; + } + return Optional.empty(); + } + + } + +} From 1ee490ab3808431c55c78546fe1fa779bbe14b34 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 7 Jan 2020 16:23:33 +0200 Subject: [PATCH 069/142] Proper reordering of the two writes --- .../encrypted/EncryptedRepository.java | 64 ++++++++++--------- 1 file changed, 35 insertions(+), 29 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 1427475568eea..53172aea6e139 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -15,26 +15,16 @@ import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import javax.crypto.BadPaddingException; -import javax.crypto.Cipher; -import javax.crypto.IllegalBlockSizeException; import javax.crypto.KeyGenerator; -import javax.crypto.NoSuchPaddingException; import javax.crypto.SecretKey; -import javax.crypto.spec.GCMParameterSpec; import javax.crypto.spec.SecretKeySpec; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.GeneralSecurityException; -import java.security.InvalidAlgorithmParameterException; -import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.List; @@ -47,15 +37,12 @@ public class EncryptedRepository extends BlobStoreRepository { static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; static final String DEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; - static final String KEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final int DEK_KEY_SIZE_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB - // when something about the encryption scheme changes (eg. 
metadata format) we increment this version number - static final int ENCRYPTION_PROTOCOL_VERSION_NUMBER = 1; - private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata-"; + private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata"; private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; @@ -119,7 +106,7 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { BlobPath encryptionMetadataBlobPath = BlobPath.cleanPath(); - encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(ENCRYPTION_METADATA_PREFIX + ENCRYPTION_PROTOCOL_VERSION_NUMBER); + encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(ENCRYPTION_METADATA_PREFIX); for (String pathComponent : path) { encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(pathComponent); } @@ -154,16 +141,19 @@ public BlobPath path() { @Override public InputStream readBlob(String blobName) throws IOException { + // read metadata BytesReference encryptedMetadataBytes = Streams.readFully(this.encryptionMetadataBlobContainer.readBlob(blobName)); final byte[] decryptedMetadata; try { decryptedMetadata = metadataEncryptor.decrypt(BytesReference.toBytes(encryptedMetadataBytes)); } catch (ExecutionException | GeneralSecurityException e) { - throw new IOException("Exception while decrypting metadata"); + throw new IOException("Exception while decrypting metadata", e); } final BlobEncryptionMetadata metadata = BlobEncryptionMetadata.deserializeMetadataFromByteArray(decryptedMetadata); + // decrypt metadata SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, metadata.getDataEncryptionKeyMaterial().length, "AES"); + // read and decrypt blob return new DecryptionPacketsInputStream(this.delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, metadata.getNonce(), metadata.getPacketLengthInBytes()); } @@ -172,58 +162,74 @@ public InputStream readBlob(String blobName) throws IOException { public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); int nonce = secureRandom.nextInt(); - // first write the encrypted blob - long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); - try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, - dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); - } - // metadata required to decrypt back the encrypted blob + // this is the metadata required to decrypt back the encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); + // encrypt metadata final byte[] encryptedMetadata; try { encryptedMetadata = metadataEncryptor.encrypt(BlobEncryptionMetadata.serializeMetadataToByteArray(metadata)); } catch (ExecutionException | GeneralSecurityException e) { - throw new IOException("Exception while encrypting metadata"); + throw new IOException("Exception while encrypting metadata", e); } - // write the encrypted metadata + // first write the encrypted metadata try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { 
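The reordering in this commit writes the small encrypted-metadata blob before the potentially large encrypted data blob, so an interrupted upload can at worst leave an orphaned metadata blob behind, never a data blob that lacks the metadata needed to decrypt it. The metadata lives in a container whose path mirrors the data container under a fixed prefix; a rough illustration of that mapping, using plain strings in place of the internal BlobPath type and example blob paths only:

    public class MetadataPathSketch {
        // hypothetical helper; the plugin builds the equivalent structure via BlobPath components
        static String metadataPathFor(String dataBlobPath) {
            return "encryption-metadata/" + dataBlobPath;
        }

        public static void main(String[] args) {
            // example paths only
            System.out.println(metadataPathFor("indices/0/0/__some-segment-file"));
            // prints: encryption-metadata/indices/0/0/__some-segment-file
        }
    }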
this.encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, - false /* overwrite any blob with the same name because it cannot correspond to any other encrypted blob */); + failIfAlreadyExists); + } + // afterwards write the encrypted data blob + long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); + try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, + dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { + this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } } @Override public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { - // does not support atomic write + // the encrypted repository does not offer an alternative implementation for atomic writes + // fallback to regular write writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); } @Override public DeleteResult delete() throws IOException { + // first delete the encrypted data blob + DeleteResult deleteResult = this.delegatedBlobContainer.delete(); + // then delete metadata this.encryptionMetadataBlobContainer.delete(); - return this.delegatedBlobContainer.delete(); + return deleteResult; } @Override public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOException { - this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + // first delete the encrypted data blob this.delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + // then delete metadata + this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); } @Override public Map listBlobs() throws IOException { + // the encrypted data blob container is the source-of-truth for list operations + // the metadata blob container mirrors its structure, but in some failure cases it might contain + // additional orphaned metadata blobs return this.delegatedBlobContainer.listBlobs(); } @Override public Map children() throws IOException { + // the encrypted data blob container is the source-of-truth for child container operations + // the metadata blob container mirrors its structure, but in some failure cases it might contain + // additional orphaned metadata blobs return this.delegatedBlobContainer.children(); } @Override public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + // the encrypted data blob container is the source-of-truth for list operations + // the metadata blob container mirrors its structure, but in some failure cases it might contain + // additional orphaned metadata blobs return this.delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); } } From 5a1eee525d93b56c8eee221a03a49693085a6afb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 7 Jan 2020 19:32:36 +0200 Subject: [PATCH 070/142] License check --- .../license/XPackLicenseState.java | 19 +++++++++++++++++++ .../plugin/repository-encrypted/build.gradle | 5 +++++ .../encrypted/EncryptedRepository.java | 19 +++++++++++++++++++ .../encrypted/EncryptedRepositoryPlugin.java | 15 ++++++++------- .../encrypted/PasswordBasedEncryptor.java | 6 ++++++ 5 files changed, 57 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 3c4c6dc0d6777..59e5fcadd6ce4 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -527,6 +527,25 @@ public synchronized boolean isWatcherAllowed() { } } + /** + * Determine if creating and using an encrypted repository is available based on the current license. + */ + public synchronized boolean isEncryptedRepositoryAllowed() { + Status localStatus = status; + + if (localStatus.active == false) { + return false; + } + + switch (localStatus.mode) { + case TRIAL: + case PLATINUM: + return true; + default: + return false; + } + } + /** * Monitoring is always available as long as there is a valid license * diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index 5a2f82946f711..0a5ccb046854c 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -8,4 +8,9 @@ esplugin { extendedPlugins = ['x-pack-core'] } +dependencies { + compileOnly project(path: xpackModule('core'), configuration: 'default') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') +} + integTest.enabled = false diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 53172aea6e139..80df19cdf19aa 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,6 +6,8 @@ package org.elasticsearch.repositories.encrypted; +import org.apache.lucene.index.IndexCommit; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.BlobContainer; @@ -16,7 +18,13 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.SnapshotId; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; @@ -59,6 +67,17 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); } + @Override + public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, + ActionListener listener) { + if (EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); + } else { + listener.onFailure(LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); + } + } + @Override protected BlobStore 
createBlobStore() { return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index f08afce5901c4..b0fa003c36c20 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -15,19 +15,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.xpack.core.XPackPlugin; -import javax.crypto.SecretKey; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; -import java.security.spec.InvalidKeySpecException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -45,6 +41,8 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor private final Map cachedRepositoryPasswords = new HashMap<>(); + protected static XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } + public EncryptedRepositoryPlugin(Settings settings) { // cache the passwords for all encrypted repositories during plugin instantiation // the keystore-based secure passwords are not readable on repository instantiation @@ -73,6 +71,9 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { + if (false == getLicenseState().isEncryptedRepositoryAllowed()) { + throw LicenseUtils.newComplianceException(REPOSITORY_TYPE_NAME + " snapshot repository"); + } String delegateType = DELEGATE_TYPE.get(metaData.settings()); if (Strings.hasLength(delegateType) == false) { throw new IllegalArgumentException(DELEGATE_TYPE.getKey() + " must be set"); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java index 060ea3d16128e..32ee8fbd56ea3 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + package org.elasticsearch.repositories.encrypted; import org.apache.logging.log4j.util.Supplier; From e51a30427e18962bec99acc52c0cf1d28efb02d6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 7 Jan 2020 20:15:32 +0200 Subject: [PATCH 071/142] Ooops metadata decrypt logic flaw --- .../encrypted/PasswordBasedEncryptor.java | 3 +-- .../PasswordBasedEncryptorTests.java | 23 +++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java index 32ee8fbd56ea3..97d77e3d3bad4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java @@ -113,8 +113,7 @@ public byte[] decrypt(byte[] encryptedData) throws ExecutionException, NoSuchPad // extract the salt prepended to the ciphertext byte[] salt = Arrays.copyOf(encryptedData, SALT_LENGTH_IN_BYTES); // get the key associated with the salt - SecretKey decryptionKey = getKeyFromSalt(new String(Base64.getDecoder().decode(salt), - StandardCharsets.UTF_8)).v2(); + SecretKey decryptionKey = getKeyFromSalt(Base64.getEncoder().encodeToString(salt)).v2(); // construct and initialize the decryption cipher GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedData, SALT_LENGTH_IN_BYTES, IV_LENGTH_IN_BYTES); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java new file mode 100644 index 0000000000000..dbf1cb5d591c2 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
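The one-line fix above is subtle: the derived-key cache is keyed by the Base64 encoding of the raw salt bytes (the form encrypt() keeps in its salt supplier), so decrypt() has to re-encode the salt it strips off the front of the ciphertext. The previous code Base64-decoded those raw bytes instead, which does not reproduce the cache key and generally fails outright on random salt bytes. A tiny illustration of the correct round trip, with an arbitrary example salt:

    import java.util.Base64;

    public class SaltCacheKeySketch {
        public static void main(String[] args) {
            byte[] rawSalt = {42, -7, 13, 99, 0, 5, 77, -120};              // example value only
            // encrypt() caches the derived key under the Base64 encoding of the raw salt
            String cacheKey = Base64.getEncoder().encodeToString(rawSalt);
            // decrypt() reads the same raw bytes back from the ciphertext prefix and must re-encode them
            String lookupKey = Base64.getEncoder().encodeToString(rawSalt);
            System.out.println(cacheKey.equals(lookupKey));                 // true
            // the buggy version called Base64.getDecoder().decode(rawSalt) instead, which does not
            // reproduce the cache key and usually throws, since random salt bytes are rarely valid Base64
        }
    }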
+ */ + +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.security.SecureRandom; + +public class PasswordBasedEncryptorTests extends ESTestCase { + + public void testEncryptAndDecryptEmpty() throws Exception { + PasswordBasedEncryptor encryptor = new PasswordBasedEncryptor(new char[] {'p', 'a', 's', 's'}, SecureRandom.getInstance("SHA1PRNG")); + byte[] emptyEncrypted = encryptor.encrypt(new byte[0]); + byte[] ans = encryptor.decrypt(emptyEncrypted); + assertThat(ans.length, Matchers.is(0)); + } + +} From 10872947c0ae9b408c0c177cc6be28752d71c7ee Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 8 Jan 2020 09:14:45 +0200 Subject: [PATCH 072/142] License and consistent --- .../encrypted/EncryptedRepository.java | 3 +- .../encrypted/EncryptedRepositoryPlugin.java | 28 +++++++++++++++++-- .../encrypted/PasswordBasedEncryptor.java | 1 - 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 80df19cdf19aa..7d10f20aeabce 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -74,7 +74,8 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s if (EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); } else { - listener.onFailure(LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); + listener.onFailure(LicenseUtils.newComplianceException( + EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index b0fa003c36c20..71038d8f2676b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -6,13 +6,17 @@ package org.elasticsearch.repositories.encrypted; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.ConsistentSettingsService; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.license.LicenseUtils; @@ -28,15 +32,17 @@ import java.util.HashMap; import java.util.List; import 
java.util.Map; +import java.util.Set; import java.util.function.Function; public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { + static final Logger LOGGER = LogManager.getLogger(EncryptedRepository.class); static final String REPOSITORY_TYPE_NAME = "encrypted"; static final String CIPHER_ALGO = "AES"; static final String RAND_ALGO = "SHA1PRNG"; static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", - "password", key -> SecureSetting.secureString(key, null)); + "password", key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); private final Map cachedRepositoryPasswords = new HashMap<>(); @@ -44,6 +50,11 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor protected static XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } public EncryptedRepositoryPlugin(Settings settings) { + if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + LOGGER.warn("Encrypted snapshot repositories are not allowed for the current license." + + "Snapshotting to any encrypted repository is not permitted and will fail.", + LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); + } // cache the passwords for all encrypted repositories during plugin instantiation // the keystore-based secure passwords are not readable on repository instantiation for (String repositoryName : ENCRYPTION_PASSWORD_SETTING.getNamespaces(settings)) { @@ -62,6 +73,12 @@ public List> getSettings() { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry, final ClusterService clusterService) { + final boolean areRepositoriesPasswordsConsistent = new ConsistentSettingsService(env.settings(), clusterService, + Set.of(ENCRYPTION_PASSWORD_SETTING)).areAllConsistent(); + // there might not be any encrypted repositories installed so no need to fail the node at this point + if (false == areRepositoriesPasswordsConsistent) { + LOGGER.warn("The password for encrypted snapshot repositories are not identical across all nodes."); + } return Collections.singletonMap(REPOSITORY_TYPE_NAME, new Repository.Factory() { @Override @@ -71,8 +88,13 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { - if (false == getLicenseState().isEncryptedRepositoryAllowed()) { - throw LicenseUtils.newComplianceException(REPOSITORY_TYPE_NAME + " snapshot repository"); + if (false == areRepositoriesPasswordsConsistent) { + throw new SettingsException("The password for encrypted snapshot repositories are not identical across all nodes."); + } + if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + LOGGER.warn("Encrypted snapshot repositories are not allowed for the current license." 
+ + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted and will fail.", + LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); } String delegateType = DELEGATE_TYPE.get(metaData.settings()); if (Strings.hasLength(delegateType) == false) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java index 97d77e3d3bad4..dbf9f6777debf 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java @@ -20,7 +20,6 @@ import javax.crypto.spec.GCMParameterSpec; import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; From b788e9878e4f2b7d37324f411269192b5a72817f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 8 Jan 2020 15:49:08 +0200 Subject: [PATCH 073/142] Consistent Secure Settings reshuffle --- .../settings/ConsistentSettingsService.java | 115 +++++++++--------- .../ConsistentSettingsServiceTests.java | 10 +- .../PasswordBasedEncryptorTests.java | 3 +- 3 files changed, 62 insertions(+), 66 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java index 411a470238638..6217fbe73cff7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -51,7 +51,7 @@ /** * Used to publish secure setting hashes in the cluster state and to validate those hashes against the local values of those same settings. * This is colloquially referred to as the secure setting consistency check. It will publish and verify hashes only for the collection - * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property. + * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property. 
*/ public final class ConsistentSettingsService { private static final Logger logger = LogManager.getLogger(ConsistentSettingsService.class); @@ -59,6 +59,7 @@ public final class ConsistentSettingsService { private final Settings settings; private final ClusterService clusterService; private final Collection> secureSettingsCollection; + private final Map digestsBySettingKey; private final SecretKeyFactory pbkdf2KeyFactory; public ConsistentSettingsService(Settings settings, ClusterService clusterService, @@ -66,6 +67,8 @@ public ConsistentSettingsService(Settings settings, ClusterService clusterServic this.settings = settings; this.clusterService = clusterService; this.secureSettingsCollection = secureSettingsCollection; + // eagerly compute digests because the keystore could be closed at a later time + this.digestsBySettingKey = computeDigestOfConsistentSecureSettings(); // this is used to compute the PBKDF2 hash (the published one) try { this.pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); @@ -79,78 +82,80 @@ public ConsistentSettingsService(Settings settings, ClusterService clusterServic * published by the master node only. Note that this is not designed for {@link SecureSettings} implementations that are mutable. */ public LocalNodeMasterListener newHashPublisher() { - // eagerly compute hashes to be published - final Map computedHashesOfConsistentSettings = computeHashesOfConsistentSecureSettings(); - return new HashesPublisher(computedHashesOfConsistentSettings, clusterService); + // eagerly compute salted hashes to be published + final Map hashesBySettingKey = new HashMap<>(); + for (Map.Entry entry : this.digestsBySettingKey.entrySet()) { + String salt = UUIDs.randomBase64UUID(); + byte[] publicHash = computeSaltedPBKDF2Hash(entry.getValue(), salt.getBytes(StandardCharsets.UTF_8)); + String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8); + hashesBySettingKey.put(entry.getKey(), salt + ":" + encodedPublicHash); + } + return new HashesPublisher(hashesBySettingKey, clusterService); } /** * Verifies that the hashes of consistent secure settings in the latest {@code ClusterState} verify for the values of those same * settings on the local node. The settings to be checked are passed in the constructor. Also, validates that a missing local - * value is also missing in the published set, and vice-versa. + * value is also missing in the published set, and vice-versa. 
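What actually gets published per consistent secure setting is a string of the form <salt> + ":" + Base64(PBKDF2(localDigest, salt)), so other nodes can check their local digest against it without the digest itself ever entering the cluster state. Below is a compact sketch of that verify step; the PBKDF2 iteration count and output length of computeSaltedPBKDF2Hash are not shown in this patch, so the parameters here are placeholders, and the byte-to-char conversion is a simplification.

    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class SaltedHashVerifySketch {

        static String saltedHash(byte[] localDigest, String salt) throws Exception {
            // PBKDF2 takes a char[] secret; widening each digest byte is a simplification here
            char[] digestChars = new char[localDigest.length];
            for (int i = 0; i < localDigest.length; i++) {
                digestChars[i] = (char) (localDigest[i] & 0xff);
            }
            PBEKeySpec spec = new PBEKeySpec(digestChars, salt.getBytes(StandardCharsets.UTF_8),
                    5000, 512);                                             // placeholder parameters
            byte[] hash = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512").generateSecret(spec).getEncoded();
            return Base64.getEncoder().encodeToString(hash);
        }

        static boolean verifies(byte[] localDigest, String publishedSaltAndHash) throws Exception {
            String[] parts = publishedSaltAndHash.split(":");
            return parts.length == 2 && parts[1].equals(saltedHash(localDigest, parts[0]));
        }
    }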
*/ public boolean areAllConsistent() { - final ClusterState state = clusterService.state(); - final Map publishedHashesOfConsistentSettings = state.metaData().hashesOfConsistentSettings(); - final Set publishedSettingKeysToVerify = new HashSet<>(); - publishedSettingKeysToVerify.addAll(publishedHashesOfConsistentSettings.keySet()); - final AtomicBoolean allConsistent = new AtomicBoolean(true); - forEachConcreteSecureSettingDo(concreteSecureSetting -> { - final String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(concreteSecureSetting.getKey()); - final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); - if (publishedSaltAndHash == null && localHash == null) { - // consistency of missing - logger.debug("no published hash for the consistent secure setting [{}] but it also does NOT exist on the local node", - concreteSecureSetting.getKey()); - } else if (publishedSaltAndHash == null && localHash != null) { + ClusterState state = clusterService.state(); + if (state.metaData() == null || state.metaData().hashesOfConsistentSettings() == null) { + throw new IllegalStateException("Hashes of consistent secure settings are not yet published by the master node. Cannot " + + "check consistency at this time"); + } + Map publishedHashesOfConsistentSettings = state.metaData().hashesOfConsistentSettings(); + AtomicBoolean allConsistent = new AtomicBoolean(true); + for (String localSettingName : digestsBySettingKey.keySet()) { + if (false == publishedHashesOfConsistentSettings.containsKey(localSettingName)) { // setting missing on master but present locally - logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", - concreteSecureSetting.getKey()); + logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", localSettingName); if (state.nodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Master node cannot validate consistent setting. 
No published hash for [" - + concreteSecureSetting.getKey() + "] but setting exists."); + + localSettingName + "] but setting exists."); } allConsistent.set(false); - } else if (publishedSaltAndHash != null && localHash == null) { - // setting missing locally but present on master + } + } + for (String publishedSettingName : publishedHashesOfConsistentSettings.keySet()) { + boolean publishedMatches = false; + for (Setting secureSetting : secureSettingsCollection) { + if (secureSetting.match(publishedSettingName)) { + publishedMatches = true; + break; + } + } + if (publishedMatches && false == digestsBySettingKey.containsKey(publishedSettingName)) { logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", - concreteSecureSetting.getKey()); + publishedSettingName); allConsistent.set(false); - } else { - assert publishedSaltAndHash != null; - assert localHash != null; - final String[] parts = publishedSaltAndHash.split(":"); + } + } + for (Map.Entry publishedSaltAndHashForSetting : publishedHashesOfConsistentSettings.entrySet()) { + String settingName = publishedSaltAndHashForSetting.getKey(); + String publishedSaltAndHash = publishedSaltAndHashForSetting.getValue(); + if (digestsBySettingKey.containsKey(settingName)) { + String[] parts = publishedSaltAndHash.split(":"); if (parts == null || parts.length != 2) { throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + " ] for secure setting [" - + concreteSecureSetting.getKey() + "] is invalid"); + + settingName + "] is invalid"); } - final String publishedSalt = parts[0]; - final String publishedHash = parts[1]; - final byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localHash, publishedSalt.getBytes(StandardCharsets.UTF_8)); + String publishedSalt = parts[0]; + String publishedHash = parts[1]; + byte[] localDigest = digestsBySettingKey.get(settingName); + byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localDigest, publishedSalt.getBytes(StandardCharsets.UTF_8)); final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8); if (false == publishedHash.equals(computedSaltedHash)) { logger.warn("the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]", - publishedHash, concreteSecureSetting.getKey(), computedSaltedHash); + publishedHash, settingName, computedSaltedHash); if (state.nodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Master node cannot validate consistent setting. 
The published hash [" - + publishedHash + "] of the consistent secure setting [" + concreteSecureSetting.getKey() + + publishedHash + "] of the consistent secure setting [" + settingName + "] differs from the locally computed one [" + computedSaltedHash + "]."); } allConsistent.set(false); } } - publishedSettingKeysToVerify.remove(concreteSecureSetting.getKey()); - }); - // another case of settings missing locally, when group settings have not expanded to all the keys published - for (String publishedSettingKey : publishedSettingKeysToVerify) { - for (Setting setting : secureSettingsCollection) { - if (setting.match(publishedSettingKey)) { - // setting missing locally but present on master - logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", - publishedSettingKey); - allConsistent.set(false); - } - } } return allConsistent.get(); } @@ -175,18 +180,15 @@ private void forEachConcreteSecureSettingDo(Consumer> secureSet } } - private Map computeHashesOfConsistentSecureSettings() { - final Map hashesBySettingKey = new HashMap<>(); + private Map computeDigestOfConsistentSecureSettings() { + Map digestsBySettingKey = new HashMap<>(); forEachConcreteSecureSettingDo(concreteSecureSetting -> { - final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); - if (localHash != null) { - final String salt = UUIDs.randomBase64UUID(); - final byte[] publicHash = computeSaltedPBKDF2Hash(localHash, salt.getBytes(StandardCharsets.UTF_8)); - final String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8); - hashesBySettingKey.put(concreteSecureSetting.getKey(), salt + ":" + encodedPublicHash); + byte[] localDigest = concreteSecureSetting.getSecretDigest(settings); + if (localDigest != null) { + digestsBySettingKey.put(concreteSecureSetting.getKey(), localDigest); } }); - return hashesBySettingKey; + return digestsBySettingKey; } private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) { @@ -209,12 +211,11 @@ private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) { static final class HashesPublisher implements LocalNodeMasterListener { - // eagerly compute hashes to be published final Map computedHashesOfConsistentSettings; final ClusterService clusterService; HashesPublisher(Map computedHashesOfConsistentSettings, ClusterService clusterService) { - this.computedHashesOfConsistentSettings = Map.copyOf(computedHashesOfConsistentSettings); + this.computedHashesOfConsistentSettings = computedHashesOfConsistentSettings; this.clusterService = clusterService; } diff --git a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java index 687b74e3397cb..c55038aa1ecc2 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java @@ -65,15 +65,12 @@ public void testSingleStringSetting() throws Exception { assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); // publish new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); - ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)); - 
assertThat(consistentService.areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); // change value secureSettings.setString(stringSetting.getKey(), "_TYPO_somethingsecure"); - assertThat(consistentService.areAllConsistent(), is(false)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); // publish change new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); - assertThat(consistentService.areAllConsistent(), is(true)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); } @@ -92,15 +89,12 @@ public void testSingleAffixSetting() throws Exception { assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); // publish new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); - ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)); - assertThat(consistentService.areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); // change value secureSettings.setString("test.affix.second.bar", "_TYPO_second_secure"); - assertThat(consistentService.areAllConsistent(), is(false)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); // publish change new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); - assertThat(consistentService.areAllConsistent(), is(true)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); // add value secureSettings.setString("test.affix.third.bar", "third_secure"); diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java index dbf1cb5d591c2..6d614137215c7 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java @@ -14,7 +14,8 @@ public class PasswordBasedEncryptorTests extends ESTestCase { public void testEncryptAndDecryptEmpty() throws Exception { - PasswordBasedEncryptor encryptor = new PasswordBasedEncryptor(new char[] {'p', 'a', 's', 's'}, SecureRandom.getInstance("SHA1PRNG")); + PasswordBasedEncryptor encryptor = new PasswordBasedEncryptor(new char[] {'p', 'a', 's', 's'}, + SecureRandom.getInstance("SHA1PRNG")); byte[] emptyEncrypted = encryptor.encrypt(new byte[0]); byte[] ans = encryptor.decrypt(emptyEncrypted); assertThat(ans.length, Matchers.is(0)); From 4e204a396379140996f2410714ac19e440a0b96f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 9 Jan 2020 00:13:15 +0200 Subject: [PATCH 074/142] Consistency check --- .../settings/ConsistentSettingsService.java | 46 +++++++++++++++++-- .../snapshots/SnapshotsService.java | 6 ++- 
.../encrypted/EncryptedRepository.java | 30 +++++++++--- .../encrypted/EncryptedRepositoryPlugin.java | 29 ++++-------- 4 files changed, 80 insertions(+), 31 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java index 6217fbe73cff7..d7f6e837061df 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -100,11 +100,7 @@ public LocalNodeMasterListener newHashPublisher() { */ public boolean areAllConsistent() { ClusterState state = clusterService.state(); - if (state.metaData() == null || state.metaData().hashesOfConsistentSettings() == null) { - throw new IllegalStateException("Hashes of consistent secure settings are not yet published by the master node. Cannot " + - "check consistency at this time"); - } - Map publishedHashesOfConsistentSettings = state.metaData().hashesOfConsistentSettings(); + Map publishedHashesOfConsistentSettings = getPublishedHashesOfConsistentSettings(); AtomicBoolean allConsistent = new AtomicBoolean(true); for (String localSettingName : digestsBySettingKey.keySet()) { if (false == publishedHashesOfConsistentSettings.containsKey(localSettingName)) { @@ -160,6 +156,46 @@ public boolean areAllConsistent() { return allConsistent.get(); } + public boolean isConsistent(SecureSetting secureSetting) { + for (String localSettingName : digestsBySettingKey.keySet()) { + if (secureSetting.match(localSettingName)) { + Map publishedHashesOfConsistentSettings = getPublishedHashesOfConsistentSettings(); + if (false == publishedHashesOfConsistentSettings.containsKey(localSettingName)) { + logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", + localSettingName); + return false; + } + String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(localSettingName); + String[] parts = publishedSaltAndHash.split(":"); + if (parts == null || parts.length != 2) { + throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + " ] for secure setting [" + + localSettingName + "] is invalid"); + } + String publishedSalt = parts[0]; + String publishedHash = parts[1]; + byte[] localDigest = digestsBySettingKey.get(localSettingName); + byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localDigest, publishedSalt.getBytes(StandardCharsets.UTF_8)); + final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8); + if (false == publishedHash.equals(computedSaltedHash)) { + logger.warn("the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]", + publishedHash, localSettingName, computedSaltedHash); + return false; + } + return true; + } + } + throw new IllegalArgumentException("Invalid setting [" + secureSetting.getKey() + "] for consistency check."); + } + + private Map getPublishedHashesOfConsistentSettings() { + ClusterState state = clusterService.state(); + if (state.metaData() == null || state.metaData().hashesOfConsistentSettings() == null) { + throw new IllegalStateException("Hashes of consistent secure settings are not yet published by the master node. 
Cannot " + + "check consistency at this time"); + } + return state.metaData().hashesOfConsistentSettings(); + } + /** * Iterate over the passed in secure settings, expanding {@link Setting.AffixSetting} to concrete settings, in the scope of the local * settings. diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index cfb2112b4044d..ecac94f54750b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -268,7 +268,6 @@ public void createSnapshot(final CreateSnapshotRequest request, final ActionList validate(repositoryName, snapshotName); final SnapshotId snapshotId = new SnapshotId(snapshotName, UUIDs.randomBase64UUID()); // new UUID for the snapshot final StepListener repositoryDataListener = new StepListener<>(); - repositoriesService.repository(repositoryName).getRepositoryData(repositoryDataListener); repositoryDataListener.whenComplete(repositoryData -> { clusterService.submitStateUpdateTask("create_snapshot [" + snapshotName + ']', new ClusterStateUpdateTask() { @@ -349,6 +348,11 @@ public TimeValue timeout() { } }); }, listener::onFailure); + try { + repositoriesService.repository(repositoryName).getRepositoryData(repositoryDataListener); + } catch(Exception e) { + repositoryDataListener.onFailure(e); + } } /** diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 7d10f20aeabce..b05b2aaa2e717 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -17,12 +17,16 @@ import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.settings.ConsistentSettingsService; +import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; @@ -55,15 +59,18 @@ public class EncryptedRepository extends BlobStoreRepository { private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; + private final ConsistentSettingsService consistentSettingsService; private final SecureRandom secureRandom; - protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService - , BlobStoreRepository delegatedRepository, PasswordBasedEncryptor metadataEncryptor) throws NoSuchAlgorithmException { + protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry 
namedXContentRegistry, ClusterService clusterService, + BlobStoreRepository delegatedRepository, PasswordBasedEncryptor metadataEncryptor, + ConsistentSettingsService consistentSettingsService) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); this.dataEncryptionKeyGenerator.init(DEK_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.metadataEncryptor = metadataEncryptor; + this.consistentSettingsService = consistentSettingsService; this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); } @@ -82,11 +89,17 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override protected BlobStore createBlobStore() { return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, - secureRandom); + secureRandom, consistentSettingsService); } @Override protected void doStart() { + SecureSetting passwordSettingForThisRepo = + (SecureSetting) EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()); + if (false == consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { + throw new RepositoryException(metadata.name(), "The value for the Secure setting [" + passwordSettingForThisRepo.getKey() + + "] does not match the master's"); + } this.delegatedRepository.start(); super.doStart(); } @@ -109,13 +122,16 @@ private static class EncryptedBlobStoreDecorator implements BlobStore { private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; + private final ConsistentSettingsService consistentSettingsService; EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { + PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom, + ConsistentSettingsService consistentSettingsService) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; + this.consistentSettingsService = consistentSettingsService; } @Override @@ -132,7 +148,7 @@ public BlobContainer blobContainer(BlobPath path) { } return new EncryptedBlobContainerDecorator(delegatedBlobStore.blobContainer(path), delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, metadataEncryptor, - secureRandom); + secureRandom, consistentSettingsService); } } @@ -143,15 +159,17 @@ private static class EncryptedBlobContainerDecorator implements BlobContainer { private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; + private final ConsistentSettingsService consistentSettingsService; EncryptedBlobContainerDecorator(BlobContainer delegatedBlobContainer, BlobContainer encryptionMetadataBlobContainer, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryptor metadataEncryptor, - SecureRandom secureRandom) { + SecureRandom secureRandom, ConsistentSettingsService consistentSettingsService) { this.delegatedBlobContainer = delegatedBlobContainer; 
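// Illustrative note, not part of this change: every data blob container at path P is paired with a
// mirrored metadata container rooted under ENCRYPTION_METADATA_PREFIX, which holds, for each data
// blob of the same name, its password-encrypted metadata (DEK bytes, nonce, packet length).
// Building the mirrored path with the existing BlobPath API, using made-up path components:
//
//   BlobPath dataPath = BlobPath.cleanPath().add("indices").add("0");
//   BlobPath metadataPath = BlobPath.cleanPath().add(ENCRYPTION_METADATA_PREFIX);
//   for (String component : dataPath) {
//       metadataPath = metadataPath.add(component);   // ends up as ENCRYPTION_METADATA_PREFIX/indices/0
//   }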
this.encryptionMetadataBlobContainer = encryptionMetadataBlobContainer; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; + this.consistentSettingsService = consistentSettingsService; } @Override diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 71038d8f2676b..759e72d085f32 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -45,8 +45,6 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor "password", key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); - private final Map cachedRepositoryPasswords = new HashMap<>(); - protected static XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } public EncryptedRepositoryPlugin(Settings settings) { @@ -55,14 +53,6 @@ public EncryptedRepositoryPlugin(Settings settings) { "Snapshotting to any encrypted repository is not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); } - // cache the passwords for all encrypted repositories during plugin instantiation - // the keystore-based secure passwords are not readable on repository instantiation - for (String repositoryName : ENCRYPTION_PASSWORD_SETTING.getNamespaces(settings)) { - Setting encryptionPasswordSetting = ENCRYPTION_PASSWORD_SETTING - .getConcreteSettingForNamespace(repositoryName); - SecureString encryptionPassword = encryptionPasswordSetting.get(settings); - cachedRepositoryPasswords.put(repositoryName, encryptionPassword.getChars()); - } } @Override @@ -73,12 +63,16 @@ public List> getSettings() { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry, final ClusterService clusterService) { - final boolean areRepositoriesPasswordsConsistent = new ConsistentSettingsService(env.settings(), clusterService, - Set.of(ENCRYPTION_PASSWORD_SETTING)).areAllConsistent(); - // there might not be any encrypted repositories installed so no need to fail the node at this point - if (false == areRepositoriesPasswordsConsistent) { - LOGGER.warn("The password for encrypted snapshot repositories are not identical across all nodes."); + // cache all the passwords for encrypted repositories while keystore-based secure passwords are still readable + final Map cachedRepositoryPasswords = new HashMap<>(); + for (String repositoryName : ENCRYPTION_PASSWORD_SETTING.getNamespaces(env.settings())) { + Setting encryptionPasswordSetting = ENCRYPTION_PASSWORD_SETTING + .getConcreteSettingForNamespace(repositoryName); + SecureString encryptionPassword = encryptionPasswordSetting.get(env.settings()); + cachedRepositoryPasswords.put(repositoryName, encryptionPassword.getChars()); } + final ConsistentSettingsService consistentSettingsService = new ConsistentSettingsService(env.settings(), clusterService, + Set.of(ENCRYPTION_PASSWORD_SETTING)); return Collections.singletonMap(REPOSITORY_TYPE_NAME, new 
Repository.Factory() { @Override @@ -88,9 +82,6 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { - if (false == areRepositoriesPasswordsConsistent) { - throw new SettingsException("The password for encrypted snapshot repositories are not identical across all nodes."); - } if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { LOGGER.warn("Encrypted snapshot repositories are not allowed for the current license." + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted and will fail.", @@ -118,7 +109,7 @@ public Repository create(RepositoryMetaData metaData, Function Date: Thu, 9 Jan 2020 12:30:20 +0200 Subject: [PATCH 075/142] Cleanup WIP --- .../common/blobstore/BlobPath.java | 7 ++++++ .../encrypted/EncryptedRepository.java | 23 +++++++++++++++---- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java index d3acd02a06d1f..51e30160ff1f4 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java @@ -62,6 +62,13 @@ public BlobPath add(String path) { return new BlobPath(Collections.unmodifiableList(paths)); } + public BlobPath prepend(String path) { + List paths = new ArrayList<>(this.paths.size() + 1); + paths.add(path); + paths.addAll(this.paths); + return new BlobPath(Collections.unmodifiableList(paths)); + } + public String buildAsString() { String p = String.join(SEPARATOR, paths); if (p.isEmpty() || p.endsWith(SEPARATOR)) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index b05b2aaa2e717..6816b8d004d83 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.store.Store; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -86,6 +87,22 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s } } + @Override + public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { + super.cleanup(repositoryStateId, writeShardGens, new ActionListener() { + @Override + public void onResponse(RepositoryCleanupResult repositoryCleanupResult) { + + listener.onResponse(repositoryCleanupResult); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + @Override protected BlobStore createBlobStore() { return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, @@ -141,11 +158,7 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - 
BlobPath encryptionMetadataBlobPath = BlobPath.cleanPath(); - encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(ENCRYPTION_METADATA_PREFIX); - for (String pathComponent : path) { - encryptionMetadataBlobPath = encryptionMetadataBlobPath.add(pathComponent); - } + BlobPath encryptionMetadataBlobPath = path.prepend(ENCRYPTION_METADATA_PREFIX); return new EncryptedBlobContainerDecorator(delegatedBlobStore.blobContainer(path), delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom, consistentSettingsService); From f1406ff12d5564bcb41467c68cdd6d2510614184 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 10 Jan 2020 10:49:55 +0200 Subject: [PATCH 076/142] Renames and nits --- .../DecryptionPacketsInputStream.java | 2 +- .../encrypted/EncryptedRepository.java | 103 ++++++++++++------ .../encrypted/EncryptedRepositoryPlugin.java | 7 +- .../EncryptionPacketsInputStream.java | 2 +- .../EncryptionPacketsInputStreamTests.java | 2 +- 5 files changed, 77 insertions(+), 39 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 8fd86f96a128f..26c980c9a9884 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -163,7 +163,7 @@ private int decrypt(PrefixInputStream packetInputStream) throws IOException { private Cipher getPacketDecryptionCipher(byte[] packet) throws IOException { GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packet, 0, GCM_IV_LENGTH_IN_BYTES); try { - Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DEK_ENCRYPTION_SCHEME); + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DATA_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); return packetCipher; } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 6816b8d004d83..407ed8a7c3ee7 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -6,6 +6,8 @@ package org.elasticsearch.repositories.encrypted; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.RepositoryMetaData; @@ -27,7 +29,6 @@ import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; @@ 
-40,17 +41,21 @@ import java.security.GeneralSecurityException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutionException; public class EncryptedRepository extends BlobStoreRepository { + static final Logger logger = LogManager.getLogger(EncryptedRepository.class); static final int GCM_TAG_LENGTH_IN_BYTES = 16; static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; - static final String DEK_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; - static final int DEK_KEY_SIZE_IN_BITS = 256; + static final String DATA_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; + static final int DATA_KEY_SIZE_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB @@ -69,7 +74,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); - this.dataEncryptionKeyGenerator.init(DEK_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.dataEncryptionKeyGenerator.init(DATA_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.metadataEncryptor = metadataEncryptor; this.consistentSettingsService = consistentSettingsService; this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); @@ -89,24 +94,28 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { - super.cleanup(repositoryStateId, writeShardGens, new ActionListener() { - @Override - public void onResponse(RepositoryCleanupResult repositoryCleanupResult) { - - listener.onResponse(repositoryCleanupResult); - } + super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { + EncryptedBlobContainerDecorator encryptedBlobContainer = (EncryptedBlobContainerDecorator) blobContainer(); + cleanUpOrphanedMetadataRecursively(encryptedBlobContainer); + listener.onResponse(repositoryCleanupResult); + }, listener::onFailure)); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); + private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainerDecorator encryptedBlobContainer) throws IOException{ + encryptedBlobContainer.cleanUpOrphanedMetadata(); + for (BlobContainer childEncryptedBlobContainer : encryptedBlobContainer.children().values()) { + try { + cleanUpOrphanedMetadataRecursively((EncryptedBlobContainerDecorator) childEncryptedBlobContainer); + } catch(IOException e) { + logger.warn("Exception while cleaning up [" + childEncryptedBlobContainer.path() + "]", e); } - }); + } } @Override protected BlobStore createBlobStore() { return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, - secureRandom, consistentSettingsService); + secureRandom); } @Override @@ -139,16 +148,13 @@ private static class EncryptedBlobStoreDecorator implements BlobStore { private final KeyGenerator dataEncryptionKeyGenerator; 
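// Illustrative sketch, not part of this diff: one encrypted packet, as produced by
// EncryptionPacketsInputStream and consumed by DecryptionPacketsInputStream, is a 12-byte GCM IV,
// then up to PACKET_LENGTH_IN_BYTES of ciphertext, then the 16-byte GCM authentication tag.
// Decrypting a single packet already held in a byte[] (the packet and dataDecryptionKey variables
// below are placeholders) would look roughly like:
//
//   GCMParameterSpec spec = new GCMParameterSpec(GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE,
//           packet, 0, GCM_IV_LENGTH_IN_BYTES);                  // the IV is the packet prefix
//   Cipher cipher = Cipher.getInstance(DATA_ENCRYPTION_SCHEME);  // "AES/GCM/NoPadding"
//   cipher.init(Cipher.DECRYPT_MODE, dataDecryptionKey, spec);
//   byte[] plaintext = cipher.doFinal(packet, GCM_IV_LENGTH_IN_BYTES,
//           packet.length - GCM_IV_LENGTH_IN_BYTES);             // ciphertext plus trailing tag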
private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; - private final ConsistentSettingsService consistentSettingsService; EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom, - ConsistentSettingsService consistentSettingsService) { + PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; - this.consistentSettingsService = consistentSettingsService; } @Override @@ -158,31 +164,30 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - BlobPath encryptionMetadataBlobPath = path.prepend(ENCRYPTION_METADATA_PREFIX); - return new EncryptedBlobContainerDecorator(delegatedBlobStore.blobContainer(path), - delegatedBlobStore.blobContainer(encryptionMetadataBlobPath), dataEncryptionKeyGenerator, metadataEncryptor, - secureRandom, consistentSettingsService); + return new EncryptedBlobContainerDecorator(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, + secureRandom); } } private static class EncryptedBlobContainerDecorator implements BlobContainer { - private final BlobContainer delegatedBlobContainer; - private final BlobContainer encryptionMetadataBlobContainer; + private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; - private final ConsistentSettingsService consistentSettingsService; + private final BlobContainer delegatedBlobContainer; + private final BlobContainer encryptionMetadataBlobContainer; - EncryptedBlobContainerDecorator(BlobContainer delegatedBlobContainer, BlobContainer encryptionMetadataBlobContainer, + EncryptedBlobContainerDecorator(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryptor metadataEncryptor, - SecureRandom secureRandom, ConsistentSettingsService consistentSettingsService) { - this.delegatedBlobContainer = delegatedBlobContainer; - this.encryptionMetadataBlobContainer = encryptionMetadataBlobContainer; + SecureRandom secureRandom) { + this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; this.secureRandom = secureRandom; - this.consistentSettingsService = consistentSettingsService; + this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); + BlobPath encryptionMetadataBlobPath = path.prepend(ENCRYPTION_METADATA_PREFIX); + this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(encryptionMetadataBlobPath); } @Override @@ -265,6 +270,7 @@ public Map listBlobs() throws IOException { // the encrypted data blob container is the source-of-truth for list operations // the metadata blob container mirrors its structure, but in some failure cases it might contain // additional orphaned metadata blobs + // can list blobs that cannot be decrypted (because metadata is missing or corrupted) return this.delegatedBlobContainer.listBlobs(); } @@ -273,7 +279,14 @@ public Map children() throws IOException { // the encrypted data blob container is the source-of-truth for child container operations // the metadata blob container mirrors its 
structure, but in some failure cases it might contain // additional orphaned metadata blobs - return this.delegatedBlobContainer.children(); + Map childEncryptedBlobContainers = this.delegatedBlobContainer.children(); + Map result = new HashMap<>(childEncryptedBlobContainers.size()); + for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { + // get an encrypted blob container for each + result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainerDecorator(this.delegatedBlobStore, + encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom)); + } + return result; } @Override @@ -281,8 +294,34 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws // the encrypted data blob container is the source-of-truth for list operations // the metadata blob container mirrors its structure, but in some failure cases it might contain // additional orphaned metadata blobs + // can list blobs that cannot be decrypted (because metadata is missing or corrupted) return this.delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); } + + public void cleanUpOrphanedMetadata() throws IOException{ + // delete encryption metadata blobs which don't pair with any data blobs + Set foundEncryptedBlobs = this.delegatedBlobContainer.listBlobs().keySet(); + Set foundMetadataBlobs = this.encryptionMetadataBlobContainer.listBlobs().keySet(); + List orphanedMetadataBlobs = new ArrayList<>(foundMetadataBlobs); + orphanedMetadataBlobs.removeAll(foundEncryptedBlobs); + try { + this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); + } catch (IOException e) { + logger.warn("Exception while deleting orphaned metadata blobs " + orphanedMetadataBlobs, e); + } + // delete Encryption metadata blob containers which don't par with any data blob containers + Set foundEncryptedBlobContainers = this.delegatedBlobContainer.children().keySet(); + Map foundMetadataBlobContainers = this.encryptionMetadataBlobContainer.children(); + for (Map.Entry metadataBlobContainer : foundMetadataBlobContainers.entrySet()) { + if (false == foundEncryptedBlobContainers.contains(metadataBlobContainer.getKey())) { + try { + metadataBlobContainer.getValue().delete(); + } catch (IOException e) { + logger.warn("Exception while deleting orphaned metadata blob container [" + metadataBlobContainer + "]", e); + } + } + } + } } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 759e72d085f32..27d05501d5ff6 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.license.LicenseUtils; @@ -37,7 +36,7 @@ public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { - static final Logger LOGGER = LogManager.getLogger(EncryptedRepository.class); + 
static final Logger logger = LogManager.getLogger(EncryptedRepositoryPlugin.class); static final String REPOSITORY_TYPE_NAME = "encrypted"; static final String CIPHER_ALGO = "AES"; static final String RAND_ALGO = "SHA1PRNG"; @@ -49,7 +48,7 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor public EncryptedRepositoryPlugin(Settings settings) { if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { - LOGGER.warn("Encrypted snapshot repositories are not allowed for the current license." + + logger.warn("Encrypted snapshot repositories are not allowed for the current license." + "Snapshotting to any encrypted repository is not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); } @@ -83,7 +82,7 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { - LOGGER.warn("Encrypted snapshot repositories are not allowed for the current license." + + logger.warn("Encrypted snapshot repositories are not allowed for the current license." + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 8ec28fee4c8b2..556759a831d5a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -174,7 +174,7 @@ public void reset() throws IOException { private static Cipher getPacketEncryptionCipher(SecretKey secretKey, byte[] packetIv) throws IOException { GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packetIv); try { - Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DEK_ENCRYPTION_SCHEME); + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DATA_ENCRYPTION_SCHEME); packetCipher.init(Cipher.ENCRYPT_MODE, secretKey, gcmParameterSpec); return packetCipher; } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException e) { diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index acf5d65a0b915..e3f96553af1a2 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -394,7 +394,7 @@ private void testEncryptPacketWise(int size, int packetSize, ReadStrategy readSt GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * 
Byte.SIZE, Arrays.copyOfRange(ciphertextArray, ciphertextOffset, ciphertextOffset + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES)); - Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DEK_ENCRYPTION_SCHEME); + Cipher packetCipher = Cipher.getInstance(EncryptedRepository.DATA_ENCRYPTION_SCHEME); packetCipher.init(Cipher.DECRYPT_MODE, secretKey, gcmParameterSpec); try (InputStream packetDecryptionInputStream = new CipherInputStream(new ByteArrayInputStream(ciphertextArray, ciphertextOffset + EncryptedRepository.GCM_IV_LENGTH_IN_BYTES, From 2565d6e5ef04d731bf2e966aca5434bb11065a43 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 10 Jan 2020 12:17:44 +0200 Subject: [PATCH 077/142] Checkstyle --- .../common/settings/ConsistentSettingsService.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java index d7f6e837061df..2c0ec773c145f 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -38,9 +38,7 @@ import java.util.Base64; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; -import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; From 2c40aada4ffa6f08b3375b047600bc97d1309226 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 13 Jan 2020 15:34:40 +0200 Subject: [PATCH 078/142] Consistency check on create snapshot --- .../encrypted/EncryptedRepository.java | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 407ed8a7c3ee7..dcf3132b8e6e1 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -67,6 +67,7 @@ public class EncryptedRepository extends BlobStoreRepository { private final PasswordBasedEncryptor metadataEncryptor; private final ConsistentSettingsService consistentSettingsService; private final SecureRandom secureRandom; + private final SecureSetting passwordSettingForThisRepo; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, BlobStoreRepository delegatedRepository, PasswordBasedEncryptor metadataEncryptor, @@ -78,6 +79,8 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.metadataEncryptor = metadataEncryptor; this.consistentSettingsService = consistentSettingsService; this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); + this.passwordSettingForThisRepo = + (SecureSetting) EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()); } @Override @@ -85,7 +88,13 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, ActionListener listener) { if 
(EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { - super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); + if (consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { + super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); + } else { + listener.onFailure(new RepositoryException(metadata.name(), + "The local node's value for the keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] does not " + + "match the master's")); + } } else { listener.onFailure(LicenseUtils.newComplianceException( EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); @@ -120,12 +129,6 @@ protected BlobStore createBlobStore() { @Override protected void doStart() { - SecureSetting passwordSettingForThisRepo = - (SecureSetting) EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()); - if (false == consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { - throw new RepositoryException(metadata.name(), "The value for the Secure setting [" + passwordSettingForThisRepo.getKey() + - "] does not match the master's"); - } this.delegatedRepository.start(); super.doStart(); } From f73b54c872cc363bba8508168eed960f3bf9d2eb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 14 Jan 2020 20:08:50 +0200 Subject: [PATCH 079/142] More javadocs for PasswordBasedEncryptor --- .../encrypted/PasswordBasedEncryptor.java | 65 ++++++++++++++----- 1 file changed, 50 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java index dbf9f6777debf..8b6f8c8b1e201 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java @@ -20,6 +20,7 @@ import javax.crypto.spec.GCMParameterSpec; import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; @@ -33,6 +34,17 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; +/** + * Encrypts and decrypts data using a password. + * Encryption generates the AES secret key from the password and a randomly generated salt (using the PBKDF2 algo). + * This key is then used to encrypt the data (AES/GCM/NoPadding). The encryption IV is generated randomly. + * The key is cached internally and used for at most {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} encryption invocations. + * When this limit is exceeded, a new key is generated, by generating a new random salt. The encryption key is never + * stored on disk. The salt, however, which was used in generating the encryption key, is prepended to the ciphertext. + * Decryption extracts the salt prepended to the ciphertext, computes the key (using the secret password) and uses + * the key to decrypt the ciphertext (which also contains the IV). 
Decryption also does not store the generated key + * on disk, but caches it in memory because generating the key from the password is computationally expensive on purpose. + */ public final class PasswordBasedEncryptor { // the count of keys stored so as to avoid re-computation @@ -60,12 +72,14 @@ public final class PasswordBasedEncryptor { // to less than 2^32 (so we use 2^31 to err on the safe side) private static final long ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY = 1L<<31; - // the password which is used to generate all the encryption keys (the keys are different because they each + // the password which is used to generate all the encryption and decryption keys (the keys are different because they each // are generated using a different salt, which is generated randomly) private final char[] password; // this is used to generate the IVs for each encryption instance as well as the salt for every key generation private final SecureRandom secureRandom; + // this is used to store the secret keys given the salt that was used in generating it private final Cache> keyBySaltCache; + // the salt of the secret key which is used for encryption private final AtomicReference> currentEncryptionKeySalt; public PasswordBasedEncryptor(char[] password, SecureRandom secureRandom) { @@ -74,10 +88,11 @@ public PasswordBasedEncryptor(char[] password, SecureRandom secureRandom) { this.keyBySaltCache = CacheBuilder.>builder() .setMaximumWeight(ENCRYPTION_KEY_CACHE_SIZE) .build(); + // set the random salt which is used to generate the encryption key byte[] randomEncryptionKeySaltBytes = new byte[SALT_LENGTH_IN_BYTES]; secureRandom.nextBytes(randomEncryptionKeySaltBytes); this.currentEncryptionKeySalt = new AtomicReference<>(new LimitedSupplier<>( - Base64.getEncoder().encodeToString(randomEncryptionKeySaltBytes), + new String(Base64.getEncoder().encode(randomEncryptionKeySaltBytes), StandardCharsets.UTF_8), ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY)); } @@ -112,7 +127,7 @@ public byte[] decrypt(byte[] encryptedData) throws ExecutionException, NoSuchPad // extract the salt prepended to the ciphertext byte[] salt = Arrays.copyOf(encryptedData, SALT_LENGTH_IN_BYTES); // get the key associated with the salt - SecretKey decryptionKey = getKeyFromSalt(Base64.getEncoder().encodeToString(salt)).v2(); + SecretKey decryptionKey = getKeyFromSalt(new String(Base64.getEncoder().encode(salt), StandardCharsets.UTF_8)).v2(); // construct and initialize the decryption cipher GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BYTES * Byte.SIZE, encryptedData, SALT_LENGTH_IN_BYTES, IV_LENGTH_IN_BYTES); @@ -132,6 +147,9 @@ private SecretKey generatePasswordBasedSecretKey(char[] password, byte[] salt) t return secret; } + /** + * Return a secret key given the salt; computes the key if the key for the salt argument is not already cached. + */ private Tuple getKeyFromSalt(String salt) throws ExecutionException { return this.keyBySaltCache.computeIfAbsent(salt, (ignore) -> { byte[] saltBytes = Base64.getDecoder().decode(salt); @@ -140,33 +158,49 @@ private Tuple getKeyFromSalt(String salt) throws ExecutionExc }); } - private void resetCurrentEncryptionKeySalt() { + /** + * Replaces the currently exhausted salt supplier with a new one. The new salt is generated randomly. 
+ */ + private void resetCurrentEncryptionKeySalt(LimitedSupplier currentExhaustedSupplier) { + // generate a new random salt byte[] randomEncryptionKeySaltBytes = new byte[SALT_LENGTH_IN_BYTES]; secureRandom.nextBytes(randomEncryptionKeySaltBytes); - this.currentEncryptionKeySalt.set(new LimitedSupplier<>( - Base64.getEncoder().encodeToString(randomEncryptionKeySaltBytes), - ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY)); + LimitedSupplier newSaltSupplier = new LimitedSupplier<>( + new String(Base64.getEncoder().encode(randomEncryptionKeySaltBytes), StandardCharsets.UTF_8), + ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY); + // replace the old salt supplier with the new one + this.currentEncryptionKeySalt.compareAndExchange(currentExhaustedSupplier, newSaltSupplier); } private Tuple useEncryptionKey() throws ExecutionException { - Optional encryptionKeySalt = this.currentEncryptionKeySalt.get().get(); + // get the salt of the encryption key + LimitedSupplier currentEncryptionKeySaltSupplier = currentEncryptionKeySalt.get(); + Optional encryptionKeySalt = currentEncryptionKeySaltSupplier.get(); if (encryptionKeySalt.isPresent()) { + // the salt has NOT been used more than {@code #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} times return getKeyFromSalt(encryptionKeySalt.get()); } - // change the salt and generate a new encryption key - resetCurrentEncryptionKeySalt(); - // try again + // change the salt used to generate a new encryption key + resetCurrentEncryptionKeySalt(currentEncryptionKeySaltSupplier); + // try to use the new supplier again return useEncryptionKey(); } + /** + * A supplier accepting a limited number of retrieve (get) invocations. After the limit has been exceeded + * the supplier returns {@code Optional#empty()}. + */ static class LimitedSupplier implements Supplier> { - + // the current {@code #get()) invocation count private final AtomicLong count; - + // the constant value to return when the invocation count has not been exceeded private final Optional value; - private final long limit; + LimitedSupplier(T value, long limit) { + if (limit <= 0) { + throw new IllegalArgumentException("limit argument must be strictly positive"); + } this.count = new AtomicLong(0L); this.value = Optional.of(Objects.requireNonNull(value)); this.limit = limit; @@ -174,7 +208,8 @@ static class LimitedSupplier implements Supplier> { @Override public Optional get() { - if (count.get() <= limit && count.incrementAndGet() <= limit) { + long invocationCount = count.getAndUpdate(prev -> prev < limit ? 
prev + 1 : limit); + if (invocationCount < limit) { return value; } return Optional.empty(); From 36062db672814b4f17bc7de35ae11ddd441e4866 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 15 Jan 2020 13:39:03 +0200 Subject: [PATCH 080/142] EncryptedRepository#restoreShard logging --- .../encrypted/EncryptedRepository.java | 83 ++++++++++++------- 1 file changed, 53 insertions(+), 30 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index dcf3132b8e6e1..3357ed0d643a8 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -23,8 +23,10 @@ import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; +import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; @@ -32,6 +34,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; +import javax.crypto.AEADBadTagException; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; @@ -48,7 +51,7 @@ import java.util.Set; import java.util.concurrent.ExecutionException; -public class EncryptedRepository extends BlobStoreRepository { +public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); static final int GCM_TAG_LENGTH_IN_BYTES = 16; @@ -92,8 +95,8 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); } else { listener.onFailure(new RepositoryException(metadata.name(), - "The local node's value for the keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] does not " + - "match the master's")); + "Password mismatch for repository [" + metadata.name() + "]. 
The local node's value of the " + + "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's")); } } else { listener.onFailure(LicenseUtils.newComplianceException( @@ -101,6 +104,23 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s } } + @Override + public void restoreShard(Store store, SnapshotId snapshotId, IndexId indexId, ShardId snapshotShardId, + RecoveryState recoveryState, ActionListener listener) { + if (false == consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { + // the repository has a different password on the local node compared to the master node + // even though restoring the shard will surely fail (because we know that, by now, the master's password + // must be correct, otherwise this method will not get called) we let it pass-through in order to avoid + // having to manipulate the {@code recoveryState} argument + logger.error("Password mismatch for repository [" + metadata.name() + "]. The local node's value of the " + + "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's"); + } + super.restoreShard(store, snapshotId, indexId, snapshotShardId, recoveryState, ActionListener.delegateResponse(listener, + (l, e) -> l.onFailure(new RepositoryException(metadata.name(), "Password mismatch for repository [" + metadata.name() + + "]. The local node's value of the keystore secure setting [" + + passwordSettingForThisRepo.getKey() + "] is different from the master's")))); + } + @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { @@ -177,50 +197,53 @@ private static class EncryptedBlobContainerDecorator implements BlobContainer { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; - private final SecureRandom secureRandom; + private final SecureRandom nonceGenerator; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; EncryptedBlobContainerDecorator(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryptor metadataEncryptor, - SecureRandom secureRandom) { + SecureRandom nonceGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; - this.secureRandom = secureRandom; + this.nonceGenerator = nonceGenerator; this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); - BlobPath encryptionMetadataBlobPath = path.prepend(ENCRYPTION_METADATA_PREFIX); - this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(encryptionMetadataBlobPath); + this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_PREFIX)); } @Override public BlobPath path() { - return this.delegatedBlobContainer.path(); + return delegatedBlobContainer.path(); } @Override public InputStream readBlob(String blobName) throws IOException { // read metadata - BytesReference encryptedMetadataBytes = Streams.readFully(this.encryptionMetadataBlobContainer.readBlob(blobName)); + BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(blobName)); final byte[] decryptedMetadata; try { decryptedMetadata = 
metadataEncryptor.decrypt(BytesReference.toBytes(encryptedMetadataBytes)); } catch (ExecutionException | GeneralSecurityException e) { - throw new IOException("Exception while decrypting metadata", e); + String failureMessage = "Failure to decrypt metadata for blob [" + blobName + "]"; + if (e.getCause() instanceof AEADBadTagException) { + failureMessage = failureMessage + ". The repository password is probably wrong."; + } + throw new IOException(failureMessage, e); } final BlobEncryptionMetadata metadata = BlobEncryptionMetadata.deserializeMetadataFromByteArray(decryptedMetadata); // decrypt metadata SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, metadata.getDataEncryptionKeyMaterial().length, "AES"); // read and decrypt blob - return new DecryptionPacketsInputStream(this.delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, + return new DecryptionPacketsInputStream(delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, metadata.getNonce(), metadata.getPacketLengthInBytes()); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); - int nonce = secureRandom.nextInt(); + int nonce = nonceGenerator.nextInt(); // this is the metadata required to decrypt back the encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); // encrypt metadata @@ -228,18 +251,18 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b try { encryptedMetadata = metadataEncryptor.encrypt(BlobEncryptionMetadata.serializeMetadataToByteArray(metadata)); } catch (ExecutionException | GeneralSecurityException e) { - throw new IOException("Exception while encrypting metadata", e); + throw new IOException("Failure to encrypt metadata for blob [" + blobName + "]", e); } // first write the encrypted metadata try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - this.encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, + encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, failIfAlreadyExists); } // afterwards write the encrypted data blob long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - this.delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); + delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } } @@ -254,18 +277,18 @@ public void writeBlobAtomic(String blobName, InputStream inputStream, long blobS @Override public DeleteResult delete() throws IOException { // first delete the encrypted data blob - DeleteResult deleteResult = this.delegatedBlobContainer.delete(); + DeleteResult deleteResult = delegatedBlobContainer.delete(); // then delete metadata - this.encryptionMetadataBlobContainer.delete(); + encryptionMetadataBlobContainer.delete(); return deleteResult; } @Override public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOException { // first delete the encrypted data blob - 
this.delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); // then delete metadata - this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); } @Override @@ -274,7 +297,7 @@ public Map listBlobs() throws IOException { // the metadata blob container mirrors its structure, but in some failure cases it might contain // additional orphaned metadata blobs // can list blobs that cannot be decrypted (because metadata is missing or corrupted) - return this.delegatedBlobContainer.listBlobs(); + return delegatedBlobContainer.listBlobs(); } @Override @@ -282,12 +305,12 @@ public Map children() throws IOException { // the encrypted data blob container is the source-of-truth for child container operations // the metadata blob container mirrors its structure, but in some failure cases it might contain // additional orphaned metadata blobs - Map childEncryptedBlobContainers = this.delegatedBlobContainer.children(); + Map childEncryptedBlobContainers = delegatedBlobContainer.children(); Map result = new HashMap<>(childEncryptedBlobContainers.size()); for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { // get an encrypted blob container for each - result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainerDecorator(this.delegatedBlobStore, - encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom)); + result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainerDecorator(delegatedBlobStore, + encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryptor, nonceGenerator)); } return result; } @@ -298,23 +321,23 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws // the metadata blob container mirrors its structure, but in some failure cases it might contain // additional orphaned metadata blobs // can list blobs that cannot be decrypted (because metadata is missing or corrupted) - return this.delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); + return delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); } public void cleanUpOrphanedMetadata() throws IOException{ // delete encryption metadata blobs which don't pair with any data blobs - Set foundEncryptedBlobs = this.delegatedBlobContainer.listBlobs().keySet(); - Set foundMetadataBlobs = this.encryptionMetadataBlobContainer.listBlobs().keySet(); + Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); + Set foundMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); List orphanedMetadataBlobs = new ArrayList<>(foundMetadataBlobs); orphanedMetadataBlobs.removeAll(foundEncryptedBlobs); try { - this.encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); + encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); } catch (IOException e) { logger.warn("Exception while deleting orphaned metadata blobs " + orphanedMetadataBlobs, e); } // delete Encryption metadata blob containers which don't par with any data blob containers - Set foundEncryptedBlobContainers = this.delegatedBlobContainer.children().keySet(); - Map foundMetadataBlobContainers = this.encryptionMetadataBlobContainer.children(); + Set foundEncryptedBlobContainers = delegatedBlobContainer.children().keySet(); + Map foundMetadataBlobContainers = encryptionMetadataBlobContainer.children(); for 
(Map.Entry metadataBlobContainer : foundMetadataBlobContainers.entrySet()) { if (false == foundEncryptedBlobContainers.contains(metadataBlobContainer.getKey())) { try { From 677bbb7409035be8953d0b7cdf68572a82549b64 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 15 Jan 2020 14:29:36 +0200 Subject: [PATCH 081/142] Verify check --- .../settings/ConsistentSettingsService.java | 2 +- .../encrypted/EncryptedRepository.java | 39 ++++++++++++------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java index 2c0ec773c145f..88f61d7fc2a1e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -189,7 +189,7 @@ private Map getPublishedHashesOfConsistentSettings() { ClusterState state = clusterService.state(); if (state.metaData() == null || state.metaData().hashesOfConsistentSettings() == null) { throw new IllegalStateException("Hashes of consistent secure settings are not yet published by the master node. Cannot " + - "check consistency at this time"); + "check consistency at this time."); } return state.metaData().hashesOfConsistentSettings(); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 3357ed0d643a8..664ddaffc65db 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; @@ -31,6 +32,7 @@ import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; @@ -121,20 +123,31 @@ public void restoreShard(Store store, SnapshotId snapshotId, IndexId indexId, Sh passwordSettingForThisRepo.getKey() + "] is different from the master's")))); } + @Override + public void verify(String seed, DiscoveryNode localNode) { + if (consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { + super.verify(seed, localNode); + } else { + // the repository has a different password on the local node compared to the master node + throw new RepositoryVerificationException(metadata.name(), "Repository password mismatch. 
The local node's [" + localNode + + "] value of the keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's"); + } + } + @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { - EncryptedBlobContainerDecorator encryptedBlobContainer = (EncryptedBlobContainerDecorator) blobContainer(); + EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); cleanUpOrphanedMetadataRecursively(encryptedBlobContainer); listener.onResponse(repositoryCleanupResult); }, listener::onFailure)); } - private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainerDecorator encryptedBlobContainer) throws IOException{ + private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encryptedBlobContainer) throws IOException{ encryptedBlobContainer.cleanUpOrphanedMetadata(); for (BlobContainer childEncryptedBlobContainer : encryptedBlobContainer.children().values()) { try { - cleanUpOrphanedMetadataRecursively((EncryptedBlobContainerDecorator) childEncryptedBlobContainer); + cleanUpOrphanedMetadataRecursively((EncryptedBlobContainer) childEncryptedBlobContainer); } catch(IOException e) { logger.warn("Exception while cleaning up [" + childEncryptedBlobContainer.path() + "]", e); } @@ -143,7 +156,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainerDecorator @Override protected BlobStore createBlobStore() { - return new EncryptedBlobStoreDecorator(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, + return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, secureRandom); } @@ -165,15 +178,15 @@ protected void doClose() { this.delegatedRepository.close(); } - private static class EncryptedBlobStoreDecorator implements BlobStore { + private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryptor metadataEncryptor; private final SecureRandom secureRandom; - EncryptedBlobStoreDecorator(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { + EncryptedBlobStore(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, + PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; @@ -187,12 +200,11 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - return new EncryptedBlobContainerDecorator(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, - secureRandom); + return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, secureRandom); } } - private static class EncryptedBlobContainerDecorator implements BlobContainer { + private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; @@ -201,9 +213,8 @@ private static class EncryptedBlobContainerDecorator implements BlobContainer { private final BlobContainer delegatedBlobContainer; private final BlobContainer 
encryptionMetadataBlobContainer; - EncryptedBlobContainerDecorator(BlobStore delegatedBlobStore, BlobPath path, - KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryptor metadataEncryptor, - SecureRandom nonceGenerator) { + EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, + PasswordBasedEncryptor metadataEncryptor, SecureRandom nonceGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; @@ -309,7 +320,7 @@ public Map children() throws IOException { Map result = new HashMap<>(childEncryptedBlobContainers.size()); for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { // get an encrypted blob container for each - result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainerDecorator(delegatedBlobStore, + result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryptor, nonceGenerator)); } return result; From d2133e0a0ce89aadb4c6540b2c340a00584048f5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 20 Jan 2020 12:13:28 +0200 Subject: [PATCH 082/142] Err msg nits --- .../repositories/encrypted/EncryptedRepository.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 664ddaffc65db..900c355eb57a3 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -97,7 +97,7 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); } else { listener.onFailure(new RepositoryException(metadata.name(), - "Password mismatch for repository [" + metadata.name() + "]. The local node's value of the " + + "Password mismatch for repository. The local node's value of the " + "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's")); } } else { @@ -118,8 +118,8 @@ public void restoreShard(Store store, SnapshotId snapshotId, IndexId indexId, Sh "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's"); } super.restoreShard(store, snapshotId, indexId, snapshotShardId, recoveryState, ActionListener.delegateResponse(listener, - (l, e) -> l.onFailure(new RepositoryException(metadata.name(), "Password mismatch for repository [" + metadata.name() + - "]. The local node's value of the keystore secure setting [" + + (l, e) -> l.onFailure(new RepositoryException(metadata.name(), "Password mismatch for repository. 
" + + "The local node's value of the keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's")))); } From a26b2b9ecd483187d57a180f45d359af1edcc0e9 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 20 Jan 2020 16:05:55 +0200 Subject: [PATCH 083/142] Revert SnapshotsService --- .../java/org/elasticsearch/snapshots/SnapshotsService.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 44f5fcc2ea85a..45967235e8086 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -269,6 +269,7 @@ public void createSnapshot(final CreateSnapshotRequest request, final ActionList validate(repositoryName, snapshotName); final SnapshotId snapshotId = new SnapshotId(snapshotName, UUIDs.randomBase64UUID()); // new UUID for the snapshot final StepListener repositoryDataListener = new StepListener<>(); + repositoriesService.repository(repositoryName).getRepositoryData(repositoryDataListener); repositoryDataListener.whenComplete(repositoryData -> { final boolean hasOldFormatSnapshots = hasOldVersionSnapshots(repositoryName, repositoryData, null); clusterService.submitStateUpdateTask("create_snapshot [" + snapshotName + ']', new ClusterStateUpdateTask() { @@ -351,11 +352,6 @@ public TimeValue timeout() { } }); }, listener::onFailure); - try { - repositoriesService.repository(repositoryName).getRepositoryData(repositoryDataListener); - } catch(Exception e) { - repositoryDataListener.onFailure(e); - } } public boolean hasOldVersionSnapshots(String repositoryName, RepositoryData repositoryData, @Nullable SnapshotId excluded) { From 948b3c63c4ac6ee166306ced6d90f24932dcf12d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 20 Jan 2020 23:00:24 +0200 Subject: [PATCH 084/142] PasswordBasedEncryptor javadoc --- .../encrypted/PasswordBasedEncryptor.java | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java index 8b6f8c8b1e201..7710b0d8507e4 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptor.java @@ -35,15 +35,27 @@ import java.util.concurrent.atomic.AtomicReference; /** - * Encrypts and decrypts data using a password. - * Encryption generates the AES secret key from the password and a randomly generated salt (using the PBKDF2 algo). - * This key is then used to encrypt the data (AES/GCM/NoPadding). The encryption IV is generated randomly. - * The key is cached internally and used for at most {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} encryption invocations. - * When this limit is exceeded, a new key is generated, by generating a new random salt. The encryption key is never - * stored on disk. The salt, however, which was used in generating the encryption key, is prepended to the ciphertext. 
- * Decryption extracts the salt prepended to the ciphertext, computes the key (using the secret password) and uses - * the key to decrypt the ciphertext (which also contains the IV). Decryption also does not store the generated key - * on disk, but caches it in memory because generating the key from the password is computationally expensive on purpose. + * Encrypts and decrypts using a password. Decryption authenticates the ciphertext so as to make sure that + * the same password has been used during encryption (the cipher mode is AES/GCM/NoPadding). The caller must + * ensure that the password and the ciphertext are not stored on the same "medium" (storage partition). + *

+ * The {@code password} constructor argument is used to generate AES 256-bit wide keys using the PBKDF2 algorithm. + * The "salt", which is the other required parameter to the PBKDF2 algorithm, is generated randomly (32 bytes wide) using a + * {@code SecureRandom} instance. Unlike the password, the "salt" is not a secret; it is used to generate different + * keys starting from the same password. + *

+ * A new encryption key is generated for every {@link PasswordBasedEncryptor} instance (using a newly generated random + * "salt"). The key is then reused for as many as {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} encryption invocations; + * when the limit is exceeded, a new key is computed from a newly generated "salt". In order to support the decryption + * operation, the "salt" is prepended to the returned ciphertext. Decryption reads in the "salt" and uses the secret + * password to regenerate the same key and decrypt and authenticate the ciphertext. The key thus computed is locally + * cached for possible reuse because generating the key from the password is an expensive operation (by design). + *
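(Illustrative sketch, not part of the patch.) The scheme described in this javadoc, in a minimal self-contained form: derive an AES key from the password and a random salt with PBKDF2, encrypt with AES/GCM under a fresh IV, and prepend the salt so that decryption can regenerate the same key. The class name, iteration count, salt size and output layout below are assumptions made only for the sketch, not the values used by PasswordBasedEncryptor.

    import javax.crypto.Cipher;
    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.GCMParameterSpec;
    import javax.crypto.spec.PBEKeySpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.nio.ByteBuffer;
    import java.security.SecureRandom;

    public final class PasswordBasedRoundTripSketch {
        // assumption: example parameters, chosen only to make the sketch self-contained
        private static final int PBKDF2_ITERATIONS = 10_000;
        private static final int SALT_LENGTH_IN_BYTES = 32;
        private static final int IV_LENGTH_IN_BYTES = 12;

        public static byte[] encrypt(char[] password, byte[] plaintext) throws Exception {
            SecureRandom random = new SecureRandom();
            byte[] salt = new byte[SALT_LENGTH_IN_BYTES]; // not a secret, rotated together with the key
            byte[] iv = new byte[IV_LENGTH_IN_BYTES];     // must be fresh for every encryption under the same key
            random.nextBytes(salt);
            random.nextBytes(iv);
            // PBKDF2 turns the password plus salt into a 256-bit AES key (expensive by design)
            PBEKeySpec keySpec = new PBEKeySpec(password, salt, PBKDF2_ITERATIONS, 256);
            byte[] keyBytes = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512").generateSecret(keySpec).getEncoded();
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(keyBytes, "AES"), new GCMParameterSpec(128, iv));
            byte[] ciphertext = cipher.doFinal(plaintext);
            // the salt (and the IV) travel with the ciphertext so that decryption can rebuild the same key
            return ByteBuffer.allocate(salt.length + iv.length + ciphertext.length)
                    .put(salt).put(iv).put(ciphertext).array();
        }
    }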

+ * The reason why there is an encryption invocation limit for the same key is because the AES/GCM/NoPadding encryption mode + * must not be used with the same key and the same Initialization Vector. During encryption, the {@link PasswordBasedEncryptor} + * randomly generates a new 12-byte wide IV, and so in order to limit the risk of a collision, the key must be changed + * after at most {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} IVs have been generated and used with that same key. For more + * details, see Section 8.2 of https://csrc.nist.gov/publications/detail/sp/800-38d/final . */ public final class PasswordBasedEncryptor { From 83c0e444be7cbb0dbee39fad90912ab26894e2c0 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 22 Jan 2020 09:42:03 +0200 Subject: [PATCH 085/142] Revert Consistent Settings check changes --- .../settings/ConsistentSettingsService.java | 151 +++++++----------- .../ConsistentSettingsServiceTests.java | 10 +- 2 files changed, 66 insertions(+), 95 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java index 88f61d7fc2a1e..411a470238638 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -38,7 +38,9 @@ import java.util.Base64; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; @@ -49,7 +51,7 @@ /** * Used to publish secure setting hashes in the cluster state and to validate those hashes against the local values of those same settings. * This is colloquially referred to as the secure setting consistency check. It will publish and verify hashes only for the collection - * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property. + * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property. */ public final class ConsistentSettingsService { private static final Logger logger = LogManager.getLogger(ConsistentSettingsService.class); @@ -57,7 +59,6 @@ public final class ConsistentSettingsService { private final Settings settings; private final ClusterService clusterService; private final Collection> secureSettingsCollection; - private final Map digestsBySettingKey; private final SecretKeyFactory pbkdf2KeyFactory; public ConsistentSettingsService(Settings settings, ClusterService clusterService, @@ -65,8 +66,6 @@ public ConsistentSettingsService(Settings settings, ClusterService clusterServic this.settings = settings; this.clusterService = clusterService; this.secureSettingsCollection = secureSettingsCollection; - // eagerly compute digests because the keystore could be closed at a later time - this.digestsBySettingKey = computeDigestOfConsistentSecureSettings(); // this is used to compute the PBKDF2 hash (the published one) try { this.pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); @@ -80,118 +79,80 @@ public ConsistentSettingsService(Settings settings, ClusterService clusterServic * published by the master node only. Note that this is not designed for {@link SecureSettings} implementations that are mutable. 
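(Illustrative sketch, not part of the patch.) The value published for each consistent secure setting has the shape "<salt>:<base64 of the salted PBKDF2 hash of the local digest>", and a non-master node verifies it by recomputing the salted hash from its own local digest, roughly as below. The class name is made up for the sketch, and the hashing itself is abstracted behind a function parameter standing in for computeSaltedPBKDF2Hash.

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;
    import java.util.function.BiFunction;

    public final class PublishedHashCheckSketch {
        // saltedPbkdf2 is a stand-in for ConsistentSettingsService#computeSaltedPBKDF2Hash(digest, saltBytes)
        public static boolean matchesPublished(String publishedSaltAndHash, byte[] localDigest,
                                               BiFunction<byte[], byte[], byte[]> saltedPbkdf2) {
            String[] parts = publishedSaltAndHash.split(":"); // "<salt>:<base64 hash>"
            if (parts.length != 2) {
                throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + "] is invalid");
            }
            byte[] recomputed = saltedPbkdf2.apply(localDigest, parts[0].getBytes(StandardCharsets.UTF_8));
            return parts[1].equals(Base64.getEncoder().encodeToString(recomputed));
        }
    }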
*/ public LocalNodeMasterListener newHashPublisher() { - // eagerly compute salted hashes to be published - final Map hashesBySettingKey = new HashMap<>(); - for (Map.Entry entry : this.digestsBySettingKey.entrySet()) { - String salt = UUIDs.randomBase64UUID(); - byte[] publicHash = computeSaltedPBKDF2Hash(entry.getValue(), salt.getBytes(StandardCharsets.UTF_8)); - String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8); - hashesBySettingKey.put(entry.getKey(), salt + ":" + encodedPublicHash); - } - return new HashesPublisher(hashesBySettingKey, clusterService); + // eagerly compute hashes to be published + final Map computedHashesOfConsistentSettings = computeHashesOfConsistentSecureSettings(); + return new HashesPublisher(computedHashesOfConsistentSettings, clusterService); } /** * Verifies that the hashes of consistent secure settings in the latest {@code ClusterState} verify for the values of those same * settings on the local node. The settings to be checked are passed in the constructor. Also, validates that a missing local - * value is also missing in the published set, and vice-versa. + * value is also missing in the published set, and vice-versa. */ public boolean areAllConsistent() { - ClusterState state = clusterService.state(); - Map publishedHashesOfConsistentSettings = getPublishedHashesOfConsistentSettings(); - AtomicBoolean allConsistent = new AtomicBoolean(true); - for (String localSettingName : digestsBySettingKey.keySet()) { - if (false == publishedHashesOfConsistentSettings.containsKey(localSettingName)) { + final ClusterState state = clusterService.state(); + final Map publishedHashesOfConsistentSettings = state.metaData().hashesOfConsistentSettings(); + final Set publishedSettingKeysToVerify = new HashSet<>(); + publishedSettingKeysToVerify.addAll(publishedHashesOfConsistentSettings.keySet()); + final AtomicBoolean allConsistent = new AtomicBoolean(true); + forEachConcreteSecureSettingDo(concreteSecureSetting -> { + final String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(concreteSecureSetting.getKey()); + final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); + if (publishedSaltAndHash == null && localHash == null) { + // consistency of missing + logger.debug("no published hash for the consistent secure setting [{}] but it also does NOT exist on the local node", + concreteSecureSetting.getKey()); + } else if (publishedSaltAndHash == null && localHash != null) { // setting missing on master but present locally - logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", localSettingName); + logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", + concreteSecureSetting.getKey()); if (state.nodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Master node cannot validate consistent setting. 
No published hash for [" - + localSettingName + "] but setting exists."); + + concreteSecureSetting.getKey() + "] but setting exists."); } allConsistent.set(false); - } - } - for (String publishedSettingName : publishedHashesOfConsistentSettings.keySet()) { - boolean publishedMatches = false; - for (Setting secureSetting : secureSettingsCollection) { - if (secureSetting.match(publishedSettingName)) { - publishedMatches = true; - break; - } - } - if (publishedMatches && false == digestsBySettingKey.containsKey(publishedSettingName)) { + } else if (publishedSaltAndHash != null && localHash == null) { + // setting missing locally but present on master logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", - publishedSettingName); + concreteSecureSetting.getKey()); allConsistent.set(false); - } - } - for (Map.Entry publishedSaltAndHashForSetting : publishedHashesOfConsistentSettings.entrySet()) { - String settingName = publishedSaltAndHashForSetting.getKey(); - String publishedSaltAndHash = publishedSaltAndHashForSetting.getValue(); - if (digestsBySettingKey.containsKey(settingName)) { - String[] parts = publishedSaltAndHash.split(":"); + } else { + assert publishedSaltAndHash != null; + assert localHash != null; + final String[] parts = publishedSaltAndHash.split(":"); if (parts == null || parts.length != 2) { throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + " ] for secure setting [" - + settingName + "] is invalid"); + + concreteSecureSetting.getKey() + "] is invalid"); } - String publishedSalt = parts[0]; - String publishedHash = parts[1]; - byte[] localDigest = digestsBySettingKey.get(settingName); - byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localDigest, publishedSalt.getBytes(StandardCharsets.UTF_8)); + final String publishedSalt = parts[0]; + final String publishedHash = parts[1]; + final byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localHash, publishedSalt.getBytes(StandardCharsets.UTF_8)); final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8); if (false == publishedHash.equals(computedSaltedHash)) { logger.warn("the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]", - publishedHash, settingName, computedSaltedHash); + publishedHash, concreteSecureSetting.getKey(), computedSaltedHash); if (state.nodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Master node cannot validate consistent setting. 
The published hash [" - + publishedHash + "] of the consistent secure setting [" + settingName + + publishedHash + "] of the consistent secure setting [" + concreteSecureSetting.getKey() + "] differs from the locally computed one [" + computedSaltedHash + "]."); } allConsistent.set(false); } } - } - return allConsistent.get(); - } - - public boolean isConsistent(SecureSetting secureSetting) { - for (String localSettingName : digestsBySettingKey.keySet()) { - if (secureSetting.match(localSettingName)) { - Map publishedHashesOfConsistentSettings = getPublishedHashesOfConsistentSettings(); - if (false == publishedHashesOfConsistentSettings.containsKey(localSettingName)) { - logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", - localSettingName); - return false; - } - String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(localSettingName); - String[] parts = publishedSaltAndHash.split(":"); - if (parts == null || parts.length != 2) { - throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + " ] for secure setting [" - + localSettingName + "] is invalid"); - } - String publishedSalt = parts[0]; - String publishedHash = parts[1]; - byte[] localDigest = digestsBySettingKey.get(localSettingName); - byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localDigest, publishedSalt.getBytes(StandardCharsets.UTF_8)); - final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8); - if (false == publishedHash.equals(computedSaltedHash)) { - logger.warn("the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]", - publishedHash, localSettingName, computedSaltedHash); - return false; + publishedSettingKeysToVerify.remove(concreteSecureSetting.getKey()); + }); + // another case of settings missing locally, when group settings have not expanded to all the keys published + for (String publishedSettingKey : publishedSettingKeysToVerify) { + for (Setting setting : secureSettingsCollection) { + if (setting.match(publishedSettingKey)) { + // setting missing locally but present on master + logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", + publishedSettingKey); + allConsistent.set(false); } - return true; } } - throw new IllegalArgumentException("Invalid setting [" + secureSetting.getKey() + "] for consistency check."); - } - - private Map getPublishedHashesOfConsistentSettings() { - ClusterState state = clusterService.state(); - if (state.metaData() == null || state.metaData().hashesOfConsistentSettings() == null) { - throw new IllegalStateException("Hashes of consistent secure settings are not yet published by the master node. 
Cannot " + - "check consistency at this time."); - } - return state.metaData().hashesOfConsistentSettings(); + return allConsistent.get(); } /** @@ -214,15 +175,18 @@ private void forEachConcreteSecureSettingDo(Consumer> secureSet } } - private Map computeDigestOfConsistentSecureSettings() { - Map digestsBySettingKey = new HashMap<>(); + private Map computeHashesOfConsistentSecureSettings() { + final Map hashesBySettingKey = new HashMap<>(); forEachConcreteSecureSettingDo(concreteSecureSetting -> { - byte[] localDigest = concreteSecureSetting.getSecretDigest(settings); - if (localDigest != null) { - digestsBySettingKey.put(concreteSecureSetting.getKey(), localDigest); + final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); + if (localHash != null) { + final String salt = UUIDs.randomBase64UUID(); + final byte[] publicHash = computeSaltedPBKDF2Hash(localHash, salt.getBytes(StandardCharsets.UTF_8)); + final String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8); + hashesBySettingKey.put(concreteSecureSetting.getKey(), salt + ":" + encodedPublicHash); } }); - return digestsBySettingKey; + return hashesBySettingKey; } private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) { @@ -245,11 +209,12 @@ private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) { static final class HashesPublisher implements LocalNodeMasterListener { + // eagerly compute hashes to be published final Map computedHashesOfConsistentSettings; final ClusterService clusterService; HashesPublisher(Map computedHashesOfConsistentSettings, ClusterService clusterService) { - this.computedHashesOfConsistentSettings = computedHashesOfConsistentSettings; + this.computedHashesOfConsistentSettings = Map.copyOf(computedHashesOfConsistentSettings); this.clusterService = clusterService; } diff --git a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java index c55038aa1ecc2..687b74e3397cb 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java @@ -65,12 +65,15 @@ public void testSingleStringSetting() throws Exception { assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); // publish new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); - assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); + ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)); + assertThat(consistentService.areAllConsistent(), is(true)); // change value secureSettings.setString(stringSetting.getKey(), "_TYPO_somethingsecure"); + assertThat(consistentService.areAllConsistent(), is(false)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); // publish change new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); + assertThat(consistentService.areAllConsistent(), is(true)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); } @@ -89,12 +92,15 @@ public void 
testSingleAffixSetting() throws Exception { assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); // publish new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); - assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); + ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)); + assertThat(consistentService.areAllConsistent(), is(true)); // change value secureSettings.setString("test.affix.second.bar", "_TYPO_second_secure"); + assertThat(consistentService.areAllConsistent(), is(false)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); // publish change new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); + assertThat(consistentService.areAllConsistent(), is(true)); assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); // add value secureSettings.setString("test.affix.third.bar", "third_secure"); From 0a14765d5fe0c62078be502d342bcf064219116a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 29 Jan 2020 11:20:05 +0200 Subject: [PATCH 086/142] Rename xpack license test method to reflect that only encr snapshot are license conditioned --- .../license/XPackLicenseState.java | 24 +++++++------------ .../encrypted/EncryptedRepository.java | 2 +- .../encrypted/EncryptedRepositoryPlugin.java | 4 ++-- 3 files changed, 12 insertions(+), 18 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 59e5fcadd6ce4..fd68e11fc9e0a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -528,22 +528,12 @@ public synchronized boolean isWatcherAllowed() { } /** - * Determine if creating and using an encrypted repository is available based on the current license. + * Determines if creating an encrypted snapshot is allowed. Note that restoring an encrypted snapshot is not conditioned upon the + * license operation mode (it's free for all). 
*/ - public synchronized boolean isEncryptedRepositoryAllowed() { - Status localStatus = status; - - if (localStatus.active == false) { - return false; - } - - switch (localStatus.mode) { - case TRIAL: - case PLATINUM: - return true; - default: - return false; - } + public synchronized boolean isEncryptedSnapshotAllowed() { + final Status currentStatus = status; + return currentStatus.active && isEncryptedSnapshotAllowedForOperationMode(currentStatus.mode); } /** @@ -885,6 +875,10 @@ public static boolean isCcrAllowedForOperationMode(final OperationMode operation return isPlatinumOrTrialOperationMode(operationMode); } + public static boolean isEncryptedSnapshotAllowedForOperationMode(final OperationMode operationMode) { + return isPlatinumOrTrialOperationMode(operationMode); + } + public static boolean isPlatinumOrTrialOperationMode(final OperationMode operationMode) { return operationMode == OperationMode.PLATINUM || operationMode == OperationMode.TRIAL; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 900c355eb57a3..3999bdac41f9c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -92,7 +92,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, ActionListener listener) { - if (EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + if (EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { if (consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); } else { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 27d05501d5ff6..816f185addaf2 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -47,7 +47,7 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor protected static XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } public EncryptedRepositoryPlugin(Settings settings) { - if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { logger.warn("Encrypted snapshot repositories are not allowed for the current license." 
+ "Snapshotting to any encrypted repository is not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); @@ -81,7 +81,7 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { - if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedRepositoryAllowed()) { + if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { logger.warn("Encrypted snapshot repositories are not allowed for the current license." + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); From 6d562fc64e81b7b643821c3e5f3c2d090b641d8e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 29 Jan 2020 11:50:24 +0200 Subject: [PATCH 087/142] Nits --- .../common/blobstore/BlobPathTests.java | 12 ++++++++++++ .../license/XPackLicenseStateTests.java | 16 ++++++++++++++++ x-pack/plugin/repository-encrypted/build.gradle | 1 + 3 files changed, 29 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java index ec846ca30690e..cfd505ba95466 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java @@ -38,4 +38,16 @@ public void testBuildAsString() { path = path.add("d/"); assertThat(path.buildAsString(), is("a/b/c/d/")); } + + public void testPrepend() { + String pathComponent = randomAlphaOfLengthBetween(1, 3); + assertThat(new BlobPath().prepend(pathComponent).buildAsString(), is(new BlobPath().add(pathComponent).buildAsString())); + + BlobPath path = new BlobPath(); + path = path.add(pathComponent); + + String prependComponent = randomAlphaOfLengthBetween(1, 3); + assertThat(path.prepend(prependComponent).buildAsString(), + is(new BlobPath().add(prependComponent).add(pathComponent).buildAsString())); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 583de499b42f9..d4201af15c28a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -350,6 +350,22 @@ public void testWatcherInactivePlatinumGoldTrial() throws Exception { assertAllowed(STANDARD, false, XPackLicenseState::isWatcherAllowed, false); } + public void testEncryptedSnapshotsWithInactiveLicense() { + assertAllowed(BASIC, false, XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(TRIAL, false, XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(GOLD, false, XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(PLATINUM, false, XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(STANDARD, false, XPackLicenseState::isEncryptedSnapshotAllowed, false); + } + + public void testEncryptedSnapshotsWithActiveLicense() { + assertAllowed(BASIC, true, XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(TRIAL, true, XPackLicenseState::isEncryptedSnapshotAllowed, true); + assertAllowed(GOLD, true, 
XPackLicenseState::isEncryptedSnapshotAllowed, false); + assertAllowed(PLATINUM, true, XPackLicenseState::isEncryptedSnapshotAllowed, true); + assertAllowed(STANDARD, true, XPackLicenseState::isEncryptedSnapshotAllowed, false); + } + public void testGraphPlatinumTrial() throws Exception { assertAllowed(TRIAL, true, XPackLicenseState::isGraphAllowed, true); assertAllowed(PLATINUM, true, XPackLicenseState::isGraphAllowed, true); diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index 0a5ccb046854c..4f57de1efe44e 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -9,6 +9,7 @@ esplugin { } dependencies { + // necessary for the license check compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } From bceda5587517e0584b84b0458ef5632bb11b08f5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 30 Jan 2020 18:44:42 +0200 Subject: [PATCH 088/142] Authenticated metadata --- .../license/XPackLicenseState.java | 2 +- .../encrypted/BlobEncryptionMetadata.java | 99 ++++++++++++------- .../encrypted/EncryptedRepository.java | 59 ++++++----- .../encrypted/EncryptedRepositoryPlugin.java | 2 +- ...ptor.java => PasswordBasedEncryption.java} | 33 ++++--- ...java => PasswordBasedEncryptionTests.java} | 4 +- 6 files changed, 120 insertions(+), 79 deletions(-) rename x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/{PasswordBasedEncryptor.java => PasswordBasedEncryption.java} (92%) rename x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/{PasswordBasedEncryptorTests.java => PasswordBasedEncryptionTests.java} (80%) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index fd68e11fc9e0a..2bfa9a5ff974d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -529,7 +529,7 @@ public synchronized boolean isWatcherAllowed() { /** * Determines if creating an encrypted snapshot is allowed. Note that restoring an encrypted snapshot is not conditioned upon the - * license operation mode (it's free for all). + * license operation mode (i.e. it's free for all). 
*/ public synchronized boolean isEncryptedSnapshotAllowed() { final Status currentStatus = status; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index 33a53ed0a240a..92197da0dcc01 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -7,59 +7,64 @@ package org.elasticsearch.repositories.encrypted; import org.elasticsearch.Version; +import org.elasticsearch.common.CheckedBiFunction; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.util.Arrays; import java.util.Objects; +import java.util.function.BiFunction; +import java.util.function.Function; -public final class BlobEncryptionMetadata implements Writeable { - private final byte[] dataEncryptionKeyMaterial; +/** + * Holds the necessary, and sufficient, metadata required to decrypt the associated encrypted blob. + * The data encryption key (DEK {@link #dataEncryptionKey}) is the most important part of the metadata; + * it must be kept secret (i.e. MUST be stored encrypted). + * The metadata does not hold an explicit link to the associated encrypted blob. It's the responsibility of the creator + * ({@link EncryptedRepository}) to maintain this association. 
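(Illustrative sketch, not part of the patch.) Because the nonce is stored in plain as the first little-endian integer of both the metadata blob and the associated encrypted data blob, the association between the two can be checked without decrypting anything. The class name below is made up for the sketch.

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public final class NonceAssociationSketch {
        // reads the plain-text nonce that prefixes both the metadata blob and the encrypted data blob
        public static int nonceOf(byte[] blobPrefix) {
            return ByteBuffer.wrap(blobPrefix, 0, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).getInt();
        }

        public static boolean sameNonce(byte[] metadataBlobPrefix, byte[] encryptedDataBlobPrefix) {
            return nonceOf(metadataBlobPrefix) == nonceOf(encryptedDataBlobPrefix);
        }
    }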
+ */ +public final class BlobEncryptionMetadata { + + // this is part of the Initialization Vectors of the encrypted data blobs + // although the IVs of the encrypted data blobs are stored in plain in the ciphertext, + // storing it in the metadata as well, is a simpler way to verify the association without + // attempting the decryption (without using this software even, because the {@link #nonce} is the + // first 4-byte integer (little endian) of both the metadata and the associated encrypted blob) private final int nonce; + // the packet length from {@link EncryptionPacketsInputStream} private final int packetLengthInBytes; + // the key used to encrypt and decrypt the associated blob + private final SecretKey dataEncryptionKey; - public BlobEncryptionMetadata(byte[] dataEncryptionKeyMaterial, int nonce, int packetLengthInBytes) { - this.dataEncryptionKeyMaterial = Objects.requireNonNull(dataEncryptionKeyMaterial); + public BlobEncryptionMetadata(int nonce, int packetLengthInBytes, SecretKey dataEncryptionKey) { this.nonce = nonce; this.packetLengthInBytes = packetLengthInBytes; - } - - public byte[] getDataEncryptionKeyMaterial() { - return dataEncryptionKeyMaterial; - } - - public int getPacketLengthInBytes() { - return packetLengthInBytes; + this.dataEncryptionKey = Objects.requireNonNull(dataEncryptionKey); } public int getNonce() { return nonce; } - public BlobEncryptionMetadata(InputStream inputStream) throws IOException { - try (StreamInput in = new InputStreamStreamInput(inputStream)) { - final Version version = Version.readVersion(in); - in.setVersion(version); - this.dataEncryptionKeyMaterial = in.readByteArray(); - this.nonce = in.readInt(); - this.packetLengthInBytes = in.readInt(); - } + public int getPacketLengthInBytes() { + return packetLengthInBytes; } - @Override - public void writeTo(StreamOutput out) throws IOException { - Version.writeVersion(Version.CURRENT, out); - out.writeByteArray(this.dataEncryptionKeyMaterial); - out.writeInt(this.nonce); - out.writeInt(this.packetLengthInBytes); + public SecretKey getDataEncryptionKey() { + return dataEncryptionKey; } @Override @@ -69,28 +74,48 @@ public boolean equals(Object o) { BlobEncryptionMetadata metadata = (BlobEncryptionMetadata) o; return nonce == metadata.nonce && packetLengthInBytes == metadata.packetLengthInBytes && - Arrays.equals(dataEncryptionKeyMaterial, metadata.dataEncryptionKeyMaterial); + Objects.equals(dataEncryptionKey, metadata.dataEncryptionKey); } @Override public int hashCode() { int result = Objects.hash(nonce, packetLengthInBytes); - result = 31 * result + Arrays.hashCode(dataEncryptionKeyMaterial); + result = 31 * result + Objects.hashCode(dataEncryptionKey); return result; } - static byte[] serializeMetadataToByteArray(BlobEncryptionMetadata metadata) throws IOException { - try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { - try (StreamOutput out = new OutputStreamStreamOutput(baos)) { - metadata.writeTo(out); - } - return baos.toByteArray(); + static byte[] serializeMetadata(BlobEncryptionMetadata metadata, CheckedBiFunction encryptor) + throws IOException { + ByteBuffer byteBuffer = ByteBuffer.allocate(2 * Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN); + byteBuffer.putInt(0, metadata.getNonce()); + byteBuffer.putInt(Integer.BYTES, metadata.getPacketLengthInBytes()); + byte[] authenticatedData = byteBuffer.array(); + final byte[] encryptedData; + try { + encryptedData = encryptor.apply(metadata.getDataEncryptionKey().getEncoded(), authenticatedData); + } catch (Exception e) { + 
throw new IOException("Failure to encrypt metadata", e); } + byte[] result = new byte[authenticatedData.length + encryptedData.length]; + System.arraycopy(authenticatedData, 0, result, 0, authenticatedData.length); + System.arraycopy(encryptedData, 0, result, authenticatedData.length, encryptedData.length); + return result; } - static BlobEncryptionMetadata deserializeMetadataFromByteArray(byte[] metadata) throws IOException { - try (ByteArrayInputStream decryptedMetadataInputStream = new ByteArrayInputStream(metadata)) { - return new BlobEncryptionMetadata(decryptedMetadataInputStream); + static BlobEncryptionMetadata deserializeMetadata(byte[] metadata, CheckedBiFunction decryptor) + throws IOException { + byte[] authenticatedData = Arrays.copyOf(metadata, 2 * Integer.BYTES); + ByteBuffer byteBuffer = ByteBuffer.wrap(authenticatedData).order(ByteOrder.LITTLE_ENDIAN); + int nonce = byteBuffer.get(0); + int packetLengthInBytes = byteBuffer.get(Integer.BYTES); + byte[] encryptedData = Arrays.copyOfRange(metadata, 2 * Integer.BYTES, metadata.length); + final byte[] decryptedData; + try { + decryptedData = decryptor.apply(encryptedData, authenticatedData); + } catch (Exception e) { + throw new IOException("Failure to decrypt metadata", e); } + SecretKey dataDecryptionKey = new SecretKeySpec(decryptedData, 0, decryptedData.length, "AES"); + return new BlobEncryptionMetadata(nonce, packetLengthInBytes, dataDecryptionKey); } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 3999bdac41f9c..6390581e6ac0e 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -54,28 +54,42 @@ import java.util.concurrent.ExecutionException; public final class EncryptedRepository extends BlobStoreRepository { - static final Logger logger = LogManager.getLogger(EncryptedRepository.class); + + // the following constants are fixed by definition static final int GCM_TAG_LENGTH_IN_BYTES = 16; static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; + // changing the following constants implies breaking compatibility with previous versions + // in this case the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER} MUST be incremented static final String DATA_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final int DATA_KEY_SIZE_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; - static final int MAX_PACKET_LENGTH_IN_BYTES = 1 << 20; // 1MB + static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB + // this can be changed freely (can be made a repository parameter) without adjusting + // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value + // of {@link #MAX_PACKET_LENGTH_IN_BYTES} static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB - private static final String ENCRYPTION_METADATA_PREFIX = "encryption-metadata"; + // The encryption scheme version number to which the current implementation conforms to. + // The version number MUST be incremented whenever the format of the metadata, or + // the way the metadata is used for the actual decryption are changed. 
+ // Incrementing the version number signals that previous implementations cannot make sense + // of the new scheme. + private static final int CURRENT_ENCRYPTION_VERSION_NUMBER = 2; // nobody trusts v1 of anything + // the path of the blob container holding the encryption metadata + // this is relative to the root path holding the encrypted blobs (i.e. the repository root path) + private static final String ENCRYPTION_METADATA_ROOT = "encryption-metadata-v" + CURRENT_ENCRYPTION_VERSION_NUMBER; private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; - private final PasswordBasedEncryptor metadataEncryptor; + private final PasswordBasedEncryption metadataEncryptor; private final ConsistentSettingsService consistentSettingsService; private final SecureRandom secureRandom; private final SecureSetting passwordSettingForThisRepo; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, - BlobStoreRepository delegatedRepository, PasswordBasedEncryptor metadataEncryptor, + BlobStoreRepository delegatedRepository, PasswordBasedEncryption metadataEncryptor, ConsistentSettingsService consistentSettingsService) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; @@ -182,11 +196,11 @@ private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; - private final PasswordBasedEncryptor metadataEncryptor; + private final PasswordBasedEncryption metadataEncryptor; private final SecureRandom secureRandom; EncryptedBlobStore(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryptor metadataEncryptor, SecureRandom secureRandom) { + PasswordBasedEncryption metadataEncryptor, SecureRandom secureRandom) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; @@ -208,19 +222,19 @@ private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; - private final PasswordBasedEncryptor metadataEncryptor; + private final PasswordBasedEncryption metadataEncryption; private final SecureRandom nonceGenerator; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryptor metadataEncryptor, SecureRandom nonceGenerator) { + PasswordBasedEncryption metadataEncryption, SecureRandom nonceGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; - this.metadataEncryptor = metadataEncryptor; + this.metadataEncryption = metadataEncryption; this.nonceGenerator = nonceGenerator; this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); - this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_PREFIX)); + this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_ROOT)); } @Override @@ -232,22 +246,21 @@ public BlobPath path() { public InputStream readBlob(String blobName) throws IOException { // read 
metadata BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(blobName)); - final byte[] decryptedMetadata; + final BlobEncryptionMetadata metadata; try { - decryptedMetadata = metadataEncryptor.decrypt(BytesReference.toBytes(encryptedMetadataBytes)); - } catch (ExecutionException | GeneralSecurityException e) { + // decrypt and parse metadata + metadata = BlobEncryptionMetadata.deserializeMetadata(BytesReference.toBytes(encryptedMetadataBytes), + metadataEncryption::decrypt); + } catch (IOException e) { + // friendlier exception message String failureMessage = "Failure to decrypt metadata for blob [" + blobName + "]"; if (e.getCause() instanceof AEADBadTagException) { failureMessage = failureMessage + ". The repository password is probably wrong."; } throw new IOException(failureMessage, e); } - final BlobEncryptionMetadata metadata = BlobEncryptionMetadata.deserializeMetadataFromByteArray(decryptedMetadata); - // decrypt metadata - SecretKey dataDecryptionKey = new SecretKeySpec(metadata.getDataEncryptionKeyMaterial(), 0, - metadata.getDataEncryptionKeyMaterial().length, "AES"); // read and decrypt blob - return new DecryptionPacketsInputStream(delegatedBlobContainer.readBlob(blobName), dataDecryptionKey, + return new DecryptionPacketsInputStream(delegatedBlobContainer.readBlob(blobName), metadata.getDataEncryptionKey(), metadata.getNonce(), metadata.getPacketLengthInBytes()); } @@ -256,12 +269,12 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); int nonce = nonceGenerator.nextInt(); // this is the metadata required to decrypt back the encrypted blob - BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(dataEncryptionKey.getEncoded(), nonce, PACKET_LENGTH_IN_BYTES); + BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt metadata final byte[] encryptedMetadata; try { - encryptedMetadata = metadataEncryptor.encrypt(BlobEncryptionMetadata.serializeMetadataToByteArray(metadata)); - } catch (ExecutionException | GeneralSecurityException e) { + encryptedMetadata = BlobEncryptionMetadata.serializeMetadata(metadata, metadataEncryption::encrypt); + } catch (IOException e) { throw new IOException("Failure to encrypt metadata for blob [" + blobName + "]", e); } // first write the encrypted metadata @@ -321,7 +334,7 @@ public Map children() throws IOException { for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { // get an encrypted blob container for each result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, - encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryptor, nonceGenerator)); + encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, nonceGenerator)); } return result; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 816f185addaf2..25a682f8c6e51 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -105,7 +105,7 @@ 
public Repository create(RepositoryMetaData metaData, Function - * A new encryption key is generated for every {@link PasswordBasedEncryptor} instance (using a newly generated random + * A new encryption key is generated for every {@link PasswordBasedEncryption} instance (using a newly generated random * "salt"). The key is then reused for as many as {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} encryption invocations; * when the limit is exceeded, a new key is computed from a newly generated "salt". In order to support the decryption * operation, the "salt" is prepended to the returned ciphertext. Decryption reads-in the "salt" and uses the secret @@ -52,12 +49,12 @@ * cached for possible reuses because generating the key from the password is an expensive operation (by design). *
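As a rough sketch of the derive-from-password-and-salt idea described here (the iteration count, salt length and key size below are placeholders for the example, not necessarily the values PasswordBasedEncryption uses):

    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.security.SecureRandom;

    // Sketch: derive an AES key from the password plus a random salt; the salt travels with
    // the ciphertext so the same key can be re-derived (or fetched from a cache) on decryption.
    final class KeyFromPasswordSketch {
        static byte[] newSalt(SecureRandom secureRandom) {
            byte[] salt = new byte[16]; // placeholder length
            secureRandom.nextBytes(salt);
            return salt;
        }

        static SecretKeySpec deriveKey(char[] password, byte[] salt) throws Exception {
            // 10000 iterations and a 256-bit key are placeholders, chosen only for the example
            PBEKeySpec spec = new PBEKeySpec(password, salt, 10000, 256);
            byte[] keyBytes = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512").generateSecret(spec).getEncoded();
            return new SecretKeySpec(keyBytes, "AES");
        }
    }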

* The reason why there is an encryption invocation limit for the same key is because the AES/GCM/NoPadding encryption mode - * must not be used with the same key and the same Initialization Vector. During encryption, the {@link PasswordBasedEncryptor} + * must not be used with the same key and the same Initialization Vector. During encryption, the {@link PasswordBasedEncryption} * randomly generates a new 12-byte wide IV, and so in order to limit the risk of a collision, the key must be changed * after at most {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} IVs have been generated and used with that same key. For more * details, see Section 8.2 of https://csrc.nist.gov/publications/detail/sp/800-38d/final . */ -public final class PasswordBasedEncryptor { +public final class PasswordBasedEncryption { // the count of keys stored so as to avoid re-computation static final int ENCRYPTION_KEY_CACHE_SIZE = 512; @@ -94,7 +91,7 @@ public final class PasswordBasedEncryptor { // the salt of the secret key which is used for encryption private final AtomicReference> currentEncryptionKeySalt; - public PasswordBasedEncryptor(char[] password, SecureRandom secureRandom) { + public PasswordBasedEncryption(char[] password, SecureRandom secureRandom) { this.password = password; this.secureRandom = secureRandom; this.keyBySaltCache = CacheBuilder.>builder() @@ -108,8 +105,7 @@ public PasswordBasedEncryptor(char[] password, SecureRandom secureRandom) { ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY)); } - public byte[] encrypt(byte[] data) throws NoSuchPaddingException, NoSuchAlgorithmException, BadPaddingException, - IllegalBlockSizeException, ExecutionException, InvalidAlgorithmParameterException, InvalidKeyException { + public byte[] encrypt(byte[] data, @Nullable byte[] associatedData) throws ExecutionException, GeneralSecurityException { Objects.requireNonNull(data); // retrieve the encryption key Tuple saltAndEncryptionKey = useEncryptionKey(); @@ -120,7 +116,11 @@ public byte[] encrypt(byte[] data) throws NoSuchPaddingException, NoSuchAlgorith GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BYTES * Byte.SIZE, iv); Cipher cipher = Cipher.getInstance(CIPHER_ALGO + "/" + CIPHER_MODE + "/" + CIPHER_PADDING); cipher.init(Cipher.ENCRYPT_MODE, saltAndEncryptionKey.v2(), gcmParameterSpec); - // encrypt + // update the cipher with the associated data + if (associatedData != null && associatedData.length > 0) { + cipher.updateAAD(associatedData); + } + // encrypt the data byte[] encryptedData = cipher.doFinal(data); // concatenate key salt, iv and metadata cipher text byte[] resultCiphertext = new byte[saltAndEncryptionKey.v1().length + iv.length + encryptedData.length]; @@ -131,8 +131,7 @@ public byte[] encrypt(byte[] data) throws NoSuchPaddingException, NoSuchAlgorith return resultCiphertext; } - public byte[] decrypt(byte[] encryptedData) throws ExecutionException, NoSuchPaddingException, NoSuchAlgorithmException, - InvalidAlgorithmParameterException, InvalidKeyException, BadPaddingException, IllegalBlockSizeException { + public byte[] decrypt(byte[] encryptedData, @Nullable byte[] associatedData) throws ExecutionException, GeneralSecurityException { if (Objects.requireNonNull(encryptedData).length < SALT_LENGTH_IN_BYTES + IV_LENGTH_IN_BYTES + TAG_LENGTH_IN_BYTES) { throw new IllegalArgumentException("Ciphertext too short"); } @@ -145,7 +144,11 @@ public byte[] decrypt(byte[] encryptedData) throws ExecutionException, NoSuchPad IV_LENGTH_IN_BYTES); Cipher cipher = Cipher.getInstance(CIPHER_ALGO + "/" + 
CIPHER_MODE + "/" + CIPHER_PADDING); cipher.init(Cipher.DECRYPT_MODE, decryptionKey, gcmParameterSpec); - // decrypt metadata (use cipher) + // update the cipher with the associated data + if (associatedData != null && associatedData.length > 0) { + cipher.updateAAD(associatedData); + } + // decrypt data return cipher.doFinal(encryptedData, SALT_LENGTH_IN_BYTES + IV_LENGTH_IN_BYTES, encryptedData.length - SALT_LENGTH_IN_BYTES - IV_LENGTH_IN_BYTES); } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptionTests.java similarity index 80% rename from x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java rename to x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptionTests.java index 6d614137215c7..70ecfc25727ba 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptorTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryptionTests.java @@ -11,10 +11,10 @@ import java.security.SecureRandom; -public class PasswordBasedEncryptorTests extends ESTestCase { +public class PasswordBasedEncryptionTests extends ESTestCase { public void testEncryptAndDecryptEmpty() throws Exception { - PasswordBasedEncryptor encryptor = new PasswordBasedEncryptor(new char[] {'p', 'a', 's', 's'}, + PasswordBasedEncryption encryptor = new PasswordBasedEncryption(new char[] {'p', 'a', 's', 's'}, SecureRandom.getInstance("SHA1PRNG")); byte[] emptyEncrypted = encryptor.encrypt(new byte[0]); byte[] ans = encryptor.decrypt(emptyEncrypted); From 29fa2aa9e45c07c424a390c0bd7e55cf9c583766 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 31 Jan 2020 08:25:06 +0200 Subject: [PATCH 089/142] Fix all compilation errors --- .../encrypted/EncryptedRepository.java | 216 +++++++++++++----- .../encrypted/EncryptedRepositoryPlugin.java | 6 +- .../PasswordBasedEncryptionTests.java | 4 +- 3 files changed, 161 insertions(+), 65 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 6390581e6ac0e..ec710489f88ff 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -10,15 +10,18 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.ConsistentSettingsService; import org.elasticsearch.common.settings.SecureSetting; @@ -33,25 +36,36 @@ import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositoryVerificationException; +import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; import javax.crypto.AEADBadTagException; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; +import javax.crypto.SecretKeyFactory; +import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; +import java.security.spec.InvalidKeySpecException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicReference; public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); @@ -70,6 +84,11 @@ public final class EncryptedRepository extends BlobStoreRepository { // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB + // each snapshot metadata contains the salted password hash of the master node that started the snapshot + // this hash is then verified on each data node before the actual shard snapshot as well as on the + // master node that finalizes the snapshot (could be a different master node, if a master failover + // has occurred in the mean time) + private static final String PASSWORD_HASH_RESERVED_USER_METADATA_KEY = "__passwordHash"; // The encryption scheme version number to which the current implementation conforms to. // The version number MUST be incremented whenever the format of the metadata, or @@ -81,71 +100,72 @@ public final class EncryptedRepository extends BlobStoreRepository { // this is relative to the root path holding the encrypted blobs (i.e. 
the repository root path) private static final String ENCRYPTION_METADATA_ROOT = "encryption-metadata-v" + CURRENT_ENCRYPTION_VERSION_NUMBER; + private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; - private final PasswordBasedEncryption metadataEncryptor; - private final ConsistentSettingsService consistentSettingsService; + private final PasswordBasedEncryption metadataEncryption; private final SecureRandom secureRandom; - private final SecureSetting passwordSettingForThisRepo; + private final String passwordPublicHash; + private final HashVerifier passwordHashVerifier; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, - BlobStoreRepository delegatedRepository, PasswordBasedEncryption metadataEncryptor, - ConsistentSettingsService consistentSettingsService) throws NoSuchAlgorithmException { + BlobStoreRepository delegatedRepository, char[] password) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); this.dataEncryptionKeyGenerator.init(DATA_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); - this.metadataEncryptor = metadataEncryptor; - this.consistentSettingsService = consistentSettingsService; + this.metadataEncryption = new PasswordBasedEncryption(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); - this.passwordSettingForThisRepo = - (SecureSetting) EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()); + this.passwordPublicHash = computeSaltedPBKDF2Hash(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.passwordHashVerifier = new HashVerifier(password); } @Override - public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, - ActionListener listener) { - if (EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { - if (consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { - super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, listener); - } else { - listener.onFailure(new RepositoryException(metadata.name(), - "Password mismatch for repository. 
The local node's value of the " + - "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's")); - } - } else { - listener.onFailure(LicenseUtils.newComplianceException( - EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); + public Map adaptUserMetadata(Map userMetadata) { + Map snapshotUserMetadata = new HashMap<>(); + snapshotUserMetadata.putAll(userMetadata); + // pin down the hash of the password that is checked for all the operations of this snapshot + snapshotUserMetadata.put(PASSWORD_HASH_RESERVED_USER_METADATA_KEY, this.passwordPublicHash); + return snapshotUserMetadata; + } + + private void validatePasswordHash(Map snapshotUserMetadata, ActionListener listener) { + Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); + if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { + listener.onFailure(new IllegalStateException("Snapshot metadata does not contain the password hash or is invalid")); + return; + } + if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { + listener.onFailure(new RepositoryException(metadata.name(), + "Password mismatch for repository. The local node's value of the keystore secure setting [" + + EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()).getKey() + + "] is different from the master node that started the snapshot")); + return; } } @Override - public void restoreShard(Store store, SnapshotId snapshotId, IndexId indexId, ShardId snapshotShardId, - RecoveryState recoveryState, ActionListener listener) { - if (false == consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { - // the repository has a different password on the local node compared to the master node - // even though restoring the shard will surely fail (because we know that, by now, the master's password - // must be correct, otherwise this method will not get called) we let it pass-through in order to avoid - // having to manipulate the {@code recoveryState} argument - logger.error("Password mismatch for repository [" + metadata.name() + "]. The local node's value of the " + - "keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's"); - } - super.restoreShard(store, snapshotId, indexId, snapshotShardId, recoveryState, ActionListener.delegateResponse(listener, - (l, e) -> l.onFailure(new RepositoryException(metadata.name(), "Password mismatch for repository. 
" + - "The local node's value of the keystore secure setting [" + - passwordSettingForThisRepo.getKey() + "] is different from the master's")))); + public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenerations, long startTime, String failure, + int totalShards, List shardFailures, long repositoryStateId, + boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, + boolean writeShardGens, ActionListener listener) { + validatePasswordHash(userMetadata, listener); + super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, + includeGlobalState, clusterMetaData, userMetadata, writeShardGens, listener); } @Override - public void verify(String seed, DiscoveryNode localNode) { - if (consistentSettingsService.isConsistent(passwordSettingForThisRepo)) { - super.verify(seed, localNode); - } else { - // the repository has a different password on the local node compared to the master node - throw new RepositoryVerificationException(metadata.name(), "Repository password mismatch. The local node's [" + localNode + - "] value of the keystore secure setting [" + passwordSettingForThisRepo.getKey() + "] is different from the master's"); - } + public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, + Map userMetadata, ActionListener listener) { + validatePasswordHash(userMetadata, listener); + super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, userMetadata, + listener); + } + + @Override + public boolean isReadOnly() { + return false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed(); } @Override @@ -170,7 +190,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted @Override protected BlobStore createBlobStore() { - return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryptor, + return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryption, secureRandom); } @@ -266,9 +286,10 @@ public InputStream readBlob(String blobName) throws IOException { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { - SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); - int nonce = nonceGenerator.nextInt(); - // this is the metadata required to decrypt back the encrypted blob + final SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); + final int nonce = nonceGenerator.nextInt(); + final long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); + // this is the metadata required to decrypt back the (to be) encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt metadata final byte[] encryptedMetadata; @@ -277,16 +298,44 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } catch (IOException e) { throw new IOException("Failure to encrypt metadata for blob [" + blobName + "]", e); } - // first write the encrypted metadata - try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - 
encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, - failIfAlreadyExists); - } - // afterwards write the encrypted data blob - long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); - try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, - dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); + if (failIfAlreadyExists) { + // first TRY write the encrypted metadata + try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, true); + } catch (IOException e) { + boolean metadataExists = (false == encryptionMetadataBlobContainer.listBlobsByPrefix(blobName).isEmpty()); + if (metadataExists) { + // existing metadata might be orphaned, the data blob might not exist, TRY write the data blob + try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, + dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { + delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, true); + } + // writing the data blob succeeded, must have been an orphaned metadata + // TRY again, but now OVERWRITE the metadata + try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); + } + return; + } else { + // "regular" IOException + throw e; + } + } + // afterwards write the encrypted data blob + try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, + dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { + delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, false); + } + } else { + // first overwrite the encrypted metadata + try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); + } + // afterwards overwrite the encrypted data blob + try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, + dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { + delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, false); + } } } @@ -374,4 +423,53 @@ public void cleanUpOrphanedMetadata() throws IOException{ } } + private static String computeSaltedPBKDF2Hash(char[] password, SecureRandom secureRandom) { + byte[] salt = new byte[16]; + secureRandom.nextBytes(salt); + return computeSaltedPBKDF2Hash(salt, password); + } + + private static String computeSaltedPBKDF2Hash(byte[] salt, char[] password) { + final int iterations = 10000; + final int keyLength = 512; + final PBEKeySpec spec = new PBEKeySpec(password, salt, iterations, keyLength); + final byte[] hash; + try { + SecretKeyFactory pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); + hash = pbkdf2KeyFactory.generateSecret(spec).getEncoded(); + } catch (InvalidKeySpecException | NoSuchAlgorithmException e) { + throw new RuntimeException("Unexpected exception when computing PBKDF2 hash for the repository 
password", e); + } + return new String(Base64.getEncoder().encode(salt), StandardCharsets.UTF_8) + ":" + + new String(Base64.getEncoder().encode(hash), StandardCharsets.UTF_8); + } + + private static class HashVerifier { + private final char[] password; + private final AtomicReference lastVerifiedHash; + + HashVerifier(char[] password) { + this.password = password; + this.lastVerifiedHash = new AtomicReference<>(null); + } + + boolean verify(String saltedHash) { + Objects.requireNonNull(saltedHash); + if (saltedHash.equals(lastVerifiedHash.get())) { + return true; + } + String[] parts = saltedHash.split(":"); + if (parts == null || parts.length != 2) { + return false; + } + String salt = parts[0]; + String computedHash = computeSaltedPBKDF2Hash(Base64.getDecoder().decode(salt.getBytes(StandardCharsets.UTF_8)), password); + if (false == computedHash.equals(saltedHash)) { + return false; + } + lastVerifiedHash.set(computedHash); + return true; + } + } + } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 25a682f8c6e51..f0890eba8e7f0 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -70,8 +70,6 @@ public Map getRepositories(final Environment env, fi SecureString encryptionPassword = encryptionPasswordSetting.get(env.settings()); cachedRepositoryPasswords.put(repositoryName, encryptionPassword.getChars()); } - final ConsistentSettingsService consistentSettingsService = new ConsistentSettingsService(env.settings(), clusterService, - Set.of(ENCRYPTION_PASSWORD_SETTING)); return Collections.singletonMap(REPOSITORY_TYPE_NAME, new Repository.Factory() { @Override @@ -105,10 +103,10 @@ public Repository create(RepositoryMetaData metaData, Function Date: Sun, 2 Feb 2020 23:39:42 +0200 Subject: [PATCH 090/142] Prepend name of the metadata to the encrypted blob --- .../encrypted/ChainingInputStream.java | 39 ++++++ .../encrypted/EncryptedRepository.java | 126 ++++++++---------- 2 files changed, 96 insertions(+), 69 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index ff4b4ec8dc18b..2b0208f467906 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -72,6 +72,45 @@ public abstract class ChainingInputStream extends InputStream { */ private boolean closed; + /** + * Returns a new {@link ChainingInputStream} that concatenates the bytes to be read from the first + * input stream with the bytes from the second input stream. 
+ * + * @param first the input stream supplying the first bytes of the returned {@link ChainingInputStream} + * @param second the input stream supplying the bytes after the {@code first} input stream has been exhausted (and closed) + * @param markSupported whether the returned {@link ChainingInputStream} supports mark and reset + */ + public static InputStream chain(InputStream first, InputStream second, boolean markSupported) { + Objects.requireNonNull(first); + Objects.requireNonNull(second); + if (markSupported && false == first.markSupported()) { + throw new IllegalArgumentException("The first input stream does not support mark"); + } + if (markSupported && false == second.markSupported()) { + throw new IllegalArgumentException("The second input stream does not support mark"); + } + return new ChainingInputStream() { + + @Override + InputStream nextComponent(InputStream currentComponentIn) { + if (currentComponentIn == null) { + return first; + } else if (currentComponentIn == first) { + return second; + } else if (currentComponentIn == second){ + return null; + } else { + throw new IllegalStateException("Unexpected component input stream"); + } + } + + @Override + public boolean markSupported() { + return markSupported; + } + }; + } + /** * This method is responsible for generating the component input streams. * It is passed the current input stream and must return the successive one, diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index ec710489f88ff..22be28d23195b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -12,30 +12,21 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.settings.ConsistentSettingsService; -import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; -import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.ShardGenerations; import 
org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; @@ -47,25 +38,23 @@ import javax.crypto.SecretKey; import javax.crypto.SecretKeyFactory; import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; -import java.security.GeneralSecurityException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.spec.InvalidKeySpecException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Random; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); @@ -80,11 +69,12 @@ public final class EncryptedRepository extends BlobStoreRepository { static final int DATA_KEY_SIZE_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB + static final int METADATA_NAME_LENGTH_IN_BYTES = 18; // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB - // each snapshot metadata contains the salted password hash of the master node that started the snapshot + // each snapshot metadata contains the salted password hash of the master node that started the snapshot operation // this hash is then verified on each data node before the actual shard snapshot as well as on the // master node that finalizes the snapshot (could be a different master node, if a master failover // has occurred in the mean time) @@ -100,11 +90,11 @@ public final class EncryptedRepository extends BlobStoreRepository { // this is relative to the root path holding the encrypted blobs (i.e. 
the repository root path) private static final String ENCRYPTION_METADATA_ROOT = "encryption-metadata-v" + CURRENT_ENCRYPTION_VERSION_NUMBER; - private final BlobStoreRepository delegatedRepository; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; - private final SecureRandom secureRandom; + private final Supplier encryptionNonceGenerator; + private final Supplier metadataNameGenerator; private final String passwordPublicHash; private final HashVerifier passwordHashVerifier; @@ -115,7 +105,14 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); this.dataEncryptionKeyGenerator.init(DATA_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.metadataEncryption = new PasswordBasedEncryption(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); - this.secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); + final SecureRandom secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); + this.encryptionNonceGenerator = () -> secureRandom.nextInt(); + final Random random = new Random(); + this.metadataNameGenerator = () -> { + byte[] randomMetadataName = new byte[METADATA_NAME_LENGTH_IN_BYTES]; + random.nextBytes(randomMetadataName); + return randomMetadataName; + }; this.passwordPublicHash = computeSaltedPBKDF2Hash(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.passwordHashVerifier = new HashVerifier(password); } @@ -191,7 +188,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted @Override protected BlobStore createBlobStore() { return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryption, - secureRandom); + encryptionNonceGenerator, metadataNameGenerator); } @Override @@ -217,14 +214,17 @@ private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryptor; - private final SecureRandom secureRandom; + private final Supplier encryptionNonceGenerator; + private final Supplier metadataNameGenerator; EncryptedBlobStore(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryptor, SecureRandom secureRandom) { + PasswordBasedEncryption metadataEncryptor, Supplier encryptionNonceGenerator, + Supplier metadataNameGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryptor = metadataEncryptor; - this.secureRandom = secureRandom; + this.encryptionNonceGenerator = encryptionNonceGenerator; + this.metadataNameGenerator = metadataNameGenerator; } @Override @@ -234,7 +234,8 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, secureRandom); + return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, + encryptionNonceGenerator, metadataNameGenerator); } } @@ -243,16 +244,19 @@ private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; private final KeyGenerator 
dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; - private final SecureRandom nonceGenerator; + private final Supplier encryptionNonceGenerator; + private final Supplier metadataNameGenerator; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryption, SecureRandom nonceGenerator) { + PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, + Supplier metadataNameGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryption = metadataEncryption; - this.nonceGenerator = nonceGenerator; + this.encryptionNonceGenerator = encryptionNonceGenerator; + this.metadataNameGenerator = metadataNameGenerator; this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_ROOT)); } @@ -264,8 +268,16 @@ public BlobPath path() { @Override public InputStream readBlob(String blobName) throws IOException { + final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); + // read the metadata name which is prefixed to the encrypted blob + final byte[] metadataNameBytes = encryptedDataInputStream.readNBytes(METADATA_NAME_LENGTH_IN_BYTES); + if (metadataNameBytes.length != METADATA_NAME_LENGTH_IN_BYTES) { + throw new IOException("Failure to read encrypted blob metadata name"); + } + final String metadataName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataNameBytes), + StandardCharsets.UTF_8); // read metadata - BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(blobName)); + BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataName)); final BlobEncryptionMetadata metadata; try { // decrypt and parse metadata @@ -280,15 +292,14 @@ public InputStream readBlob(String blobName) throws IOException { throw new IOException(failureMessage, e); } // read and decrypt blob - return new DecryptionPacketsInputStream(delegatedBlobContainer.readBlob(blobName), metadata.getDataEncryptionKey(), - metadata.getNonce(), metadata.getPacketLengthInBytes()); + return new DecryptionPacketsInputStream(encryptedDataInputStream, metadata.getDataEncryptionKey(), metadata.getNonce(), + metadata.getPacketLengthInBytes()); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { final SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); - final int nonce = nonceGenerator.nextInt(); - final long encryptedBlobSize = EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); + final int nonce = encryptionNonceGenerator.get(); // this is the metadata required to decrypt back the (to be) encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt metadata @@ -298,44 +309,20 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } catch (IOException e) { throw new IOException("Failure to encrypt metadata for blob [" + blobName + "]", e); } - if (failIfAlreadyExists) { - // first TRY write the encrypted metadata - try 
(ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, true); - } catch (IOException e) { - boolean metadataExists = (false == encryptionMetadataBlobContainer.listBlobsByPrefix(blobName).isEmpty()); - if (metadataExists) { - // existing metadata might be orphaned, the data blob might not exist, TRY write the data blob - try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, - dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, true); - } - // writing the data blob succeeded, must have been an orphaned metadata - // TRY again, but now OVERWRITE the metadata - try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); - } - return; - } else { - // "regular" IOException - throw e; - } - } - // afterwards write the encrypted data blob - try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, - dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, false); - } - } else { - // first overwrite the encrypted metadata - try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - encryptionMetadataBlobContainer.writeBlob(blobName, encryptedMetadataInputStream, encryptedMetadata.length, false); - } - // afterwards overwrite the encrypted data blob - try (EncryptionPacketsInputStream encryptedInputStream = new EncryptionPacketsInputStream(inputStream, - dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES)) { - delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, false); - } + final byte[] metadataNameBytes = metadataNameGenerator.get(); + final String metadataName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataNameBytes), + StandardCharsets.UTF_8); + // first write the encrypted metadata to a UNIQUE blob name + try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { + encryptionMetadataBlobContainer.writeBlob(metadataName, encryptedMetadataInputStream, encryptedMetadata.length, true); + } + // afterwards overwrite the encrypted data blob + // prepended to the encrypted data blob is the unique name of the metadata blob + final long encryptedBlobSize = metadataNameBytes.length + EncryptionPacketsInputStream.getEncryptionLength(blobSize, + PACKET_LENGTH_IN_BYTES); + try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataNameBytes), + new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES), true)) { + delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } } @@ -383,7 +370,8 @@ public Map children() throws IOException { for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { // get an encrypted blob container for each result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, - encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, nonceGenerator)); + 
encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, + encryptionNonceGenerator, metadataNameGenerator)); } return result; } From 0308140aee838f0bcbac4efbdfbd0aa5d2d8a5a8 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 3 Feb 2020 16:47:56 +0200 Subject: [PATCH 091/142] Nits --- .../encrypted/EncryptedRepository.java | 110 +++++++++++------- .../encrypted/EncryptedRepositoryPlugin.java | 19 ++- 2 files changed, 78 insertions(+), 51 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 22be28d23195b..12b61bc1b78ba 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; +import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; @@ -58,44 +59,57 @@ public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); - // the following constants are fixed by definition static final int GCM_TAG_LENGTH_IN_BYTES = 16; static final int GCM_IV_LENGTH_IN_BYTES = 12; static final int AES_BLOCK_SIZE_IN_BYTES = 128; - // changing the following constants implies breaking compatibility with previous versions + // changing the following constants implies breaking compatibility with previous versions of encrypted snapshots // in this case the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER} MUST be incremented static final String DATA_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; static final int DATA_KEY_SIZE_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB - static final int METADATA_NAME_LENGTH_IN_BYTES = 18; + static final int METADATA_NAME_LENGTH_IN_BYTES = 18; // 16 bits is the UUIDS length; 18 is the next multiple for Base64 encoding // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB // each snapshot metadata contains the salted password hash of the master node that started the snapshot operation - // this hash is then verified on each data node before the actual shard snapshot as well as on the + // this hash is then verified on each data node before the actual shard files snapshot, as well as on the // master node that finalizes the snapshot (could be a different master node, if a master failover // has occurred in the mean time) - private static final String PASSWORD_HASH_RESERVED_USER_METADATA_KEY = "__passwordHash"; - + private static final String PASSWORD_HASH_RESERVED_USER_METADATA_KEY = EncryptedRepository.class.getName() + ".saltedPasswordHash"; // The encryption scheme version number to which the current implementation conforms to. 
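For a sense of the overhead these constants imply, a rough size calculation follows. It rounds up to whole packets and does not reproduce how EncryptionPacketsInputStream.getEncryptionLength frames the final (possibly empty) packet, so treat it as an approximation:

    // Back-of-the-envelope estimate using the constants above: each packet carries a
    // 12-byte IV and a 16-byte GCM tag, and the whole blob is prefixed by the 18-byte
    // metadata identifier. The exact value comes from getEncryptionLength and can differ
    // by one packet's worth of overhead.
    final class EncryptedSizeSketch {
        static long approximateEncryptedSize(long plaintextLength) {
            long packetLength = 64 * 1024;        // PACKET_LENGTH_IN_BYTES
            long perPacketOverhead = 12 + 16;     // GCM_IV_LENGTH_IN_BYTES + GCM_TAG_LENGTH_IN_BYTES
            long packets = (plaintextLength + packetLength - 1) / packetLength; // rounded up
            return 18 + plaintextLength + packets * perPacketOverhead; // 18 = METADATA_NAME_LENGTH_IN_BYTES
        }
    }

For a 1 MiB blob this estimates 16 packets, so roughly 450 bytes of framing on top of the plaintext, plus the 18-byte identifier prefix.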
// The version number MUST be incremented whenever the format of the metadata, or // the way the metadata is used for the actual decryption are changed. // Incrementing the version number signals that previous implementations cannot make sense - // of the new scheme. + // of the new scheme, so they will fail all operations on the repository. private static final int CURRENT_ENCRYPTION_VERSION_NUMBER = 2; // nobody trusts v1 of anything // the path of the blob container holding the encryption metadata // this is relative to the root path holding the encrypted blobs (i.e. the repository root path) private static final String ENCRYPTION_METADATA_ROOT = "encryption-metadata-v" + CURRENT_ENCRYPTION_VERSION_NUMBER; + // this is the repository instance to which all blob reads and writes are forwarded to private final BlobStoreRepository delegatedRepository; + // every data blob is encrypted with its randomly generated AES key (this is the "Data Encryption Key") private final KeyGenerator dataEncryptionKeyGenerator; + // the {@link PasswordBasedEncryption} is used to encrypt (and decrypt) the data encryption key and the other associated metadata + // the metadata encryption is based on AES keys which are generated from the repository password private final PasswordBasedEncryption metadataEncryption; + // Data blob encryption requires a "nonce", only if the SAME data encryption key is used for several data blobs. + // Because data encryption keys are generated randomly (see {@link #dataEncryptionKey}) the nonce in this case can be a constant value. + // But it is not a constant for reasons of greater robustness (future code changes might assume that the nonce is really a nonce), and + // to allow that the encryption IV (which is part of the ciphertext) be checked for ACCIDENTAL tampering without attempting decryption private final Supplier encryptionNonceGenerator; - private final Supplier metadataNameGenerator; - private final String passwordPublicHash; + // the metadata is stored in a separate blob so that when the metadata is regenerated (for example, rencrypting it after the repository + // password is changed) it will not incur updating the encrypted blob, but only recreating a new metadata blob. + // However, the encrypted blob is prepended a fixed length identifier which is used to locate the corresponding metadata. + // This identifier is fixed, so it will not change when the metadata is recreated. + private final Supplier metadataIdentifierGenerator; + // the salted hash of this repository's password on the local node. The password is fixed for the lifetime of the repository. 
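Returning to the metadata identifier described a few lines above, the read path resolves it back to the name of the metadata blob roughly as follows (a simplified, self-contained sketch of what readBlob does earlier in this series; the class name is illustrative):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    // Simplified sketch of the read path: the first 18 bytes of the encrypted blob are the
    // identifier, and its padding-free url-safe Base64 form names the metadata blob.
    final class MetadataIdentifierSketch {
        static String metadataBlobName(InputStream encryptedBlob) throws IOException {
            byte[] id = encryptedBlob.readNBytes(18); // METADATA_NAME_LENGTH_IN_BYTES
            if (id.length != 18) {
                throw new IOException("Failure to read the metadata identifier prefix");
            }
            return new String(Base64.getUrlEncoder().withoutPadding().encode(id), StandardCharsets.UTF_8);
        }
    }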
+ private final String repositoryPasswordSaltedHash; + // this is used to check that the salted hash of the repository password on the node that started the snapshot matches up with the + // repository password on the local node private final HashVerifier passwordHashVerifier; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, @@ -108,39 +122,44 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry final SecureRandom secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); this.encryptionNonceGenerator = () -> secureRandom.nextInt(); final Random random = new Random(); - this.metadataNameGenerator = () -> { + this.metadataIdentifierGenerator = () -> { byte[] randomMetadataName = new byte[METADATA_NAME_LENGTH_IN_BYTES]; random.nextBytes(randomMetadataName); return randomMetadataName; }; - this.passwordPublicHash = computeSaltedPBKDF2Hash(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), password); this.passwordHashVerifier = new HashVerifier(password); } + /** + * The repository hook method which populates the snapshot metadata with the salted password hash of the repository on the (master) + * node that starts the snapshot operation. All the other actions associated with the same snapshot operation will first verify + * that the local repository password matches the hash from the snapshot metadata. + *
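The pinned hash is a plain string of the form base64url(salt) + ":" + base64url(PBKDF2 hash), so any node holding the same repository password can recompute and compare it. A compact sketch of that round trip, reusing the 10000 iterations, 512-bit key length and PBKDF2WithHmacSHA512 algorithm from computeSaltedPBKDF2Hash (the class name and the url-safe Base64 decoding of the salt are illustrative):

    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    // Sketch of the hash round trip: recompute PBKDF2 over the local password with the salt
    // carried in the "salt:hash" string and compare the result with the whole string.
    final class SaltedHashSketch {
        static String saltedHash(byte[] salt, char[] password) throws Exception {
            PBEKeySpec spec = new PBEKeySpec(password, salt, 10000, 512);
            byte[] hash = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512").generateSecret(spec).getEncoded();
            Base64.Encoder encoder = Base64.getUrlEncoder().withoutPadding();
            return new String(encoder.encode(salt), StandardCharsets.UTF_8) + ":"
                + new String(encoder.encode(hash), StandardCharsets.UTF_8);
        }

        static boolean verify(String stored, char[] password) throws Exception {
            String[] parts = stored.split(":");
            if (parts.length != 2) {
                return false;
            }
            byte[] salt = Base64.getUrlDecoder().decode(parts[0]);
            return saltedHash(salt, password).equals(stored);
        }
    }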

+ * In addition, if the installed license does not comply with encrypted snapshots, this throws an exception, which aborts the snapshot + * operation. + * + * See {@link org.elasticsearch.repositories.Repository#adaptUserMetadata(Map)}. + * + * @param userMetadata the snapshot metadata as received from the calling user + * @return the snapshot metadata containing the salted password hash of the node initializing the snapshot + */ @Override public Map adaptUserMetadata(Map userMetadata) { + // because populating the snapshot metadata must be done before the actual snapshot is first initialized, + // we take the opportunity to validate the license and abort if non-compliant + if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { + throw LicenseUtils.newComplianceException("encrypted snapshots"); + } Map snapshotUserMetadata = new HashMap<>(); snapshotUserMetadata.putAll(userMetadata); - // pin down the hash of the password that is checked for all the operations of this snapshot - snapshotUserMetadata.put(PASSWORD_HASH_RESERVED_USER_METADATA_KEY, this.passwordPublicHash); + // pin down the salted hash of the repository password + // this is then checked before every snapshot operation (i.e. {@link #snapshotShard} and {@link #finalizeSnapshot}) + // to assure that all participating nodes in the snapshot have the same repository password set + snapshotUserMetadata.put(PASSWORD_HASH_RESERVED_USER_METADATA_KEY, this.repositoryPasswordSaltedHash); return snapshotUserMetadata; } - private void validatePasswordHash(Map snapshotUserMetadata, ActionListener listener) { - Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); - if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { - listener.onFailure(new IllegalStateException("Snapshot metadata does not contain the password hash or is invalid")); - return; - } - if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { - listener.onFailure(new RepositoryException(metadata.name(), - "Password mismatch for repository. 
The local node's value of the keystore secure setting [" + - EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()).getKey() + - "] is different from the master node that started the snapshot")); - return; - } - } - @Override public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenerations, long startTime, String failure, int totalShards, List shardFailures, long repositoryStateId, @@ -160,11 +179,6 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s listener); } - @Override - public boolean isReadOnly() { - return false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed(); - } - @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { @@ -188,7 +202,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted @Override protected BlobStore createBlobStore() { return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryption, - encryptionNonceGenerator, metadataNameGenerator); + encryptionNonceGenerator, metadataIdentifierGenerator); } @Override @@ -210,7 +224,6 @@ protected void doClose() { } private static class EncryptedBlobStore implements BlobStore { - private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryptor; @@ -240,7 +253,6 @@ public BlobContainer blobContainer(BlobPath path) { } private static class EncryptedBlobContainer implements BlobContainer { - private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; @@ -411,7 +423,7 @@ public void cleanUpOrphanedMetadata() throws IOException{ } } - private static String computeSaltedPBKDF2Hash(char[] password, SecureRandom secureRandom) { + private static String computeSaltedPBKDF2Hash(SecureRandom secureRandom, char[] password) { byte[] salt = new byte[16]; secureRandom.nextBytes(salt); return computeSaltedPBKDF2Hash(salt, password); @@ -428,11 +440,27 @@ private static String computeSaltedPBKDF2Hash(byte[] salt, char[] password) { } catch (InvalidKeySpecException | NoSuchAlgorithmException e) { throw new RuntimeException("Unexpected exception when computing PBKDF2 hash for the repository password", e); } - return new String(Base64.getEncoder().encode(salt), StandardCharsets.UTF_8) + ":" + - new String(Base64.getEncoder().encode(hash), StandardCharsets.UTF_8); + return new String(Base64.getUrlEncoder().withoutPadding().encode(salt), StandardCharsets.UTF_8) + ":" + + new String(Base64.getUrlEncoder().withoutPadding().encode(hash), StandardCharsets.UTF_8); + } + + private void validatePasswordHash(Map snapshotUserMetadata, ActionListener listener) { + Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); + if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { + listener.onFailure(new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String")); + return; + } + if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { + listener.onFailure(new RepositoryException(metadata.name(), + "Repository password mismatch. 
The local node's value of the keystore secure setting [" + + EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()).getKey() + + "] is different from the master node, which started the snapshot operation")); + return; + } } private static class HashVerifier { + private final char[] password; private final AtomicReference lastVerifiedHash; @@ -451,13 +479,13 @@ boolean verify(String saltedHash) { return false; } String salt = parts[0]; - String computedHash = computeSaltedPBKDF2Hash(Base64.getDecoder().decode(salt.getBytes(StandardCharsets.UTF_8)), password); + String computedHash = computeSaltedPBKDF2Hash(Base64.getUrlDecoder().decode(salt.getBytes(StandardCharsets.UTF_8)), password); if (false == computedHash.equals(saltedHash)) { return false; } lastVerifiedHash.set(computedHash); return true; } - } + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index f0890eba8e7f0..9bca8a7da6cd5 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -80,9 +80,10 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { - logger.warn("Encrypted snapshot repositories are not allowed for the current license." + - "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted and will fail.", - LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); + logger.warn("Encrypted snapshots are not allowed for the currently installed license." + + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted." 
+ + "All the other operations, including restore, are still permitted.", + LicenseUtils.newComplianceException("encrypted snapshots")); } String delegateType = DELEGATE_TYPE.get(metaData.settings()); if (Strings.hasLength(delegateType) == false) { @@ -92,19 +93,17 @@ public Repository create(RepositoryMetaData metaData, Function Date: Mon, 3 Feb 2020 21:44:20 +0200 Subject: [PATCH 092/142] Before rewriting cleanup --- .../encrypted/EncryptedRepository.java | 164 +++++++++++++----- .../encrypted/PasswordBasedEncryption.java | 2 + .../EncryptionPacketsInputStreamTests.java | 8 +- 3 files changed, 122 insertions(+), 52 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 12b61bc1b78ba..e7865878ec752 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -55,6 +55,7 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.function.Supplier; public final class EncryptedRepository extends BlobStoreRepository { @@ -62,11 +63,11 @@ public final class EncryptedRepository extends BlobStoreRepository { // the following constants are fixed by definition static final int GCM_TAG_LENGTH_IN_BYTES = 16; static final int GCM_IV_LENGTH_IN_BYTES = 12; - static final int AES_BLOCK_SIZE_IN_BYTES = 128; + static final int AES_BLOCK_LENGTH_IN_BYTES = 128; // changing the following constants implies breaking compatibility with previous versions of encrypted snapshots // in this case the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER} MUST be incremented static final String DATA_ENCRYPTION_SCHEME = "AES/GCM/NoPadding"; - static final int DATA_KEY_SIZE_IN_BITS = 256; + static final int DATA_KEY_LENGTH_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB static final int METADATA_NAME_LENGTH_IN_BYTES = 18; // 16 bits is the UUIDS length; 18 is the next multiple for Base64 encoding @@ -74,6 +75,11 @@ public final class EncryptedRepository extends BlobStoreRepository { // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB + static final String SALTED_PASSWORD_HASH_ALGO = "PBKDF2WithHmacSHA512"; + static final int SALTED_PASSWORD_HASH_ITER_COUNT = 10000; + static final int SALTED_PASSWORD_HASH_KEY_LENGTH_IN_BITS = 512; + static final int PASSWORD_HASH_SALT_LENGTH_IN_BYES = 16; + // each snapshot metadata contains the salted password hash of the master node that started the snapshot operation // this hash is then verified on each data node before the actual shard files snapshot, as well as on the // master node that finalizes the snapshot (could be a different master node, if a master failover @@ -117,17 +123,22 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = 
KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); - this.dataEncryptionKeyGenerator.init(DATA_KEY_SIZE_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.dataEncryptionKeyGenerator.init(DATA_KEY_LENGTH_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.metadataEncryption = new PasswordBasedEncryption(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); final SecureRandom secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); + // data encryption uses random "nonce"s although currently a constant would be just as secure this.encryptionNonceGenerator = () -> secureRandom.nextInt(); + // the metadata used to decrypt the encrypted blob resides in a different blob, one for every encrypted blob, + // which has a sufficiently long random name, enough to make it effectively unique in any given practical blob container final Random random = new Random(); this.metadataIdentifierGenerator = () -> { byte[] randomMetadataName = new byte[METADATA_NAME_LENGTH_IN_BYTES]; random.nextBytes(randomMetadataName); return randomMetadataName; }; + // the salted password hash for this encrypted repository password, on the local node (this is constant) this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), password); + // used to verify that the salted password hash in the snapshot metadata matches up with the repository password on the local node this.passwordHashVerifier = new HashVerifier(password); } @@ -165,7 +176,7 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera int totalShards, List shardFailures, long repositoryStateId, boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, boolean writeShardGens, ActionListener listener) { - validatePasswordHash(userMetadata, listener); + validateRepositoryPasswordHash(userMetadata, listener::onFailure); super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, includeGlobalState, clusterMetaData, userMetadata, writeShardGens, listener); } @@ -174,7 +185,7 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, Map userMetadata, ActionListener listener) { - validatePasswordHash(userMetadata, listener); + validateRepositoryPasswordHash(userMetadata, listener::onFailure); super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, userMetadata, listener); } @@ -194,7 +205,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted try { cleanUpOrphanedMetadataRecursively((EncryptedBlobContainer) childEncryptedBlobContainer); } catch(IOException e) { - logger.warn("Exception while cleaning up [" + childEncryptedBlobContainer.path() + "]", e); + logger.warn("Failure to clean-up blob container [" + childEncryptedBlobContainer.path() + "]", e); } } } @@ -226,18 +237,18 @@ protected void doClose() { private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; private final KeyGenerator dataEncryptionKeyGenerator; - private final PasswordBasedEncryption metadataEncryptor; + private final PasswordBasedEncryption metadataEncryption; 
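// Editorial aside, not part of the patch: a one-shot JCE sketch of the per-blob data encryption key and
// AES/GCM usage referenced above (AES, a 256-bit randomly generated key, a 12-byte IV and a 16-byte
// authentication tag, matching DATA_ENCRYPTION_SCHEME, DATA_KEY_LENGTH_IN_BITS, GCM_IV_LENGTH_IN_BYTES and
// GCM_TAG_LENGTH_IN_BYTES). The plugin itself encrypts packet-wise via EncryptionPacketsInputStream; this
// sketch only shows the underlying cipher calls and is not the plugin's actual streaming format.
import java.security.GeneralSecurityException;
import java.security.SecureRandom;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.GCMParameterSpec;

final class DataKeyGcmSketch {

    static byte[] encryptOnce(byte[] plaintext) throws GeneralSecurityException {
        SecureRandom random = new SecureRandom();
        KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
        keyGenerator.init(256, random);                       // one random data encryption key per blob
        SecretKey dataEncryptionKey = keyGenerator.generateKey();
        byte[] iv = new byte[12];                             // 12-byte GCM IV
        random.nextBytes(iv);
        Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
        cipher.init(Cipher.ENCRYPT_MODE, dataEncryptionKey, new GCMParameterSpec(128, iv)); // 16-byte tag
        // GCM appends the authentication tag to the ciphertext; a real blob would also need the IV,
        // and the data encryption key itself is stored encrypted in the separate metadata blob
        return cipher.doFinal(plaintext);
    }
}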
private final Supplier encryptionNonceGenerator; - private final Supplier metadataNameGenerator; + private final Supplier metadataIdentifierGenerator; EncryptedBlobStore(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryptor, Supplier encryptionNonceGenerator, - Supplier metadataNameGenerator) { + PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, + Supplier metadataIdentifierGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; - this.metadataEncryptor = metadataEncryptor; + this.metadataEncryption = metadataEncryption; this.encryptionNonceGenerator = encryptionNonceGenerator; - this.metadataNameGenerator = metadataNameGenerator; + this.metadataIdentifierGenerator = metadataIdentifierGenerator; } @Override @@ -247,8 +258,8 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryptor, - encryptionNonceGenerator, metadataNameGenerator); + return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryption, + encryptionNonceGenerator, metadataIdentifierGenerator); } } @@ -257,39 +268,60 @@ private static class EncryptedBlobContainer implements BlobContainer { private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; private final Supplier encryptionNonceGenerator; - private final Supplier metadataNameGenerator; + private final Supplier metadataIdentifierGenerator; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, - Supplier metadataNameGenerator) { + Supplier metadataIdentifierGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryption = metadataEncryption; this.encryptionNonceGenerator = encryptionNonceGenerator; - this.metadataNameGenerator = metadataNameGenerator; + this.metadataIdentifierGenerator = metadataIdentifierGenerator; this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_ROOT)); } + /** + * Returns the {@link BlobPath} to where the encrypted blobs are stored. Note that the encryption metadata is contained + * in separate blobs which are stored under a different blob path (see + * {@link #encryptionMetadataBlobContainer}). This blob path resembles the path of the encrypted + * blobs but is rooted under a specific path component (see {@link #ENCRYPTION_METADATA_ROOT}). The encryption is transparent + * in the sense that the metadata is not exposed by the {@link EncryptedBlobContainer}. + * + * @return the BlobPath to where the encrypted blobs are contained + */ @Override public BlobPath path() { return delegatedBlobContainer.path(); } + /** + * Returns a new {@link InputStream} for the given {@code blobName} that can be used to read the contents of the blob. 
+ * The returned {@code InputStream} transparently handles the decryption of the blob contents, by first working out + * the blob name of the associated metadata, reading and decrypting the metadata (given the repository password and utilizing + * the {@link PasswordBasedEncryption}) and lastly reading and decrypting the data blob, in a streaming fashion by employing the + * {@link DecryptionPacketsInputStream}. The {@code DecryptionPacketsInputStream} does not return un-authenticated data. + * + * @param blobName + * The name of the blob to get an {@link InputStream} for. + */ @Override public InputStream readBlob(String blobName) throws IOException { final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); - // read the metadata name which is prefixed to the encrypted blob - final byte[] metadataNameBytes = encryptedDataInputStream.readNBytes(METADATA_NAME_LENGTH_IN_BYTES); - if (metadataNameBytes.length != METADATA_NAME_LENGTH_IN_BYTES) { - throw new IOException("Failure to read encrypted blob metadata name"); + // read the metadata identifier (fixed length) which is prepended to the encrypted blob + final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_NAME_LENGTH_IN_BYTES); + if (metadataIdentifier.length != METADATA_NAME_LENGTH_IN_BYTES) { + throw new IOException("Failure to read encrypted blob metadata identifier"); } - final String metadataName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataNameBytes), + // the metadata blob name is simply the base64 encoding (URL safe) of the metadata identifier, + // inside a fixed root blob container + final String metadataBlobName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); - // read metadata - BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataName)); + // read the encrypted metadata contents + BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); final BlobEncryptionMetadata metadata; try { // decrypt and parse metadata @@ -303,36 +335,58 @@ public InputStream readBlob(String blobName) throws IOException { } throw new IOException(failureMessage, e); } - // read and decrypt blob + // read and decrypt the data blob return new DecryptionPacketsInputStream(encryptedDataInputStream, metadata.getDataEncryptionKey(), metadata.getNonce(), metadata.getPacketLengthInBytes()); } + /** + * Reads the blob content from the input stream and writes it to the container in a new blob with the given name. + * If {@code failIfAlreadyExists} is {@code true} and a blob with the same name already exists, the write operation will fail; + * otherwise, if {@code failIfAlreadyExists} is {@code false} the blob is overwritten. + * The contents are encrypted in a streaming fashion. The encryption key is randomly generated for each blob. + * The encryption key is separately stored in a metadata blob, which is encrypted with another key derived from the repository + * password. The metadata blob is stored first, before the encrypted data blob, so as to ensure that no encrypted data blobs + * are left without the associated metadata, in any failure scenario. + * + * @param blobName + * The name of the blob to write the contents of the input stream to. + * @param inputStream + * The input stream from which to retrieve the bytes to write to the blob. + * @param blobSize + * The size of the blob to be written, in bytes. 
It is implementation dependent whether + * this value is used in writing the blob to the repository. + * @param failIfAlreadyExists + * whether to throw a FileAlreadyExistsException if the given blob already exists + */ @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { final SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); final int nonce = encryptionNonceGenerator.get(); - // this is the metadata required to decrypt back the (to be) encrypted blob + // this is the metadata required to decrypt back the (soon to be) encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); - // encrypt metadata + // encrypt the metadata final byte[] encryptedMetadata; try { encryptedMetadata = BlobEncryptionMetadata.serializeMetadata(metadata, metadataEncryption::encrypt); } catch (IOException e) { throw new IOException("Failure to encrypt metadata for blob [" + blobName + "]", e); } - final byte[] metadataNameBytes = metadataNameGenerator.get(); - final String metadataName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataNameBytes), + // the metadata identifier is a sufficiently long random byte array so as to make it practically unique + // the goal is to avoid overwriting metadata blobs even if the encrypted data blobs are overwritten + final byte[] metadataIdentifier = metadataIdentifierGenerator.get(); + final String metadataBlobName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); // first write the encrypted metadata to a UNIQUE blob name try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { - encryptionMetadataBlobContainer.writeBlob(metadataName, encryptedMetadataInputStream, encryptedMetadata.length, true); + encryptionMetadataBlobContainer.writeBlob(metadataBlobName, encryptedMetadataInputStream, encryptedMetadata.length, true + /* fail in the exceptional case of metadata blob name conflict */); } - // afterwards overwrite the encrypted data blob - // prepended to the encrypted data blob is the unique name of the metadata blob - final long encryptedBlobSize = metadataNameBytes.length + EncryptionPacketsInputStream.getEncryptionLength(blobSize, + // afterwards write the encrypted data blob + // prepended to the encrypted data blob is the unique identifier (fixed length) of the metadata blob + final long encryptedBlobSize = metadataIdentifier.length + EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); - try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataNameBytes), + try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataIdentifier), new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES), true)) { delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } @@ -383,7 +437,7 @@ public Map children() throws IOException { // get an encrypted blob container for each result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, - encryptionNonceGenerator, metadataNameGenerator)); + encryptionNonceGenerator, metadataIdentifierGenerator)); } return result; } @@ -406,9 +460,9 @@ 
public void cleanUpOrphanedMetadata() throws IOException{ try { encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); } catch (IOException e) { - logger.warn("Exception while deleting orphaned metadata blobs " + orphanedMetadataBlobs, e); + logger.warn("Failure to delete orphaned metadata blobs " + orphanedMetadataBlobs, e); } - // delete Encryption metadata blob containers which don't par with any data blob containers + // delete encryption metadata blob containers which don't pair with any data blob containers Set foundEncryptedBlobContainers = delegatedBlobContainer.children().keySet(); Map foundMetadataBlobContainers = encryptionMetadataBlobContainer.children(); for (Map.Entry metadataBlobContainer : foundMetadataBlobContainers.entrySet()) { @@ -424,44 +478,55 @@ public void cleanUpOrphanedMetadata() throws IOException{ } private static String computeSaltedPBKDF2Hash(SecureRandom secureRandom, char[] password) { - byte[] salt = new byte[16]; + byte[] salt = new byte[PASSWORD_HASH_SALT_LENGTH_IN_BYES]; secureRandom.nextBytes(salt); return computeSaltedPBKDF2Hash(salt, password); } private static String computeSaltedPBKDF2Hash(byte[] salt, char[] password) { - final int iterations = 10000; - final int keyLength = 512; - final PBEKeySpec spec = new PBEKeySpec(password, salt, iterations, keyLength); + final PBEKeySpec spec = new PBEKeySpec(password, salt, SALTED_PASSWORD_HASH_ITER_COUNT, SALTED_PASSWORD_HASH_KEY_LENGTH_IN_BITS); final byte[] hash; try { - SecretKeyFactory pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); + SecretKeyFactory pbkdf2KeyFactory = SecretKeyFactory.getInstance(SALTED_PASSWORD_HASH_ALGO); hash = pbkdf2KeyFactory.generateSecret(spec).getEncoded(); } catch (InvalidKeySpecException | NoSuchAlgorithmException e) { - throw new RuntimeException("Unexpected exception when computing PBKDF2 hash for the repository password", e); + throw new RuntimeException("Unexpected exception when computing the hash of the repository password", e); } return new String(Base64.getUrlEncoder().withoutPadding().encode(salt), StandardCharsets.UTF_8) + ":" + new String(Base64.getUrlEncoder().withoutPadding().encode(hash), StandardCharsets.UTF_8); } - private void validatePasswordHash(Map snapshotUserMetadata, ActionListener listener) { + /** + * Called before every snapshot operation on every node to validate that the snapshot metadata contains a password hash + * that matches up with the repository password on the local node. + * + * @param snapshotUserMetadata the snapshot metadata to verify + * @param exception the exception handler to call when the repository password check fails + */ + private void validateRepositoryPasswordHash(Map snapshotUserMetadata, Consumer exception) { Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { - listener.onFailure(new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String")); + exception.accept(new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String")); return; } if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { - listener.onFailure(new RepositoryException(metadata.name(), + exception.accept(new RepositoryException(metadata.name(), "Repository password mismatch. 
The local node's value of the keystore secure setting [" + EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()).getKey() + - "] is different from the master node, which started the snapshot operation")); + "] is different from the elected master node, which started the snapshot operation")); return; } } + /** + * This is used to verify that salted hashes match up with the {@code password} from the constructor argument. + * This also caches the last successfully verified hash, so that repeated checks for the same hash turn into a simple {@code String + * #equals}. + */ private static class HashVerifier { - + // the password to which the salted hashes must match up with private final char[] password; + // the last successfully matched salted hash private final AtomicReference lastVerifiedHash; HashVerifier(char[] password) { @@ -471,11 +536,13 @@ private static class HashVerifier { boolean verify(String saltedHash) { Objects.requireNonNull(saltedHash); + // first check if this exact hash has been checked before if (saltedHash.equals(lastVerifiedHash.get())) { return true; } String[] parts = saltedHash.split(":"); if (parts == null || parts.length != 2) { + // the hash has an invalid format return false; } String salt = parts[0]; @@ -483,6 +550,7 @@ boolean verify(String saltedHash) { if (false == computedHash.equals(saltedHash)) { return false; } + // remember last successfully verified hash lastVerifiedHash.set(computedHash); return true; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java index 03c1e1faaaf3f..46a2eb96c2c37 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java @@ -126,7 +126,9 @@ public byte[] encrypt(byte[] data, @Nullable byte[] associatedData) throws Execu byte[] resultCiphertext = new byte[saltAndEncryptionKey.v1().length + iv.length + encryptedData.length]; // prepend salt System.arraycopy(saltAndEncryptionKey.v1(), 0, resultCiphertext, 0, saltAndEncryptionKey.v1().length); + // follow-up with the iv System.arraycopy(iv, 0, resultCiphertext, saltAndEncryptionKey.v1().length, iv.length); + // and finally conclude the result with the ciphertext (the output of the cipher) System.arraycopy(encryptedData, 0, resultCiphertext, saltAndEncryptionKey.v1().length + iv.length, encryptedData.length); return resultCiphertext; } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java index e3f96553af1a2..a25514eb4d2d5 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStreamTests.java @@ -110,10 +110,10 @@ public void testShortPacketSizes() throws Exception { public void testPacketSizeMultipleOfAESBlockSize() throws Exception { int packetSize = 1 + Randomness.get().nextInt(8); - testEncryptPacketWise(1 + 
Randomness.get().nextInt(packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES), - packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, new DefaultBufferedReadAllStrategy()); - testEncryptPacketWise(packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES + Randomness.get().nextInt(8192), - packetSize * EncryptedRepository.AES_BLOCK_SIZE_IN_BYTES, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(1 + Randomness.get().nextInt(packetSize * EncryptedRepository.AES_BLOCK_LENGTH_IN_BYTES), + packetSize * EncryptedRepository.AES_BLOCK_LENGTH_IN_BYTES, new DefaultBufferedReadAllStrategy()); + testEncryptPacketWise(packetSize * EncryptedRepository.AES_BLOCK_LENGTH_IN_BYTES + Randomness.get().nextInt(8192), + packetSize * EncryptedRepository.AES_BLOCK_LENGTH_IN_BYTES, new DefaultBufferedReadAllStrategy()); } public void testMarkAndResetPacketBoundaryNoMock() throws Exception { From 90bcaab9dc336227ca95e7c7f748dd30c5940c86 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Feb 2020 16:55:37 +0200 Subject: [PATCH 093/142] Refactor delete + cleanup --- .../encrypted/EncryptedRepository.java | 61 +++++++++++++++---- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index e7865878ec752..049efa17c20b7 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -49,6 +49,7 @@ import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -70,7 +71,8 @@ public final class EncryptedRepository extends BlobStoreRepository { static final int DATA_KEY_LENGTH_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB - static final int METADATA_NAME_LENGTH_IN_BYTES = 18; // 16 bits is the UUIDS length; 18 is the next multiple for Base64 encoding + static final int METADATA_UID_LENGTH_IN_BYTES = 18; // 16 bits is the UUIDS length; 18 is the next multiple for Base64 encoding + static final int METADATA_UID_LENGTH_IN_CHARS = 24; // base64 encoding with no padding // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} @@ -132,7 +134,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry // which has a sufficiently long random name, enough to make it effectively unique in any given practical blob container final Random random = new Random(); this.metadataIdentifierGenerator = () -> { - byte[] randomMetadataName = new byte[METADATA_NAME_LENGTH_IN_BYTES]; + byte[] randomMetadataName = new byte[METADATA_UID_LENGTH_IN_BYTES]; random.nextBytes(randomMetadataName); return randomMetadataName; }; @@ -312,13 +314,12 @@ public BlobPath path() { public InputStream readBlob(String blobName) throws IOException { final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); // read the metadata identifier (fixed length) which is prepended to the encrypted blob - final 
byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_NAME_LENGTH_IN_BYTES); - if (metadataIdentifier.length != METADATA_NAME_LENGTH_IN_BYTES) { + final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); + if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { throw new IOException("Failure to read encrypted blob metadata identifier"); } - // the metadata blob name is simply the base64 encoding (URL safe) of the metadata identifier, - // inside a fixed root blob container - final String metadataBlobName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), + // the metadata blob name is the name of the data blob followed by the base64 encoding (URL safe) of the metadata identifier + final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); // read the encrypted metadata contents BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); @@ -375,7 +376,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b // the metadata identifier is a sufficiently long random byte array so as to make it practically unique // the goal is to avoid overwriting metadata blobs even if the encrypted data blobs are overwritten final byte[] metadataIdentifier = metadataIdentifierGenerator.get(); - final String metadataBlobName = new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), + final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); // first write the encrypted metadata to a UNIQUE blob name try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { @@ -411,10 +412,19 @@ public DeleteResult delete() throws IOException { @Override public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOException { + Objects.requireNonNull(blobNames); // first delete the encrypted data blob delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); // then delete metadata - encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + Set blobNamesSet = new HashSet<>(blobNames); + List metadataBlobsToDelete = new ArrayList<>(blobNames.size()); + for (String metadataBlobName : encryptionMetadataBlobContainer.listBlobs().keySet()) { + if (blobNamesSet.contains(metadataBlobName.substring(0, + metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS))) { + metadataBlobsToDelete.add(metadataBlobName); + } + } + encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(metadataBlobsToDelete); } @Override @@ -451,12 +461,39 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws return delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); } - public void cleanUpOrphanedMetadata() throws IOException{ + private String readMetadataUidFromEncryptedBlob(String blobName) throws IOException { + try (InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName)) { + // read the metadata identifier (fixed length) which is prepended to the encrypted blob + final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); + if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { + throw new IOException("Failure to read encrypted blob metadata identifier"); + } + return new 
String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); + } + } + + public void cleanUpOrphanedMetadata() throws IOException { // delete encryption metadata blobs which don't pair with any data blobs Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); Set foundMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); - List orphanedMetadataBlobs = new ArrayList<>(foundMetadataBlobs); - orphanedMetadataBlobs.removeAll(foundEncryptedBlobs); + List orphanedMetadataBlobs = new ArrayList<>(); + Map> blobNameToMetadataNames = new HashMap<>(); + for (String metadataBlobName : foundMetadataBlobs) { + String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); + blobNameToMetadataNames.computeIfAbsent(blobName, k -> new ArrayList<>()).add(metadataBlobName); + } + for (Map.Entry> blobAndMetadataName : blobNameToMetadataNames.entrySet()) { + if (false == foundEncryptedBlobs.contains(blobAndMetadataName.getKey())) { + orphanedMetadataBlobs.addAll(blobAndMetadataName.getValue()); + } else if (blobAndMetadataName.getValue().size() > 1) { + String metadataIdentifier = readMetadataUidFromEncryptedBlob(blobAndMetadataName.getKey()); + for (String metadataBlobName : blobAndMetadataName.getValue()) { + if (false == metadataBlobName.endsWith(metadataIdentifier)) { + orphanedMetadataBlobs.add(metadataBlobName); + } + } + } + } try { encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); } catch (IOException e) { From 57ab5adb232a185522ac12dd3a9a6cb944db5136 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Feb 2020 17:31:39 +0200 Subject: [PATCH 094/142] BlobEncryptionMetadata getInt bug --- .../repositories/encrypted/BlobEncryptionMetadata.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index 92197da0dcc01..d765533f06a93 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -106,8 +106,8 @@ static BlobEncryptionMetadata deserializeMetadata(byte[] metadata, CheckedBiFunc throws IOException { byte[] authenticatedData = Arrays.copyOf(metadata, 2 * Integer.BYTES); ByteBuffer byteBuffer = ByteBuffer.wrap(authenticatedData).order(ByteOrder.LITTLE_ENDIAN); - int nonce = byteBuffer.get(0); - int packetLengthInBytes = byteBuffer.get(Integer.BYTES); + int nonce = byteBuffer.getInt(0); + int packetLengthInBytes = byteBuffer.getInt(Integer.BYTES); byte[] encryptedData = Arrays.copyOfRange(metadata, 2 * Integer.BYTES, metadata.length); final byte[] decryptedData; try { From 5d9d4c190c1d19c9960624068a7bd1069ee87951 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Feb 2020 18:54:53 +0200 Subject: [PATCH 095/142] Fix delete and cleanup for unknown metadata --- .../encrypted/EncryptedRepository.java | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 049efa17c20b7..c5b62e2442a98 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -419,8 +419,12 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce Set blobNamesSet = new HashSet<>(blobNames); List metadataBlobsToDelete = new ArrayList<>(blobNames.size()); for (String metadataBlobName : encryptionMetadataBlobContainer.listBlobs().keySet()) { - if (blobNamesSet.contains(metadataBlobName.substring(0, - metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS))) { + boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; + if (invalidMetadataName) { + continue; + } + String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); + if (blobNamesSet.contains(blobName)) { metadataBlobsToDelete.add(metadataBlobName); } } @@ -461,17 +465,6 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws return delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); } - private String readMetadataUidFromEncryptedBlob(String blobName) throws IOException { - try (InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName)) { - // read the metadata identifier (fixed length) which is prepended to the encrypted blob - final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); - if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { - throw new IOException("Failure to read encrypted blob metadata identifier"); - } - return new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); - } - } - public void cleanUpOrphanedMetadata() throws IOException { // delete encryption metadata blobs which don't pair with any data blobs Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); @@ -479,6 +472,11 @@ public void cleanUpOrphanedMetadata() throws IOException { List orphanedMetadataBlobs = new ArrayList<>(); Map> blobNameToMetadataNames = new HashMap<>(); for (String metadataBlobName : foundMetadataBlobs) { + boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; + if (invalidMetadataName) { + orphanedMetadataBlobs.add(metadataBlobName); + continue; + } String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); blobNameToMetadataNames.computeIfAbsent(blobName, k -> new ArrayList<>()).add(metadataBlobName); } @@ -512,6 +510,17 @@ public void cleanUpOrphanedMetadata() throws IOException { } } } + + private String readMetadataUidFromEncryptedBlob(String blobName) throws IOException { + try (InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName)) { + // read the metadata identifier (fixed length) which is prepended to the encrypted blob + final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); + if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { + throw new IOException("Failure to read encrypted blob metadata identifier"); + } + return new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); + } + } } private static String computeSaltedPBKDF2Hash(SecureRandom 
secureRandom, char[] password) { From 03a58ed13a965be4f31a4ca2612f7344403f434d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Feb 2020 19:47:53 +0200 Subject: [PATCH 096/142] Try make custom metadata not display to user --- .../repositories/encrypted/EncryptedRepository.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index c5b62e2442a98..63e169fe49e36 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -165,7 +165,9 @@ public Map adaptUserMetadata(Map userMetadata) { throw LicenseUtils.newComplianceException("encrypted snapshots"); } Map snapshotUserMetadata = new HashMap<>(); - snapshotUserMetadata.putAll(userMetadata); + if (userMetadata != null) { + snapshotUserMetadata.putAll(userMetadata); + } // pin down the salted hash of the repository password // this is then checked before every snapshot operation (i.e. {@link #snapshotShard} and {@link #finalizeSnapshot}) // to assure that all participating nodes in the snapshot have the same repository password set @@ -178,6 +180,9 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera int totalShards, List shardFailures, long repositoryStateId, boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, boolean writeShardGens, ActionListener listener) { + if (userMetadata != null && userMetadata.containsKey(PASSWORD_HASH_RESERVED_USER_METADATA_KEY)) { + userMetadata.remove(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); + } validateRepositoryPasswordHash(userMetadata, listener::onFailure); super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, includeGlobalState, clusterMetaData, userMetadata, writeShardGens, listener); From 3ee957da67d3361ffb03b114ff2dba5030f80edd Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 4 Feb 2020 20:34:32 +0200 Subject: [PATCH 097/142] Fix children --- .../elasticsearch/common/blobstore/BlobPath.java | 14 ++++++++++++++ .../encrypted/EncryptedRepository.java | 9 +++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java index 51e30160ff1f4..6cea5e50dc978 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Objects; /** * The list of paths where a blob can reside. The contents of the paths are dependent upon the implementation of {@link BlobContainer}. 
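// Editorial aside, not included in this patch series: a hypothetical check exercising the value-based
// equals/hashCode that the hunk below adds to BlobPath. Only the no-arg constructor and add(String) are
// used, as in the existing BlobPathTests; the class name and literals here are illustrative only.
import org.elasticsearch.common.blobstore.BlobPath;

final class BlobPathEqualitySketch {

    public static void main(String[] args) {
        BlobPath first = new BlobPath().add("indices").add("0");
        BlobPath second = new BlobPath().add("indices").add("0");
        if (false == (first.equals(second) && first.hashCode() == second.hashCode())) {
            throw new AssertionError("paths with the same components must be equal and share a hash code");
        }
        if (first.equals(new BlobPath().add("indices").add("1"))) {
            throw new AssertionError("paths with different components must not be equal");
        }
    }
}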
@@ -99,4 +100,17 @@ public String toString() { } return sb.toString(); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BlobPath other = (BlobPath) o; + return Objects.equals(paths, other.paths); + } + + @Override + public int hashCode() { + return Objects.hash(paths); + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 63e169fe49e36..e9e3a23baf153 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -180,10 +180,10 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera int totalShards, List shardFailures, long repositoryStateId, boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, boolean writeShardGens, ActionListener listener) { + validateRepositoryPasswordHash(userMetadata, listener::onFailure); if (userMetadata != null && userMetadata.containsKey(PASSWORD_HASH_RESERVED_USER_METADATA_KEY)) { userMetadata.remove(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); } - validateRepositoryPasswordHash(userMetadata, listener::onFailure); super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, includeGlobalState, clusterMetaData, userMetadata, writeShardGens, listener); } @@ -453,6 +453,10 @@ public Map children() throws IOException { Map childEncryptedBlobContainers = delegatedBlobContainer.children(); Map result = new HashMap<>(childEncryptedBlobContainers.size()); for (Map.Entry encryptedBlobContainer : childEncryptedBlobContainers.entrySet()) { + if (encryptedBlobContainer.getValue().path().equals(encryptionMetadataBlobContainer.path())) { + // do not descend recursively into the metadata blob container itself + continue; + } // get an encrypted blob container for each result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, @@ -557,7 +561,8 @@ private static String computeSaltedPBKDF2Hash(byte[] salt, char[] password) { private void validateRepositoryPasswordHash(Map snapshotUserMetadata, Consumer exception) { Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { - exception.accept(new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String")); + exception.accept(new RepositoryException(metadata.name(), "Unexpected fatal internal error", + new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String"))); return; } if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { From 82e8a890fe8111007f7f5bdd9202bdddf76f67c0 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 5 Feb 2020 23:29:10 +0200 Subject: [PATCH 098/142] missing encryption metadata handling --- .../common/blobstore/BlobPath.java | 7 +- .../common/blobstore/BlobPathTests.java | 11 --- .../encrypted/EncryptedRepository.java | 84 ++++++++++++++----- 3 files changed, 66 
insertions(+), 36 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java index 6cea5e50dc978..b107eb4c9010a 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java @@ -63,10 +63,9 @@ public BlobPath add(String path) { return new BlobPath(Collections.unmodifiableList(paths)); } - public BlobPath prepend(String path) { - List paths = new ArrayList<>(this.paths.size() + 1); - paths.add(path); - paths.addAll(this.paths); + public BlobPath append(BlobPath otherPath) { + List paths = new ArrayList<>(this.paths); + paths.addAll(otherPath.paths); return new BlobPath(Collections.unmodifiableList(paths)); } diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java index cfd505ba95466..48caddf31a505 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java @@ -39,15 +39,4 @@ public void testBuildAsString() { assertThat(path.buildAsString(), is("a/b/c/d/")); } - public void testPrepend() { - String pathComponent = randomAlphaOfLengthBetween(1, 3); - assertThat(new BlobPath().prepend(pathComponent).buildAsString(), is(new BlobPath().add(pathComponent).buildAsString())); - - BlobPath path = new BlobPath(); - path = path.add(pathComponent); - - String prependComponent = randomAlphaOfLengthBetween(1, 3); - assertThat(path.prepend(prependComponent).buildAsString(), - is(new BlobPath().add(prependComponent).add(pathComponent).buildAsString())); - } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index e9e3a23baf153..78399da54645f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -122,7 +122,7 @@ public final class EncryptedRepository extends BlobStoreRepository { protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, BlobStoreRepository delegatedRepository, char[] password) throws NoSuchAlgorithmException { - super(metadata, namedXContentRegistry, clusterService, delegatedRepository.basePath()); + super(metadata, namedXContentRegistry, clusterService, BlobPath.cleanPath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); this.dataEncryptionKeyGenerator.init(DATA_KEY_LENGTH_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); @@ -219,7 +219,7 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted @Override protected BlobStore createBlobStore() { - return new EncryptedBlobStore(this.delegatedRepository.blobStore(), dataEncryptionKeyGenerator, metadataEncryption, + return new EncryptedBlobStore(delegatedRepository, dataEncryptionKeyGenerator, metadataEncryption, encryptionNonceGenerator, metadataIdentifierGenerator); } @@ -243,15 +243,17 @@ protected void doClose() { 
private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; + private final BlobPath delegatedBasePath; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; private final Supplier encryptionNonceGenerator; private final Supplier metadataIdentifierGenerator; - EncryptedBlobStore(BlobStore delegatedBlobStore, KeyGenerator dataEncryptionKeyGenerator, + EncryptedBlobStore(BlobStoreRepository delegatedBlobStoreRepository, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, Supplier metadataIdentifierGenerator) { - this.delegatedBlobStore = delegatedBlobStore; + this.delegatedBlobStore = delegatedBlobStoreRepository.blobStore(); + this.delegatedBasePath = delegatedBlobStoreRepository.basePath(); this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryption = metadataEncryption; this.encryptionNonceGenerator = encryptionNonceGenerator; @@ -265,13 +267,15 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - return new EncryptedBlobContainer(delegatedBlobStore, path, dataEncryptionKeyGenerator, metadataEncryption, + return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeyGenerator, metadataEncryption, encryptionNonceGenerator, metadataIdentifierGenerator); } } private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; + private final BlobPath delegatedBasePath; + private final BlobPath path; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; private final Supplier encryptionNonceGenerator; @@ -279,16 +283,19 @@ private static class EncryptedBlobContainer implements BlobContainer { private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; - EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, - Supplier metadataIdentifierGenerator) { + EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath delegatedBasePath, BlobPath path, + KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryption metadataEncryption, + Supplier encryptionNonceGenerator, Supplier metadataIdentifierGenerator) { this.delegatedBlobStore = delegatedBlobStore; + this.delegatedBasePath = delegatedBasePath; + this.path = path; this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; this.metadataEncryption = metadataEncryption; this.encryptionNonceGenerator = encryptionNonceGenerator; this.metadataIdentifierGenerator = metadataIdentifierGenerator; - this.delegatedBlobContainer = delegatedBlobStore.blobContainer(path); - this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(path.prepend(ENCRYPTION_METADATA_ROOT)); + this.delegatedBlobContainer = delegatedBlobStore.blobContainer(delegatedBasePath.append(path)); + this.encryptionMetadataBlobContainer = + delegatedBlobStore.blobContainer(delegatedBasePath.add(ENCRYPTION_METADATA_ROOT).append(path)); } /** @@ -302,7 +309,7 @@ private static class EncryptedBlobContainer implements BlobContainer { */ @Override public BlobPath path() { - return delegatedBlobContainer.path(); + return path; } /** @@ -411,7 +418,12 @@ public DeleteResult delete() throws IOException { // 
first delete the encrypted data blob DeleteResult deleteResult = delegatedBlobContainer.delete(); // then delete metadata - encryptionMetadataBlobContainer.delete(); + try { + encryptionMetadataBlobContainer.delete(); + } catch (IOException e) { + // the encryption metadata blob container might not exist at all + logger.warn("Failure to delete metadata blob container " + encryptionMetadataBlobContainer.path(), e); + } return deleteResult; } @@ -423,7 +435,15 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce // then delete metadata Set blobNamesSet = new HashSet<>(blobNames); List metadataBlobsToDelete = new ArrayList<>(blobNames.size()); - for (String metadataBlobName : encryptionMetadataBlobContainer.listBlobs().keySet()) { + final Set allMetadataBlobs; + try { + allMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); + } catch (IOException e) { + // the encryption metadata blob container might not exist at all + logger.warn("Failure to list blobs of metadata blob container " + encryptionMetadataBlobContainer.path(), e); + return; + } + for (String metadataBlobName : allMetadataBlobs) { boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; if (invalidMetadataName) { continue; @@ -433,7 +453,12 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce metadataBlobsToDelete.add(metadataBlobName); } } - encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(metadataBlobsToDelete); + try { + encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(metadataBlobsToDelete); + } catch (IOException e) { + logger.warn("Failure to delete metadata blobs " + metadataBlobsToDelete + " from blob container " + + encryptionMetadataBlobContainer.path(), e); + } } @Override @@ -457,9 +482,10 @@ public Map children() throws IOException { // do not descend recursively into the metadata blob container itself continue; } - // get an encrypted blob container for each - result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, - encryptedBlobContainer.getValue().path(), dataEncryptionKeyGenerator, metadataEncryption, + // get an encrypted blob container for each child + // Note that the encryption metadata blob container might be missing + result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, + path.add(encryptedBlobContainer.getKey()), dataEncryptionKeyGenerator, metadataEncryption, encryptionNonceGenerator, metadataIdentifierGenerator)); } return result; @@ -477,10 +503,18 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws public void cleanUpOrphanedMetadata() throws IOException { // delete encryption metadata blobs which don't pair with any data blobs Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); - Set foundMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); + final Set foundMetadataBlobs; + try { + foundMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); + } catch (IOException e) { + logger.warn("Failure to list blobs of metadata blob container " + encryptionMetadataBlobContainer.path(), e); + return; + } List orphanedMetadataBlobs = new ArrayList<>(); Map> blobNameToMetadataNames = new HashMap<>(); for (String metadataBlobName : foundMetadataBlobs) { + // also remove unrecognized blobs in the metadata blob container (mainly because it's tedious in the general + // case to tell between bogus and legit stale metadata, and it would require 
reading the blobs, which is not worth it) boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; if (invalidMetadataName) { orphanedMetadataBlobs.add(metadataBlobName); @@ -504,17 +538,25 @@ public void cleanUpOrphanedMetadata() throws IOException { try { encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); } catch (IOException e) { - logger.warn("Failure to delete orphaned metadata blobs " + orphanedMetadataBlobs, e); + logger.warn("Failure to delete orphaned metadata blobs " + orphanedMetadataBlobs + " from blob container " + + encryptionMetadataBlobContainer.path(), e); } // delete encryption metadata blob containers which don't pair with any data blob containers Set foundEncryptedBlobContainers = delegatedBlobContainer.children().keySet(); - Map foundMetadataBlobContainers = encryptionMetadataBlobContainer.children(); + final Map foundMetadataBlobContainers; + try { + foundMetadataBlobContainers = encryptionMetadataBlobContainer.children(); + } catch (IOException e) { + logger.warn("Failure to list child blob containers for metadata blob container " + encryptionMetadataBlobContainer.path(), + e); + return; + } for (Map.Entry metadataBlobContainer : foundMetadataBlobContainers.entrySet()) { if (false == foundEncryptedBlobContainers.contains(metadataBlobContainer.getKey())) { try { metadataBlobContainer.getValue().delete(); } catch (IOException e) { - logger.warn("Exception while deleting orphaned metadata blob container [" + metadataBlobContainer + "]", e); + logger.warn("Failure to delete orphaned metadata blob container " + metadataBlobContainer.getValue().path(), e); } } } From b994d28cd105611122fbed06f8a912bd7017d2bd Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 6 Feb 2020 19:56:03 +0200 Subject: [PATCH 099/142] Checkstyle --- .../encrypted/BlobEncryptionMetadata.java | 13 ------------- .../repositories/encrypted/EncryptedRepository.java | 3 ++- .../encrypted/EncryptedRepositoryPlugin.java | 3 --- 3 files changed, 2 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java index d765533f06a93..25ee44d71e7aa 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/BlobEncryptionMetadata.java @@ -6,28 +6,15 @@ package org.elasticsearch.repositories.encrypted; -import org.elasticsearch.Version; import org.elasticsearch.common.CheckedBiFunction; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Arrays; import java.util.Objects; -import java.util.function.BiFunction; -import java.util.function.Function; - /** * Holds the necessary, and sufficient, metadata 
required to decrypt the associated encrypted blob. diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 78399da54645f..910f52c07385d 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -139,7 +139,8 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry return randomMetadataName; }; // the salted password hash for this encrypted repository password, on the local node (this is constant) - this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), password); + this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), + password); // used to verify that the salted password hash in the snapshot metadata matches up with the repository password on the local node this.passwordHashVerifier = new HashVerifier(password); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 9bca8a7da6cd5..30a65ecadd21b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.ConsistentSettingsService; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -26,12 +25,10 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.xpack.core.XPackPlugin; -import java.security.SecureRandom; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Function; public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { From 2e144538737384698b8aa225611f3758e791883a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 6 Feb 2020 20:10:46 +0200 Subject: [PATCH 100/142] Checkstyle --- .../repositories/encrypted/EncryptedRepository.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 910f52c07385d..9d89a13ed019f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import 
org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -132,7 +133,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.encryptionNonceGenerator = () -> secureRandom.nextInt(); // the metadata used to decrypt the encrypted blob resides in a different blob, one for every encrypted blob, // which has a sufficiently long random name, enough to make it effectively unique in any given practical blob container - final Random random = new Random(); + final Random random = Randomness.get(); this.metadataIdentifierGenerator = () -> { byte[] randomMetadataName = new byte[METADATA_UID_LENGTH_IN_BYTES]; random.nextBytes(randomMetadataName); From 4aaf895943097ac3c8255a140a97747b95c47f77 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 11 Feb 2020 14:50:39 +0200 Subject: [PATCH 101/142] writeSnapshotIndexLatestBlob --- .../blobstore/BlobStoreRepository.java | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 053a832604fc6..14d48b14dac4f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1321,14 +1321,8 @@ public void onFailure(Exception e) { writeAtomic(indexBlob, BytesReference.bytes(filteredRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), writeShardGens)), true); // write the current generation to the index-latest file - final BytesReference genBytes; - try (BytesStreamOutput bStream = new BytesStreamOutput()) { - bStream.writeLong(newGen); - genBytes = bStream.bytes(); - } logger.debug("Repository [{}] updating index.latest with generation [{}]", metadata.name(), newGen); - - writeAtomic(INDEX_LATEST_BLOB, genBytes, false); + writeSnapshotIndexLatestBlob(newGen); // Step 3: Update CS to reflect new repository generation. 
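
As an aside on the index.latest handling in the hunk above: the blob's entire payload is the current repository generation serialized as a single 8-byte big-endian long (written via StreamOutput#writeLong and read back with Numbers#bytesToLong). A minimal round-trip of that layout, sketched here with plain java.nio instead of the Elasticsearch stream helpers and assuming the big-endian encoding those helpers use; this is also, presumably, why the encrypted repository (next patch) can read and write index.latest through the delegated, unencrypted container: the generation number carries nothing sensitive.

    import java.nio.ByteBuffer;

    // Sketch of the index.latest payload: a single 8-byte big-endian long holding the generation.
    final class IndexLatestCodec {

        static byte[] encodeGeneration(long generation) {
            // ByteBuffer defaults to big-endian, matching StreamOutput#writeLong
            return ByteBuffer.allocate(Long.BYTES).putLong(generation).array();
        }

        static long decodeGeneration(byte[] blobContents) {
            if (blobContents.length != Long.BYTES) {
                throw new IllegalArgumentException("index.latest is expected to be exactly 8 bytes");
            }
            return ByteBuffer.wrap(blobContents).getLong();
        }

        public static void main(String[] args) {
            long generation = 17L;
            assert decodeGeneration(encodeGeneration(generation)) == generation;
        }
    }
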
clusterService.submitStateUpdateTask("set safe repository generation [" + metadata.name() + "][" + newGen + "]", @@ -1419,11 +1413,21 @@ long latestIndexBlobId() throws IOException { } } - // package private for testing - long readSnapshotIndexLatestBlob() throws IOException { + // protected for tests and to allow subclasses to override + protected long readSnapshotIndexLatestBlob() throws IOException { return Numbers.bytesToLong(Streams.readFully(blobContainer().readBlob(INDEX_LATEST_BLOB)).toBytesRef()); } + // protected to allow subclasses to override + protected void writeSnapshotIndexLatestBlob(long newGen) throws IOException { + final BytesReference genBytes; + try (BytesStreamOutput bStream = new BytesStreamOutput()) { + bStream.writeLong(newGen); + genBytes = bStream.bytes(); + } + writeAtomic(INDEX_LATEST_BLOB, genBytes, false); + } + private long listBlobsToGetLatestIndexId() throws IOException { return latestGeneration(blobContainer().listBlobsByPrefix(INDEX_FILE_PREFIX).keySet()); } From 8a9fe5d3a019478d6035095085273ce4672c3a20 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 11 Feb 2020 16:59:28 +0200 Subject: [PATCH 102/142] Write index.latest un-encrypted --- .../encrypted/EncryptedRepository.java | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 9d89a13ed019f..818632d6d2524 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Numbers; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; @@ -21,6 +22,7 @@ import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -243,6 +245,25 @@ protected void doClose() { this.delegatedRepository.close(); } + @Override + protected long readSnapshotIndexLatestBlob() throws IOException { + EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); + return Numbers.bytesToLong(Streams.readFully(encryptedBlobContainer.delegatedBlobContainer.readBlob(INDEX_LATEST_BLOB)).toBytesRef()); + } + + @Override + protected void writeSnapshotIndexLatestBlob(long newGen) throws IOException { + final BytesReference genBytes; + try (BytesStreamOutput bStream = new BytesStreamOutput()) { + bStream.writeLong(newGen); + genBytes = bStream.bytes(); + } + EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); + try (InputStream stream = genBytes.streamInput()) { + encryptedBlobContainer.delegatedBlobContainer.writeBlobAtomic(INDEX_LATEST_BLOB, stream, genBytes.length(), false); + } + 
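
The orphaned-metadata cleanup simplified in the next patch leans entirely on the naming convention used throughout these hunks: a metadata blob's name is the owning data blob's name plus a fixed-length random identifier, URL-safe Base64-encoded without padding, so the data blob name can be recovered by stripping a known number of trailing characters. A self-contained sketch of that convention follows, using plain JDK classes in place of the Randomness helper; the 18-byte/24-character sizes match the constants at this point in the series (a later patch shrinks them to 9/12), and the class and method names are illustrative, not the plugin's API.

    import java.security.SecureRandom;
    import java.util.Base64;
    import java.util.Set;

    // Illustrative sketch of the metadata blob naming convention; names are not from the plugin.
    final class MetadataBlobNaming {

        // 18 random bytes encode to exactly 24 unpadded URL-safe Base64 characters (values as of this patch)
        static final int METADATA_UID_LENGTH_IN_BYTES = 18;
        static final int METADATA_UID_LENGTH_IN_CHARS = 24;

        private static final SecureRandom RANDOM = new SecureRandom();

        // metadata blob name = data blob name + fixed-length random identifier
        static String newMetadataBlobName(String dataBlobName) {
            byte[] uid = new byte[METADATA_UID_LENGTH_IN_BYTES];
            RANDOM.nextBytes(uid);
            return dataBlobName + Base64.getUrlEncoder().withoutPadding().encodeToString(uid);
        }

        // recover the owning data blob name by stripping the fixed-length identifier
        static String dataBlobNameOf(String metadataBlobName) {
            return metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS);
        }

        // mirrors the cleanup check: too short to carry an identifier, or no matching data blob, means orphaned
        static boolean isOrphaned(String metadataBlobName, Set<String> dataBlobNames) {
            return metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS
                    || dataBlobNames.contains(dataBlobNameOf(metadataBlobName)) == false;
        }
    }
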
} + private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; private final BlobPath delegatedBasePath; From 80e536cc3089862a81acdedcbb5b5c8f230b19ba Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 11 Feb 2020 17:09:14 +0200 Subject: [PATCH 103/142] Keep all metadata blobs until the data blob is deleted --- .../encrypted/EncryptedRepository.java | 25 ++----------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 818632d6d2524..01f2be0f2a699 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -534,7 +534,6 @@ public void cleanUpOrphanedMetadata() throws IOException { return; } List orphanedMetadataBlobs = new ArrayList<>(); - Map> blobNameToMetadataNames = new HashMap<>(); for (String metadataBlobName : foundMetadataBlobs) { // also remove unrecognized blobs in the metadata blob container (mainly because it's tedious in the general // case to tell between bogus and legit stale metadata, and it would require reading the blobs, which is not worth it) @@ -544,18 +543,8 @@ public void cleanUpOrphanedMetadata() throws IOException { continue; } String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); - blobNameToMetadataNames.computeIfAbsent(blobName, k -> new ArrayList<>()).add(metadataBlobName); - } - for (Map.Entry> blobAndMetadataName : blobNameToMetadataNames.entrySet()) { - if (false == foundEncryptedBlobs.contains(blobAndMetadataName.getKey())) { - orphanedMetadataBlobs.addAll(blobAndMetadataName.getValue()); - } else if (blobAndMetadataName.getValue().size() > 1) { - String metadataIdentifier = readMetadataUidFromEncryptedBlob(blobAndMetadataName.getKey()); - for (String metadataBlobName : blobAndMetadataName.getValue()) { - if (false == metadataBlobName.endsWith(metadataIdentifier)) { - orphanedMetadataBlobs.add(metadataBlobName); - } - } + if (false == foundEncryptedBlobs.contains(blobName)) { + orphanedMetadataBlobs.add(metadataBlobName); } } try { @@ -585,16 +574,6 @@ public void cleanUpOrphanedMetadata() throws IOException { } } - private String readMetadataUidFromEncryptedBlob(String blobName) throws IOException { - try (InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName)) { - // read the metadata identifier (fixed length) which is prepended to the encrypted blob - final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); - if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { - throw new IOException("Failure to read encrypted blob metadata identifier"); - } - return new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); - } - } } private static String computeSaltedPBKDF2Hash(SecureRandom secureRandom, char[] password) { From 60c4ad9ae487bae5b6bf4b50f1c7317e6cf1d134 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 12 Feb 2020 19:47:50 +0200 Subject: [PATCH 104/142] IntegTests in progress --- ...eCloudStorageBlobStoreRepositoryTests.java | 2 +- .../blobstore/BlobStoreTestUtil.java | 4 
+- .../ESBlobStoreRepositoryIntegTestCase.java | 14 +-- ...ESMockAPIBasedRepositoryIntegTestCase.java | 2 +- .../encrypted/EncryptedRepository.java | 57 +++++++----- .../encrypted/EncryptedRepositoryPlugin.java | 11 +-- ...ncryptedBlobStoreRepositoryIntegTests.java | 86 +++++++++++++++++++ .../LocalStateEncryptedRepositoryPlugin.java | 31 +++++++ 8 files changed, 172 insertions(+), 35 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/LocalStateEncryptedRepositoryPlugin.java diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 0333d96510397..757df361bf3cf 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -114,7 +114,7 @@ protected Settings nodeSettings(int nodeOrdinal) { } public void testDeleteSingleItem() { - final String repoName = createRepository(randomName()); + final String repoName = createRepository(randomRepositoryName()); final RepositoriesService repositoriesService = internalCluster().getMasterNodeInstance(RepositoriesService.class); final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repoName); PlainActionFuture.get(f -> repository.threadPool().generic().execute(ActionRunnable.run(f, () -> diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 54ea0f26dcd0a..6c0c8ebb6bb5f 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -116,8 +116,8 @@ public static void assertConsistency(BlobStoreRepository repository, Executor ex executor.execute(ActionRunnable.run(listener, () -> { final BlobContainer blobContainer = repository.blobContainer(); final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob("index.latest"))) { - latestGen = inputStream.readLong(); + try { + latestGen = repository.readSnapshotIndexLatestBlob(); } catch (NoSuchFileException e) { throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 6ae627972b62c..291d30f4ce7c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -262,7 +262,7 @@ protected static void writeBlob(BlobContainer container, String blobName, BytesA } protected BlobStore newBlobStore() { - final String repository = createRepository(randomName()); + final String repository = 
createRepository(randomRepositoryName()); final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) internalCluster().getMasterNodeInstance(RepositoriesService.class).repository(repository); return PlainActionFuture.get( @@ -270,7 +270,7 @@ protected BlobStore newBlobStore() { } public void testSnapshotAndRestore() throws Exception { - final String repoName = createRepository(randomName()); + final String repoName = createRepository(randomRepositoryName()); int indexCount = randomIntBetween(1, 5); int[] docCounts = new int[indexCount]; String[] indexNames = generateRandomNames(indexCount); @@ -341,7 +341,7 @@ public void testSnapshotAndRestore() throws Exception { } public void testMultipleSnapshotAndRollback() throws Exception { - final String repoName = createRepository(randomName()); + final String repoName = createRepository(randomRepositoryName()); int iterationCount = randomIntBetween(2, 5); int[] docCounts = new int[iterationCount]; String indexName = randomName(); @@ -396,7 +396,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { } public void testIndicesDeletedFromRepository() throws Exception { - final String repoName = createRepository("test-repo"); + final String repoName = createRepository(randomRepositoryName()); Client client = client(); createIndex("test-idx-1", "test-idx-2", "test-idx-3"); ensureGreen(); @@ -493,7 +493,11 @@ private static void assertSuccessfulRestore(RestoreSnapshotResponse response) { assertThat(response.getRestoreInfo().successfulShards(), equalTo(response.getRestoreInfo().totalShards())); } - protected static String randomName() { + protected String randomName() { + return randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + } + + protected String randomRepositoryName() { return randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index ce36027b81379..ce953389d0f4a 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -123,7 +123,7 @@ public void tearDownHttpServer() { * Test the snapshot and restore of an index which has large segments files. 
*/ public final void testSnapshotWithLargeSegmentFiles() throws Exception { - final String repository = createRepository(randomName()); + final String repository = createRepository(randomRepositoryName()); final String index = "index-no-merges"; createIndex(index, Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 01f2be0f2a699..23e321bbd9f45 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.RepositoryException; @@ -111,12 +112,13 @@ public final class EncryptedRepository extends BlobStoreRepository { // Because data encryption keys are generated randomly (see {@link #dataEncryptionKey}) the nonce in this case can be a constant value. // But it is not a constant for reasons of greater robustness (future code changes might assume that the nonce is really a nonce), and // to allow that the encryption IV (which is part of the ciphertext) be checked for ACCIDENTAL tampering without attempting decryption - private final Supplier encryptionNonceGenerator; + private final ThreadLocal> encryptionNonceGenerator; // the metadata is stored in a separate blob so that when the metadata is regenerated (for example, rencrypting it after the repository // password is changed) it will not incur updating the encrypted blob, but only recreating a new metadata blob. // However, the encrypted blob is prepended a fixed length identifier which is used to locate the corresponding metadata. // This identifier is fixed, so it will not change when the metadata is recreated. - private final Supplier metadataIdentifierGenerator; + private final ThreadLocal> metadataIdentifierGenerator; + private final Supplier licenseStateSupplier; // the salted hash of this repository's password on the local node. The password is fixed for the lifetime of the repository. 
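
For context on the two fields declared just below (repositoryPasswordSaltedHash and passwordHashVerifier): the repository password itself is never published; only a salted PBKDF2 hash of it travels with the snapshot metadata, so a node finishing a snapshot can check it is configured with the same password as the node that started it. The following is a rough, JDK-only sketch of such a salted hash plus its constant-time verification; the algorithm name, iteration count, salt length, and "salt:hash" encoding are assumptions for illustration, not necessarily what computeSaltedPBKDF2Hash and HashVerifier actually use.

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.security.SecureRandom;
    import java.security.spec.InvalidKeySpecException;
    import java.util.Base64;
    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;

    // Illustrative salted PBKDF2 hash and verification; all parameters below are assumptions for the sketch.
    final class SaltedPasswordHash {

        private static final String PBKDF2_ALGO = "PBKDF2WithHmacSHA512"; // assumed algorithm
        private static final int ITERATIONS = 10_000;                     // assumed iteration count
        private static final int HASH_LENGTH_IN_BITS = 256;               // assumed hash length
        private static final int SALT_LENGTH_IN_BYTES = 16;               // assumed salt length

        static String compute(SecureRandom random, char[] password) throws NoSuchAlgorithmException, InvalidKeySpecException {
            byte[] salt = new byte[SALT_LENGTH_IN_BYTES];
            random.nextBytes(salt);
            Base64.Encoder encoder = Base64.getUrlEncoder().withoutPadding();
            // store the salt next to the hash so any node can recompute and compare
            return encoder.encodeToString(salt) + ":" + encoder.encodeToString(pbkdf2(password, salt));
        }

        static boolean verify(String saltedHash, char[] password) throws NoSuchAlgorithmException, InvalidKeySpecException {
            String[] parts = saltedHash.split(":");
            byte[] salt = Base64.getUrlDecoder().decode(parts[0]);
            byte[] expected = Base64.getUrlDecoder().decode(parts[1]);
            // constant-time comparison, so the check does not leak how much of the hash matched
            return MessageDigest.isEqual(expected, pbkdf2(password, salt));
        }

        private static byte[] pbkdf2(char[] password, byte[] salt) throws NoSuchAlgorithmException, InvalidKeySpecException {
            PBEKeySpec spec = new PBEKeySpec(password, salt, ITERATIONS, HASH_LENGTH_IN_BITS);
            return SecretKeyFactory.getInstance(PBKDF2_ALGO).generateSecret(spec).getEncoded();
        }
    }
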
private final String repositoryPasswordSaltedHash; // this is used to check that the salted hash of the repository password on the node that started the snapshot matches up with the @@ -124,23 +126,35 @@ public final class EncryptedRepository extends BlobStoreRepository { private final HashVerifier passwordHashVerifier; protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, - BlobStoreRepository delegatedRepository, char[] password) throws NoSuchAlgorithmException { + BlobStoreRepository delegatedRepository, Supplier licenseStateSupplier, + char[] password) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, BlobPath.cleanPath()); this.delegatedRepository = delegatedRepository; this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); this.dataEncryptionKeyGenerator.init(DATA_KEY_LENGTH_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); this.metadataEncryption = new PasswordBasedEncryption(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); - final SecureRandom secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); // data encryption uses random "nonce"s although currently a constant would be just as secure - this.encryptionNonceGenerator = () -> secureRandom.nextInt(); + this.encryptionNonceGenerator = ThreadLocal.withInitial(() -> { + final SecureRandom secureRandom; + try { + secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); + } catch (NoSuchAlgorithmException e) { + throw new IllegalStateException("Unexpected exception creating SecureRandom instance for [" + + EncryptedRepositoryPlugin.RAND_ALGO + "]", e); + } + return (Supplier) () -> secureRandom.nextInt(); + }); // the metadata used to decrypt the encrypted blob resides in a different blob, one for every encrypted blob, // which has a sufficiently long random name, enough to make it effectively unique in any given practical blob container - final Random random = Randomness.get(); - this.metadataIdentifierGenerator = () -> { - byte[] randomMetadataName = new byte[METADATA_UID_LENGTH_IN_BYTES]; - random.nextBytes(randomMetadataName); - return randomMetadataName; - }; + this.metadataIdentifierGenerator = ThreadLocal.withInitial(() -> { + final Random random = Randomness.get(); + return (Supplier) () -> { + byte[] randomMetadataName = new byte[METADATA_UID_LENGTH_IN_BYTES]; + random.nextBytes(randomMetadataName); + return randomMetadataName; + }; + }); + this.licenseStateSupplier = licenseStateSupplier; // the salted password hash for this encrypted repository password, on the local node (this is constant) this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), password); @@ -165,7 +179,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry public Map adaptUserMetadata(Map userMetadata) { // because populating the snapshot metadata must be done before the actual snapshot is first initialized, // we take the opportunity to validate the license and abort if non-compliant - if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { + if (false == licenseStateSupplier.get().isEncryptedSnapshotAllowed()) { throw LicenseUtils.newComplianceException("encrypted snapshots"); } Map snapshotUserMetadata = new HashMap<>(); @@ -269,12 +283,12 @@ private static class 
EncryptedBlobStore implements BlobStore { private final BlobPath delegatedBasePath; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; - private final Supplier encryptionNonceGenerator; - private final Supplier metadataIdentifierGenerator; + private final ThreadLocal> encryptionNonceGenerator; + private final ThreadLocal> metadataIdentifierGenerator; EncryptedBlobStore(BlobStoreRepository delegatedBlobStoreRepository, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceGenerator, - Supplier metadataIdentifierGenerator) { + PasswordBasedEncryption metadataEncryption, ThreadLocal> encryptionNonceGenerator, + ThreadLocal> metadataIdentifierGenerator) { this.delegatedBlobStore = delegatedBlobStoreRepository.blobStore(); this.delegatedBasePath = delegatedBlobStoreRepository.basePath(); this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; @@ -301,14 +315,15 @@ private static class EncryptedBlobContainer implements BlobContainer { private final BlobPath path; private final KeyGenerator dataEncryptionKeyGenerator; private final PasswordBasedEncryption metadataEncryption; - private final Supplier encryptionNonceGenerator; - private final Supplier metadataIdentifierGenerator; + private final ThreadLocal> encryptionNonceGenerator; + private final ThreadLocal> metadataIdentifierGenerator; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath delegatedBasePath, BlobPath path, KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryption metadataEncryption, - Supplier encryptionNonceGenerator, Supplier metadataIdentifierGenerator) { + ThreadLocal> encryptionNonceGenerator, + ThreadLocal> metadataIdentifierGenerator) { this.delegatedBlobStore = delegatedBlobStore; this.delegatedBasePath = delegatedBasePath; this.path = path; @@ -398,7 +413,7 @@ public InputStream readBlob(String blobName) throws IOException { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { final SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); - final int nonce = encryptionNonceGenerator.get(); + final int nonce = encryptionNonceGenerator.get().get(); // this is the metadata required to decrypt back the (soon to be) encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt the metadata @@ -410,7 +425,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } // the metadata identifier is a sufficiently long random byte array so as to make it practically unique // the goal is to avoid overwriting metadata blobs even if the encrypted data blobs are overwritten - final byte[] metadataIdentifier = metadataIdentifierGenerator.get(); + final byte[] metadataIdentifier = metadataIdentifierGenerator.get().get(); final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); // first write the encrypted metadata to a UNIQUE blob name diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 
30a65ecadd21b..fdc17029d7e06 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -31,7 +31,8 @@ import java.util.Map; import java.util.function.Function; -public final class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { +// not-final for tests +public class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { static final Logger logger = LogManager.getLogger(EncryptedRepositoryPlugin.class); static final String REPOSITORY_TYPE_NAME = "encrypted"; @@ -41,10 +42,10 @@ public final class EncryptedRepositoryPlugin extends Plugin implements Repositor "password", key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); - protected static XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } + protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } public EncryptedRepositoryPlugin(Settings settings) { - if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { + if (false == getLicenseState().isEncryptedSnapshotAllowed()) { logger.warn("Encrypted snapshot repositories are not allowed for the current license." + "Snapshotting to any encrypted repository is not permitted and will fail.", LicenseUtils.newComplianceException(EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME + " snapshot repository")); @@ -76,7 +77,7 @@ public Repository create(RepositoryMetaData metadata) { @Override public Repository create(RepositoryMetaData metaData, Function typeLookup) throws Exception { - if (false == EncryptedRepositoryPlugin.getLicenseState().isEncryptedSnapshotAllowed()) { + if (false == getLicenseState().isEncryptedSnapshotAllowed()) { logger.warn("Encrypted snapshots are not allowed for the currently installed license." + "Snapshots to the [" + metaData.name() + "] encrypted repository are not permitted." + "All the other operations, including restore, are still permitted.", @@ -102,7 +103,7 @@ public Repository create(RepositoryMetaData metaData, Function getLicenseState(), repositoryPassword); } }); } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java new file mode 100644 index 0000000000000..51cc190b27717 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; +import org.elasticsearch.repositories.fs.FsRepository; +import org.junit.BeforeClass; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public class EncryptedBlobStoreRepositoryIntegTests extends ESBlobStoreRepositoryIntegTestCase { + + private static List repositoryNames; + + @BeforeClass + private static void preGenerateRepositoryNames() { + repositoryNames = Collections.synchronizedList(randomList(100, 100, () -> "passwordPassword")); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + final MockSecureSettings secureSettings = new MockSecureSettings(); + for (String repositoryName : repositoryNames) { + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. + getConcreteSettingForNamespace(repositoryName).getKey(), randomAlphaOfLength(10)); + } + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .setSecureSettings(secureSettings) + .build(); + } + + @Override + protected String randomRepositoryName() { + return repositoryNames.remove(0); + } + + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class); + } + + + @Override + protected String repositoryType() { + return EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME; + } + + @Override + protected Settings repositorySettings() { + final Settings.Builder settings = Settings.builder(); + settings.put(super.repositorySettings()); + settings.put("location", randomRepoPath()); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), FsRepository.TYPE); + if (randomBoolean()) { + long size = 1 << randomInt(10); + settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); + } + return settings.build(); + } +} diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/LocalStateEncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/LocalStateEncryptedRepositoryPlugin.java new file mode 100644 index 0000000000000..cd1cae28ffd28 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/LocalStateEncryptedRepositoryPlugin.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; + +import java.nio.file.Path; + +public class LocalStateEncryptedRepositoryPlugin extends LocalStateCompositeXPackPlugin { + + final EncryptedRepositoryPlugin encryptedRepositoryPlugin; + + public LocalStateEncryptedRepositoryPlugin(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + LocalStateEncryptedRepositoryPlugin thisVar = this; + + encryptedRepositoryPlugin = new EncryptedRepositoryPlugin(settings) { + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + }; + plugins.add(encryptedRepositoryPlugin); + } + +} From 7953174e43c3a956a62709e2b095257c595de920 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 13 Feb 2020 17:58:50 +0200 Subject: [PATCH 105/142] Yey! EncryptedFS tests!!! --- .../ESBlobStoreRepositoryIntegTestCase.java | 6 ++++- .../DecryptionPacketsInputStream.java | 6 +++++ .../EncryptionPacketsInputStream.java | 6 +++++ ...yptedFSBlobStoreRepositoryIntegTests.java} | 24 ++++++++++++++++--- 4 files changed, 38 insertions(+), 4 deletions(-) rename x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/{EncryptedBlobStoreRepositoryIntegTests.java => EncryptedFSBlobStoreRepositoryIntegTests.java} (75%) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 291d30f4ce7c8..b776c749f1fb5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -175,7 +175,7 @@ public void testList() throws IOException { BlobMetaData blobMetaData = blobs.get(generated.getKey()); assertThat(generated.getKey(), blobMetaData, CoreMatchers.notNullValue()); assertThat(blobMetaData.name(), CoreMatchers.equalTo(generated.getKey())); - assertThat(blobMetaData.length(), CoreMatchers.equalTo(generated.getValue())); + assertThat(blobLengthFromDiskLength(blobMetaData), CoreMatchers.equalTo(generated.getValue())); } assertThat(container.listBlobsByPrefix("foo-").size(), CoreMatchers.equalTo(numberOfFooBlobs)); @@ -185,6 +185,10 @@ public void testList() throws IOException { } } + protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { + return blobMetaData.length(); + } + public void testDeleteBlobs() throws IOException { try (BlobStore store = newBlobStore()) { final List blobNames = Arrays.asList("foobar", "barfoo"); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 26c980c9a9884..7ccecf801ebb5 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -123,6 +123,12 @@ public void reset() throws IOException { throw new IOException("Mark/reset 
not supported"); } + @Override + public void close() throws IOException { + super.close(); + source.close(); + } + private int decrypt(PrefixInputStream packetInputStream) throws IOException { // read only the IV prefix into the packet buffer int ivLength = packetInputStream.readNBytes(packetBuffer, 0, GCM_IV_LENGTH_IN_BYTES); diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 556759a831d5a..27178d07ac534 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -171,6 +171,12 @@ public void reset() throws IOException { } } + @Override + public void close() throws IOException { + super.close(); + source.close(); + } + private static Cipher getPacketEncryptionCipher(SecretKey secretKey, byte[] packetIv) throws IOException { GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(EncryptedRepository.GCM_TAG_LENGTH_IN_BYTES * Byte.SIZE, packetIv); try { diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java similarity index 75% rename from x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java rename to x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 51cc190b27717..e7c2b39b79500 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -18,28 +18,35 @@ */ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.elasticsearch.repositories.fs.FsRepository; import org.junit.BeforeClass; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; -public class EncryptedBlobStoreRepositoryIntegTests extends ESBlobStoreRepositoryIntegTestCase { +public class EncryptedFSBlobStoreRepositoryIntegTests extends ESBlobStoreRepositoryIntegTestCase { private static List repositoryNames; @BeforeClass private static void preGenerateRepositoryNames() { - repositoryNames = Collections.synchronizedList(randomList(100, 100, () -> "passwordPassword")); + List names = new ArrayList<>(); + for (int i = 0; i < 32; i++) { + names.add("test-repo-" + i); + } + repositoryNames = 
Collections.synchronizedList(names); } @Override @@ -47,7 +54,7 @@ protected Settings nodeSettings(int nodeOrdinal) { final MockSecureSettings secureSettings = new MockSecureSettings(); for (String repositoryName : repositoryNames) { secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. - getConcreteSettingForNamespace(repositoryName).getKey(), randomAlphaOfLength(10)); + getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); } return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) @@ -61,6 +68,16 @@ protected String randomRepositoryName() { return repositoryNames.remove(0); } + protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { + if (BlobStoreRepository.INDEX_LATEST_BLOB.equals(blobMetaData.name())) { + // index.latest is not encrypted, hence the size on disk is equal to the content + return blobMetaData.length(); + } else { + return DecryptionPacketsInputStream.getDecryptionLength(blobMetaData.length() - + EncryptedRepository.METADATA_UID_LENGTH_IN_BYTES, EncryptedRepository.PACKET_LENGTH_IN_BYTES); + } + } + protected Collection> nodePlugins() { return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class); } @@ -83,4 +100,5 @@ protected Settings repositorySettings() { } return settings.build(); } + } From 9f7f4b44d25a3e6c4caab887e78cbdc2c5051e18 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 14 Feb 2020 16:26:55 +0200 Subject: [PATCH 106/142] Trimmings --- .../encrypted/EncryptedRepository.java | 119 ++++++++---------- .../encrypted/PasswordBasedEncryption.java | 4 +- 2 files changed, 57 insertions(+), 66 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 23e321bbd9f45..49383fbaab945 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -57,7 +57,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -75,8 +74,8 @@ public final class EncryptedRepository extends BlobStoreRepository { static final int DATA_KEY_LENGTH_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB - static final int METADATA_UID_LENGTH_IN_BYTES = 18; // 16 bits is the UUIDS length; 18 is the next multiple for Base64 encoding - static final int METADATA_UID_LENGTH_IN_CHARS = 24; // base64 encoding with no padding + static final int METADATA_UID_LENGTH_IN_BYTES = 9; // the length of unique tag appended to the metadata blob name to render it unique + static final int METADATA_UID_LENGTH_IN_CHARS = 12; // base64 encoding with no padding // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} @@ -104,20 +103,20 @@ public final class EncryptedRepository extends BlobStoreRepository { // this is the repository instance to which all blob reads and writes are forwarded to private final BlobStoreRepository delegatedRepository; // every data 
blob is encrypted with its randomly generated AES key (this is the "Data Encryption Key") - private final KeyGenerator dataEncryptionKeyGenerator; + private final Supplier dataEncryptionKeySupplier; // the {@link PasswordBasedEncryption} is used to encrypt (and decrypt) the data encryption key and the other associated metadata // the metadata encryption is based on AES keys which are generated from the repository password private final PasswordBasedEncryption metadataEncryption; // Data blob encryption requires a "nonce", only if the SAME data encryption key is used for several data blobs. // Because data encryption keys are generated randomly (see {@link #dataEncryptionKey}) the nonce in this case can be a constant value. - // But it is not a constant for reasons of greater robustness (future code changes might assume that the nonce is really a nonce), and - // to allow that the encryption IV (which is part of the ciphertext) be checked for ACCIDENTAL tampering without attempting decryption - private final ThreadLocal> encryptionNonceGenerator; + // But it is not a constant for reasons of greater robustness, and to allow that the encryption IV (which is part of the ciphertext) + // be inspected for ACCIDENTAL tampering without attempting decryption + private final Supplier encryptionNonceSupplier; // the metadata is stored in a separate blob so that when the metadata is regenerated (for example, rencrypting it after the repository - // password is changed) it will not incur updating the encrypted blob, but only recreating a new metadata blob. - // However, the encrypted blob is prepended a fixed length identifier which is used to locate the corresponding metadata. - // This identifier is fixed, so it will not change when the metadata is recreated. - private final ThreadLocal> metadataIdentifierGenerator; + // password is changed) it does not incur updates to the encrypted blob, but only recreating a new metadata blob. + // However, the encrypted blob's contents is prepended a fixed length identifier which is used to locate the corresponding metadata. + // This identifier is static for a given encrypted blob, i.e. it will not change when the metadata is recreated. + private final Supplier metadataIdentifierSupplier; private final Supplier licenseStateSupplier; // the salted hash of this repository's password on the local node. The password is fixed for the lifetime of the repository. 
private final String repositoryPasswordSaltedHash; @@ -130,30 +129,22 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry char[] password) throws NoSuchAlgorithmException { super(metadata, namedXContentRegistry, clusterService, BlobPath.cleanPath()); this.delegatedRepository = delegatedRepository; - this.dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); - this.dataEncryptionKeyGenerator.init(DATA_KEY_LENGTH_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + KeyGenerator dataEncryptionKeyGenerator = KeyGenerator.getInstance(EncryptedRepositoryPlugin.CIPHER_ALGO); + dataEncryptionKeyGenerator.init(DATA_KEY_LENGTH_IN_BITS, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); + this.dataEncryptionKeySupplier = () -> dataEncryptionKeyGenerator.generateKey(); this.metadataEncryption = new PasswordBasedEncryption(password, SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO)); - // data encryption uses random "nonce"s although currently a constant would be just as secure - this.encryptionNonceGenerator = ThreadLocal.withInitial(() -> { - final SecureRandom secureRandom; - try { - secureRandom = SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO); - } catch (NoSuchAlgorithmException e) { - throw new IllegalStateException("Unexpected exception creating SecureRandom instance for [" + - EncryptedRepositoryPlugin.RAND_ALGO + "]", e); - } - return (Supplier) () -> secureRandom.nextInt(); - }); - // the metadata used to decrypt the encrypted blob resides in a different blob, one for every encrypted blob, - // which has a sufficiently long random name, enough to make it effectively unique in any given practical blob container - this.metadataIdentifierGenerator = ThreadLocal.withInitial(() -> { - final Random random = Randomness.get(); - return (Supplier) () -> { - byte[] randomMetadataName = new byte[METADATA_UID_LENGTH_IN_BYTES]; - random.nextBytes(randomMetadataName); - return randomMetadataName; - }; - }); + // data encryption uses a random "nonce"s, although currently a constant "nonce" would be just as secure (because the data + // encryption key is randomly generated, using a {@code SecureRandom}, so there is no risk of reusing the same key with the same IV) + // don't use a {@code SecureRandom} though, it would be an unnecessary entropy drain + this.encryptionNonceSupplier = () -> Randomness.get().nextInt(); + // the metadata used to decrypt the encrypted blob resides in a different blob, one for each encrypted data blob + // the metadata blob name is formed from the encrypted data blob name by appending a random tag, enough to make it unique + // in the unusual case of data blob overwrite (encrypted data overwrite does not imply metadata overwrite) + this.metadataIdentifierSupplier = () -> { + byte[] randomMetadataNameTag = new byte[METADATA_UID_LENGTH_IN_BYTES]; + Randomness.get().nextBytes(randomMetadataNameTag); + return randomMetadataNameTag; + }; this.licenseStateSupplier = licenseStateSupplier; // the salted password hash for this encrypted repository password, on the local node (this is constant) this.repositoryPasswordSaltedHash = computeSaltedPBKDF2Hash(SecureRandom.getInstance(EncryptedRepositoryPlugin.RAND_ALGO), @@ -237,8 +228,8 @@ private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encrypted @Override protected BlobStore createBlobStore() { - return new EncryptedBlobStore(delegatedRepository, dataEncryptionKeyGenerator, 
metadataEncryption, - encryptionNonceGenerator, metadataIdentifierGenerator); + return new EncryptedBlobStore(delegatedRepository, dataEncryptionKeySupplier, metadataEncryption, + encryptionNonceSupplier, metadataIdentifierSupplier); } @Override @@ -279,22 +270,22 @@ protected void writeSnapshotIndexLatestBlob(long newGen) throws IOException { } private static class EncryptedBlobStore implements BlobStore { + private final BlobStore delegatedBlobStore; private final BlobPath delegatedBasePath; - private final KeyGenerator dataEncryptionKeyGenerator; + private final Supplier dataEncryptionKeySupplier; private final PasswordBasedEncryption metadataEncryption; - private final ThreadLocal> encryptionNonceGenerator; - private final ThreadLocal> metadataIdentifierGenerator; - - EncryptedBlobStore(BlobStoreRepository delegatedBlobStoreRepository, KeyGenerator dataEncryptionKeyGenerator, - PasswordBasedEncryption metadataEncryption, ThreadLocal> encryptionNonceGenerator, - ThreadLocal> metadataIdentifierGenerator) { + private final Supplier encryptionNonceSupplier; + private final Supplier metadataIdentifierSupplier; + EncryptedBlobStore(BlobStoreRepository delegatedBlobStoreRepository, Supplier dataEncryptionKeySupplier, + PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceSupplier, + Supplier metadataIdentifierSupplier) { this.delegatedBlobStore = delegatedBlobStoreRepository.blobStore(); this.delegatedBasePath = delegatedBlobStoreRepository.basePath(); - this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; + this.dataEncryptionKeySupplier = dataEncryptionKeySupplier; this.metadataEncryption = metadataEncryption; - this.encryptionNonceGenerator = encryptionNonceGenerator; - this.metadataIdentifierGenerator = metadataIdentifierGenerator; + this.encryptionNonceSupplier = encryptionNonceSupplier; + this.metadataIdentifierSupplier = metadataIdentifierSupplier; } @Override @@ -304,33 +295,32 @@ public void close() throws IOException { @Override public BlobContainer blobContainer(BlobPath path) { - return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeyGenerator, metadataEncryption, - encryptionNonceGenerator, metadataIdentifierGenerator); + return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeySupplier, metadataEncryption, + encryptionNonceSupplier, metadataIdentifierSupplier); } - } + } private static class EncryptedBlobContainer implements BlobContainer { + private final BlobStore delegatedBlobStore; private final BlobPath delegatedBasePath; private final BlobPath path; - private final KeyGenerator dataEncryptionKeyGenerator; + private final Supplier dataEncryptionKeySupplier; private final PasswordBasedEncryption metadataEncryption; - private final ThreadLocal> encryptionNonceGenerator; - private final ThreadLocal> metadataIdentifierGenerator; + private final Supplier encryptionNonceSupplier; + private final Supplier metadataIdentifierSupplier; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; - EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath delegatedBasePath, BlobPath path, - KeyGenerator dataEncryptionKeyGenerator, PasswordBasedEncryption metadataEncryption, - ThreadLocal> encryptionNonceGenerator, - ThreadLocal> metadataIdentifierGenerator) { + Supplier dataEncryptionKeySupplier, PasswordBasedEncryption metadataEncryption, + Supplier encryptionNonceSupplier, Supplier metadataIdentifierSupplier) { 
this.delegatedBlobStore = delegatedBlobStore; this.delegatedBasePath = delegatedBasePath; this.path = path; - this.dataEncryptionKeyGenerator = dataEncryptionKeyGenerator; + this.dataEncryptionKeySupplier = dataEncryptionKeySupplier; this.metadataEncryption = metadataEncryption; - this.encryptionNonceGenerator = encryptionNonceGenerator; - this.metadataIdentifierGenerator = metadataIdentifierGenerator; + this.encryptionNonceSupplier = encryptionNonceSupplier; + this.metadataIdentifierSupplier = metadataIdentifierSupplier; this.delegatedBlobContainer = delegatedBlobStore.blobContainer(delegatedBasePath.append(path)); this.encryptionMetadataBlobContainer = delegatedBlobStore.blobContainer(delegatedBasePath.add(ENCRYPTION_METADATA_ROOT).append(path)); @@ -412,8 +402,8 @@ public InputStream readBlob(String blobName) throws IOException { */ @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { - final SecretKey dataEncryptionKey = dataEncryptionKeyGenerator.generateKey(); - final int nonce = encryptionNonceGenerator.get().get(); + final SecretKey dataEncryptionKey = dataEncryptionKeySupplier.get(); + final int nonce = encryptionNonceSupplier.get(); // this is the metadata required to decrypt back the (soon to be) encrypted blob BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt the metadata @@ -425,7 +415,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } // the metadata identifier is a sufficiently long random byte array so as to make it practically unique // the goal is to avoid overwriting metadata blobs even if the encrypted data blobs are overwritten - final byte[] metadataIdentifier = metadataIdentifierGenerator.get().get(); + final byte[] metadataIdentifier = metadataIdentifierSupplier.get(); final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), StandardCharsets.UTF_8); // first write the encrypted metadata to a UNIQUE blob name @@ -523,8 +513,8 @@ public Map children() throws IOException { // get an encrypted blob container for each child // Note that the encryption metadata blob container might be missing result.put(encryptedBlobContainer.getKey(), new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, - path.add(encryptedBlobContainer.getKey()), dataEncryptionKeyGenerator, metadataEncryption, - encryptionNonceGenerator, metadataIdentifierGenerator)); + path.add(encryptedBlobContainer.getKey()), dataEncryptionKeySupplier, metadataEncryption, + encryptionNonceSupplier, metadataIdentifierSupplier)); } return result; } @@ -588,7 +578,6 @@ public void cleanUpOrphanedMetadata() throws IOException { } } } - } private static String computeSaltedPBKDF2Hash(SecureRandom secureRandom, char[] password) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java index 46a2eb96c2c37..86c3a24171a9b 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/PasswordBasedEncryption.java @@ -53,6 +53,8 @@ * randomly generates a new 12-byte wide IV, and so in order to limit the 
risk of a collision, the key must be changed * after at most {@link #ENCRYPT_INVOKE_LIMIT_USING_SAME_KEY} IVs have been generated and used with that same key. For more * details, see Section 8.2 of https://csrc.nist.gov/publications/detail/sp/800-38d/final . + *

+ * {@code PasswordBasedEncryption} objects are safe for use by multiple concurrent threads. */ public final class PasswordBasedEncryption { @@ -85,7 +87,7 @@ public final class PasswordBasedEncryption { // are generated using a different salt, which is generated randomly) private final char[] password; // this is used to generate the IVs for each encryption instance as well as the salt for every key generation - private final SecureRandom secureRandom; + private final SecureRandom secureRandom; // this is thread-safe // this is used to store the secret keys given the salt that was used in generating it private final Cache> keyBySaltCache; // the salt of the secret key which is used for encryption From 19a78286f6e7c6f47c38b4e5b0c232239a027542 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 14 Feb 2020 16:45:40 +0200 Subject: [PATCH 107/142] checkstyle --- .../blobstore/BlobStoreTestUtil.java | 1 - .../encrypted/EncryptedRepository.java | 4 ++-- ...ryptedFSBlobStoreRepositoryIntegTests.java | 19 +++---------------- 3 files changed, 5 insertions(+), 19 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 6c0c8ebb6bb5f..f35f6e8dfb8e9 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -52,7 +52,6 @@ import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.threadpool.ThreadPool; -import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 49383fbaab945..73b16f61a2f1c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -3,7 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ - package org.elasticsearch.repositories.encrypted; import org.apache.logging.log4j.LogManager; @@ -253,7 +252,8 @@ protected void doClose() { @Override protected long readSnapshotIndexLatestBlob() throws IOException { EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); - return Numbers.bytesToLong(Streams.readFully(encryptedBlobContainer.delegatedBlobContainer.readBlob(INDEX_LATEST_BLOB)).toBytesRef()); + return Numbers.bytesToLong(Streams.readFully(encryptedBlobContainer.delegatedBlobContainer.readBlob(INDEX_LATEST_BLOB)) + .toBytesRef()); } @Override diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index e7c2b39b79500..4b85855f50fd6 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -1,20 +1,7 @@ /* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
*/ package org.elasticsearch.repositories.encrypted; From 63c685553c0ea13aacee96e1f6d65e46c9f8ecd7 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 15 Feb 2020 18:07:53 +0200 Subject: [PATCH 108/142] invert stale-surviving semantics --- .../blobstore/BlobStoreRepository.java | 98 ++++++++++--------- 1 file changed, 50 insertions(+), 48 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 14d48b14dac4f..fd876ef298950 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -593,11 +593,14 @@ private void doDeleteShardSnapshots(SnapshotId snapshotId, long repositoryStateI } } - private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map rootBlobs, + private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map foundRootBlobs, RepositoryData updatedRepoData, ActionListener listener) { + final Set survivingIndexIds = getSurvivingIndexIds(updatedRepoData, foundIndices.keySet()); + final Set survivingRootBlobNames = getSurvivingRootBlobNames(updatedRepoData, foundRootBlobs.keySet()); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap( listener, - l -> cleanupStaleBlobs(foundIndices, rootBlobs, updatedRepoData, ActionListener.map(l, ignored -> null)))); + l -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, + ActionListener.map(l, ignored -> null)))); } private void asyncCleanupUnlinkedShardLevelBlobs(SnapshotId snapshotId, Collection deleteResults, @@ -714,16 +717,11 @@ private List resolveFilesToDelete(SnapshotId snapshotId, Collection foundIndices, Map rootBlobs, - RepositoryData newRepoData, ActionListener listener) { + void cleanupStaleBlobs(Map foundIndices, + Set survivingIndexIds, Map foundRootBlobs, + Set survivingRootBlobNames, ActionListener listener) { final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { DeleteResult deleteResult = DeleteResult.ZERO; for (DeleteResult result : deleteResults) { @@ -734,11 +732,10 @@ private void cleanupStaleBlobs(Map foundIndices, Map { - List deletedBlobs = cleanupStaleRootFiles(staleRootBlobs(newRepoData, rootBlobs.keySet())); - return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum()); + List deletedBlobs = cleanupStaleRootFiles(foundRootBlobs, survivingRootBlobNames); + return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> foundRootBlobs.get(name).length()).sum()); })); - final Set survivingIndexIds = newRepoData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(foundIndices, survivingIndexIds))); } @@ -759,57 +756,62 @@ public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository"); } - Map rootBlobs = blobContainer().listBlobs(); - final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); + Map foundRootBlobs = blobContainer().listBlobs(); + final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, foundRootBlobs); 
final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); - final Set survivingIndexIds = - repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - final List staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs.keySet()); - if (survivingIndexIds.equals(foundIndices.keySet()) && staleRootBlobs.isEmpty()) { + final Set survivingIndexIds = getSurvivingIndexIds(repositoryData, foundIndices.keySet()); + final Set survivingRootBlobNames = getSurvivingRootBlobNames(repositoryData, foundRootBlobs.keySet()); + if (survivingIndexIds.containsAll(foundIndices.keySet()) && survivingRootBlobNames.containsAll(foundRootBlobs.keySet())) { // Nothing to clean up we return listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); } else { // write new index-N blob to ensure concurrent operations will fail writeIndexGen(repositoryData, repositoryStateId, writeShardGens, - ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, rootBlobs, repositoryData, - ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); + ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, + ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); + } } catch (Exception e) { listener.onFailure(e); } } - // Finds all blobs directly under the repository root path that are not referenced by the current RepositoryData - private List staleRootBlobs(RepositoryData repositoryData, Set rootBlobNames) { - final Set allSnapshotIds = - repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); - return rootBlobNames.stream().filter( - blob -> { - if (FsBlobContainer.isTempBlobName(blob)) { - return true; - } - if (blob.endsWith(".dat")) { - final String foundUUID; - if (blob.startsWith(SNAPSHOT_PREFIX)) { - foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); - assert snapshotFormat.blobName(foundUUID).equals(blob); - } else if (blob.startsWith(METADATA_PREFIX)) { - foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); - assert globalMetaDataFormat.blobName(foundUUID).equals(blob); - } else { + Set getSurvivingIndexIds(RepositoryData repositoryData, Set foundIndexIds) { + Set survivingIndexIds = repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); + return foundIndexIds.stream().filter(survivingIndexIds::contains).collect(Collectors.toSet()); + } + + Set getSurvivingRootBlobNames(RepositoryData repositoryData, Set foundRootBlobNames) { + Set survivingSnapshotIds = repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); + return foundRootBlobNames.stream().filter( + blob -> { + if (FsBlobContainer.isTempBlobName(blob)) { return false; + } else if (blob.endsWith(".dat")) { + final String foundUUID; + if (blob.startsWith(SNAPSHOT_PREFIX)) { + foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); + assert snapshotFormat.blobName(foundUUID).equals(blob); + } else if (blob.startsWith(METADATA_PREFIX)) { + foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); + assert globalMetaDataFormat.blobName(foundUUID).equals(blob); + } else { + return true; + } + return survivingSnapshotIds.contains(foundUUID); + } else if (blob.startsWith(INDEX_FILE_PREFIX)) { + // TODO: Include the current generation here once we remove 
keeping index-(N-1) around from #writeIndexGen + return repositoryData.getGenId() <= Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); + } else { + return true; } - return allSnapshotIds.contains(foundUUID) == false; - } else if (blob.startsWith(INDEX_FILE_PREFIX)) { - // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen - return repositoryData.getGenId() > Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); } - return false; - } - ).collect(Collectors.toList()); + ).collect(Collectors.toSet()); } - private List cleanupStaleRootFiles(List blobsToDelete) { + private List cleanupStaleRootFiles(Map foundRootBlobs, Set survivingRootBlobNames) { + List blobsToDelete = new ArrayList<>(foundRootBlobs.keySet()); + blobsToDelete.removeAll(survivingRootBlobNames); if (blobsToDelete.isEmpty()) { return blobsToDelete; } From b593d66d67095fd072c4bea23350f47ceeb00bc5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 16 Feb 2020 13:54:06 +0200 Subject: [PATCH 109/142] Cleanup WIP --- .../blobstore/BlobStoreRepository.java | 8 +- .../encrypted/EncryptedRepository.java | 104 +++++++++++++++++- 2 files changed, 102 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index fd876ef298950..0dafe5f70c081 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -215,7 +215,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp private final CounterMetric restoreRateLimitingTimeInNanos = new CounterMetric(); - private final ChecksumBlobStoreFormat globalMetaDataFormat; + protected final ChecksumBlobStoreFormat globalMetaDataFormat; private final ChecksumBlobStoreFormat indexMetaDataFormat; @@ -508,7 +508,7 @@ public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId, boolea * @param rootBlobs Blobs at the repository root * @return RepositoryData */ - private RepositoryData safeRepositoryData(long repositoryStateId, Map rootBlobs) { + protected RepositoryData safeRepositoryData(long repositoryStateId, Map rootBlobs) { final long generation = latestGeneration(rootBlobs.keySet()); final long genToLoad; if (bestEffortConsistency) { @@ -756,7 +756,7 @@ public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository"); } - Map foundRootBlobs = blobContainer().listBlobs(); + final Map foundRootBlobs = blobContainer().listBlobs(); final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, foundRootBlobs); final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); final Set survivingIndexIds = getSurvivingIndexIds(repositoryData, foundIndices.keySet()); @@ -969,7 +969,7 @@ public IndexMetaData getSnapshotIndexMetaData(final SnapshotId snapshotId, final } } - private BlobPath indicesPath() { + protected BlobPath indicesPath() { return basePath().add("indices"); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 
73b16f61a2f1c..d88048b1b2e71 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.StepListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; @@ -19,6 +20,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -30,6 +32,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -53,6 +56,7 @@ import java.util.Base64; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -60,6 +64,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Supplier; +import java.util.stream.Collectors; public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); @@ -207,11 +212,98 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { - super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { - EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); - cleanUpOrphanedMetadataRecursively(encryptedBlobContainer); - listener.onResponse(repositoryCleanupResult); - }, listener::onFailure)); + if (isReadOnly()) { + listener.onFailure(new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository")); + return; + } + StepListener cleanupDelegatedRepositoryStep = new StepListener<>(); + super.cleanup(repositoryStateId, writeShardGens, cleanupDelegatedRepositoryStep); + cleanupDelegatedRepositoryStep.whenComplete(delegatedRepositoryCleanupResult -> { + RepositoryData repositoryData = safeRepositoryData(repositoryStateId, blobContainer().listBlobs()); + // list all encryption metadata indices blob container + Map staleMetadataIndices = + ((EncryptedBlobContainer) blobStore().blobContainer(indicesPath())).encryptionMetadataBlobContainer.children(); + repositoryData.getIndices().values().stream().map(IndexId::getId).forEach(survivingIndexId -> { + // stale blob containers are those which are not surviving + staleMetadataIndices.remove(survivingIndexId); + }); + // list all encryption metadata root blobs + Map staleRootMetadataBlobs = + ((EncryptedBlobContainer) 
blobContainer()).encryptionMetadataBlobContainer.listBlobs(); + Iterator> it = staleRootMetadataBlobs.entrySet().iterator(); +// staleRootMetadataBlobs.entrySet().removeIf(rootMetadata -> { +// String metadataBlobName = rootMetadata.getKey(); +// // unrecognized metadata blob, do NOT remove +// if (metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS) { +// return false; +// } +// String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); +// }); +// while (it.hasNext()) { +// String metadataBlobName = it.next().getKey(); +// // unrecognized metadata blob, do NOT remove +// if (metadataBlobName.length() < METADATA_UID_LENGTH_IN_CHARS) { +// it.remove(); +// } else { +// String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); +// if (FsBlobContainer.isTempBlobName(blobName)) { +// // this must be removed +// } else if (blobName.endsWith(".dat")) { +// final String foundUUID; +// if (blobName.startsWith(SNAPSHOT_PREFIX)) { +// foundUUID = blobName.substring(SNAPSHOT_PREFIX.length(), blobName.length() - ".dat".length()); +// assert snapshotFormat.blobName(foundUUID).equals(blobName); +// } else if (blobName.startsWith(METADATA_PREFIX)) { +// foundUUID = blobName.substring(METADATA_PREFIX.length(), blobName.length() - ".dat".length()); +// assert globalMetaDataFormat.blobName(foundUUID).equals(blobName); +// } else { +// +// return true; +// } +// return survivingSnapshotIds.contains(foundUUID); +// +// } +// } +// } +// Set repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); +// return foundRootBlobNames.stream().filter( +// blob -> { +// if (FsBlobContainer.isTempBlobName(blob)) { +// return false; +// } else if (blob.endsWith(".dat")) { +// final String foundUUID; +// if (blob.startsWith(SNAPSHOT_PREFIX)) { +// foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); +// assert snapshotFormat.blobName(foundUUID).equals(blob); +// } else if (blob.startsWith(METADATA_PREFIX)) { +// foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); +// assert globalMetaDataFormat.blobName(foundUUID).equals(blob); +// } else { +// return true; +// } +// return survivingSnapshotIds.contains(foundUUID); +// } else if (blob.startsWith(INDEX_FILE_PREFIX)) { +// // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen +// return repositoryData.getGenId() <= Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); +// } else { +// return true; +// } +// } +// ).collect(Collectors.toSet()); +// final Set survivingRootBlobNames = getSurvivingRootBlobNames(repositoryData, foundRootBlobs.keySet()); +// if (survivingIndexIds.containsAll(foundIndices.keySet()) && survivingRootBlobNames.containsAll(foundRootBlobs.keySet())) { +// // Nothing to clean up we return +// listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); +// } else { +// // write new index-N blob to ensure concurrent operations will fail +// writeIndexGen(repositoryData, repositoryStateId, writeShardGens, +// ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, +// ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); +// +// } + + }, listener::onFailure); + } private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encryptedBlobContainer) throws IOException{ @@ -298,8 +390,8 
@@ public BlobContainer blobContainer(BlobPath path) { return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeySupplier, metadataEncryption, encryptionNonceSupplier, metadataIdentifierSupplier); } - } + private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; From 30d4f58252fd3814ced82823620a1b246a69107d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 16 Feb 2020 13:56:02 +0200 Subject: [PATCH 110/142] Revert "invert stale-surviving semantics" This reverts commit 63c685553c0ea13aacee96e1f6d65e46c9f8ecd7. --- .../blobstore/BlobStoreRepository.java | 98 +++++++++---------- 1 file changed, 48 insertions(+), 50 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 0dafe5f70c081..6443781cc5efc 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -593,14 +593,11 @@ private void doDeleteShardSnapshots(SnapshotId snapshotId, long repositoryStateI } } - private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map foundRootBlobs, + private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map rootBlobs, RepositoryData updatedRepoData, ActionListener listener) { - final Set survivingIndexIds = getSurvivingIndexIds(updatedRepoData, foundIndices.keySet()); - final Set survivingRootBlobNames = getSurvivingRootBlobNames(updatedRepoData, foundRootBlobs.keySet()); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap( listener, - l -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, - ActionListener.map(l, ignored -> null)))); + l -> cleanupStaleBlobs(foundIndices, rootBlobs, updatedRepoData, ActionListener.map(l, ignored -> null)))); } private void asyncCleanupUnlinkedShardLevelBlobs(SnapshotId snapshotId, Collection deleteResults, @@ -717,11 +714,16 @@ private List resolveFilesToDelete(SnapshotId snapshotId, Collection foundIndices, - Set survivingIndexIds, Map foundRootBlobs, - Set survivingRootBlobNames, ActionListener listener) { + private void cleanupStaleBlobs(Map foundIndices, Map rootBlobs, + RepositoryData newRepoData, ActionListener listener) { final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { DeleteResult deleteResult = DeleteResult.ZERO; for (DeleteResult result : deleteResults) { @@ -732,10 +734,11 @@ void cleanupStaleBlobs(Map foundIndices, final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); executor.execute(ActionRunnable.supply(groupedListener, () -> { - List deletedBlobs = cleanupStaleRootFiles(foundRootBlobs, survivingRootBlobNames); - return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> foundRootBlobs.get(name).length()).sum()); + List deletedBlobs = cleanupStaleRootFiles(staleRootBlobs(newRepoData, rootBlobs.keySet())); + return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum()); })); + final Set survivingIndexIds = newRepoData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(foundIndices, 
survivingIndexIds))); } @@ -756,62 +759,57 @@ public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository"); } - final Map foundRootBlobs = blobContainer().listBlobs(); - final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, foundRootBlobs); + Map rootBlobs = blobContainer().listBlobs(); + final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); - final Set survivingIndexIds = getSurvivingIndexIds(repositoryData, foundIndices.keySet()); - final Set survivingRootBlobNames = getSurvivingRootBlobNames(repositoryData, foundRootBlobs.keySet()); - if (survivingIndexIds.containsAll(foundIndices.keySet()) && survivingRootBlobNames.containsAll(foundRootBlobs.keySet())) { + final Set survivingIndexIds = + repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); + final List staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs.keySet()); + if (survivingIndexIds.equals(foundIndices.keySet()) && staleRootBlobs.isEmpty()) { // Nothing to clean up we return listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); } else { // write new index-N blob to ensure concurrent operations will fail writeIndexGen(repositoryData, repositoryStateId, writeShardGens, - ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, - ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); - + ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, rootBlobs, repositoryData, + ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); } } catch (Exception e) { listener.onFailure(e); } } - Set getSurvivingIndexIds(RepositoryData repositoryData, Set foundIndexIds) { - Set survivingIndexIds = repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - return foundIndexIds.stream().filter(survivingIndexIds::contains).collect(Collectors.toSet()); - } - - Set getSurvivingRootBlobNames(RepositoryData repositoryData, Set foundRootBlobNames) { - Set survivingSnapshotIds = repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); - return foundRootBlobNames.stream().filter( - blob -> { - if (FsBlobContainer.isTempBlobName(blob)) { - return false; - } else if (blob.endsWith(".dat")) { - final String foundUUID; - if (blob.startsWith(SNAPSHOT_PREFIX)) { - foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); - assert snapshotFormat.blobName(foundUUID).equals(blob); - } else if (blob.startsWith(METADATA_PREFIX)) { - foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); - assert globalMetaDataFormat.blobName(foundUUID).equals(blob); - } else { - return true; - } - return survivingSnapshotIds.contains(foundUUID); - } else if (blob.startsWith(INDEX_FILE_PREFIX)) { - // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen - return repositoryData.getGenId() <= Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); + // Finds all blobs directly under the repository root path that are not referenced by the current RepositoryData + private List staleRootBlobs(RepositoryData repositoryData, Set rootBlobNames) { + final Set 
allSnapshotIds = + repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); + return rootBlobNames.stream().filter( + blob -> { + if (FsBlobContainer.isTempBlobName(blob)) { + return true; + } + if (blob.endsWith(".dat")) { + final String foundUUID; + if (blob.startsWith(SNAPSHOT_PREFIX)) { + foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); + assert snapshotFormat.blobName(foundUUID).equals(blob); + } else if (blob.startsWith(METADATA_PREFIX)) { + foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); + assert globalMetaDataFormat.blobName(foundUUID).equals(blob); } else { - return true; + return false; } + return allSnapshotIds.contains(foundUUID) == false; + } else if (blob.startsWith(INDEX_FILE_PREFIX)) { + // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen + return repositoryData.getGenId() > Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); } - ).collect(Collectors.toSet()); + return false; + } + ).collect(Collectors.toList()); } - private List cleanupStaleRootFiles(Map foundRootBlobs, Set survivingRootBlobNames) { - List blobsToDelete = new ArrayList<>(foundRootBlobs.keySet()); - blobsToDelete.removeAll(survivingRootBlobNames); + private List cleanupStaleRootFiles(List blobsToDelete) { if (blobsToDelete.isEmpty()) { return blobsToDelete; } From 17453457db1e02e72f24f537e4e0f123717f2214 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 16 Feb 2020 14:50:44 +0200 Subject: [PATCH 111/142] BlobStoreRepository reshuffle stale root blobs and indices --- .../blobstore/BlobStoreRepository.java | 91 ++++++++++--------- 1 file changed, 48 insertions(+), 43 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 6443781cc5efc..0892ab5a526f4 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -595,9 +595,11 @@ private void doDeleteShardSnapshots(SnapshotId snapshotId, long repositoryStateI private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map rootBlobs, RepositoryData updatedRepoData, ActionListener listener) { + final Map staleIndices = staleIndices(updatedRepoData, foundIndices); + final Map staleRootBlobs = staleRootBlobs(updatedRepoData, rootBlobs); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap( listener, - l -> cleanupStaleBlobs(foundIndices, rootBlobs, updatedRepoData, ActionListener.map(l, ignored -> null)))); + l -> cleanupStaleBlobs(staleIndices, staleRootBlobs, ActionListener.map(l, ignored -> null)))); } private void asyncCleanupUnlinkedShardLevelBlobs(SnapshotId snapshotId, Collection deleteResults, @@ -714,16 +716,14 @@ private List resolveFilesToDelete(SnapshotId snapshotId, Collection foundIndices, Map rootBlobs, - RepositoryData newRepoData, ActionListener listener) { + protected void cleanupStaleBlobs(Map staleIndices, Map staleRootBlobs, + ActionListener listener) { final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { DeleteResult deleteResult = DeleteResult.ZERO; for (DeleteResult result : deleteResults) { @@ -734,12 +734,11 @@ private void cleanupStaleBlobs(Map foundIndices, Map { - List 
deletedBlobs = cleanupStaleRootFiles(staleRootBlobs(newRepoData, rootBlobs.keySet())); - return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum()); + Map deletedBlobs = cleanupStaleRootFiles(staleRootBlobs); + return new DeleteResult(deletedBlobs.size(), deletedBlobs.values().stream().mapToLong(BlobMetaData::length).sum()); })); - final Set survivingIndexIds = newRepoData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(foundIndices, survivingIndexIds))); + executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(staleIndices))); } /** @@ -762,65 +761,73 @@ public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen Map rootBlobs = blobContainer().listBlobs(); final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); - final Set survivingIndexIds = - repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - final List staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs.keySet()); - if (survivingIndexIds.equals(foundIndices.keySet()) && staleRootBlobs.isEmpty()) { + final Map staleIndices = staleIndices(repositoryData, foundIndices); + final Map staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs); + if (staleIndices.isEmpty() && staleRootBlobs.isEmpty()) { // Nothing to clean up we return listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); } else { // write new index-N blob to ensure concurrent operations will fail writeIndexGen(repositoryData, repositoryStateId, writeShardGens, - ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, rootBlobs, repositoryData, - ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); + ActionListener.wrap(v -> cleanupStaleBlobs(staleIndices, staleRootBlobs, ActionListener.map(listener, + RepositoryCleanupResult::new)), listener::onFailure)); } } catch (Exception e) { listener.onFailure(e); } } + protected Map staleIndices(RepositoryData repositoryData, Map foundIndices) { + final Set survivingIndexIds = + repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); + return foundIndices.entrySet().stream() + .filter(index -> false == survivingIndexIds.contains(index.getKey())) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + // Finds all blobs directly under the repository root path that are not referenced by the current RepositoryData - private List staleRootBlobs(RepositoryData repositoryData, Set rootBlobNames) { + protected Map staleRootBlobs(RepositoryData repositoryData, Map rootBlobs) { final Set allSnapshotIds = repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); - return rootBlobNames.stream().filter( + return rootBlobs.entrySet().stream().filter( blob -> { - if (FsBlobContainer.isTempBlobName(blob)) { + final String blobName = blob.getKey(); + if (FsBlobContainer.isTempBlobName(blobName)) { return true; } - if (blob.endsWith(".dat")) { + if (blobName.endsWith(".dat")) { final String foundUUID; - if (blob.startsWith(SNAPSHOT_PREFIX)) { - foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); + if (blobName.startsWith(SNAPSHOT_PREFIX)) { + foundUUID = 
blobName.substring(SNAPSHOT_PREFIX.length(), blobName.length() - ".dat".length()); assert snapshotFormat.blobName(foundUUID).equals(blob); - } else if (blob.startsWith(METADATA_PREFIX)) { - foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); + } else if (blobName.startsWith(METADATA_PREFIX)) { + foundUUID = blobName.substring(METADATA_PREFIX.length(), blobName.length() - ".dat".length()); assert globalMetaDataFormat.blobName(foundUUID).equals(blob); } else { return false; } return allSnapshotIds.contains(foundUUID) == false; - } else if (blob.startsWith(INDEX_FILE_PREFIX)) { + } else if (blobName.startsWith(INDEX_FILE_PREFIX)) { // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen - return repositoryData.getGenId() > Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); + return repositoryData.getGenId() > Long.parseLong(blobName.substring(INDEX_FILE_PREFIX.length())); } return false; } - ).collect(Collectors.toList()); + ).collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); } - private List cleanupStaleRootFiles(List blobsToDelete) { + private Map cleanupStaleRootFiles(Map blobsToDelete) { if (blobsToDelete.isEmpty()) { return blobsToDelete; } try { - logger.info("[{}] Found stale root level blobs {}. Cleaning them up", metadata.name(), blobsToDelete); - blobContainer().deleteBlobsIgnoringIfNotExists(blobsToDelete); + logger.info("[{}] Found stale root level blobs {}. Cleaning them up", metadata.name(), blobsToDelete.keySet()); + blobContainer().deleteBlobsIgnoringIfNotExists(new ArrayList<>(blobsToDelete.keySet())); return blobsToDelete; } catch (IOException e) { logger.warn(() -> new ParameterizedMessage( "[{}] The following blobs are no longer part of any snapshot [{}] but failed to remove them", - metadata.name(), blobsToDelete), e); + metadata.name(), blobsToDelete.keySet()), e); } catch (Exception e) { // TODO: We shouldn't be blanket catching and suppressing all exceptions here and instead handle them safely upstream. // Currently this catch exists as a stop gap solution to tackle unexpected runtime exceptions from implementations @@ -828,24 +835,22 @@ private List cleanupStaleRootFiles(List blobsToDelete) { assert false : e; logger.warn(new ParameterizedMessage("[{}] Exception during cleanup of root level blobs", metadata.name()), e); } - return Collections.emptyList(); + return Collections.emptyMap(); } - private DeleteResult cleanupStaleIndices(Map foundIndices, Set survivingIndexIds) { + protected DeleteResult cleanupStaleIndices(Map staleIndices) { DeleteResult deleteResult = DeleteResult.ZERO; try { - for (Map.Entry indexEntry : foundIndices.entrySet()) { + for (Map.Entry indexEntry : staleIndices.entrySet()) { final String indexSnId = indexEntry.getKey(); try { - if (survivingIndexIds.contains(indexSnId) == false) { - logger.debug("[{}] Found stale index [{}]. Cleaning it up", metadata.name(), indexSnId); - deleteResult = deleteResult.add(indexEntry.getValue().delete()); - logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexSnId); - } + logger.debug("[{}] Found stale index [{}]. 
Cleaning it up", metadata.name(), indexSnId); + deleteResult = deleteResult.add(indexEntry.getValue().delete()); + logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexSnId); } catch (IOException e) { logger.warn(() -> new ParameterizedMessage( - "[{}] index {} is no longer part of any snapshots in the repository, " + - "but failed to clean up their index folders", metadata.name(), indexSnId), e); + "[{}] index {} is no longer part of any snapshots in the repository, " + + "but failed to clean up their index folders", metadata.name(), indexSnId), e); } } } catch (Exception e) { From 68b923502f41d64c764b4af4b6a0e4c8a28fc174 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 17 Feb 2020 13:01:23 +0200 Subject: [PATCH 112/142] Revert "BlobStoreRepository reshuffle stale root blobs and indices" This reverts commit 17453457db1e02e72f24f537e4e0f123717f2214. --- .../blobstore/BlobStoreRepository.java | 91 +++++++++---------- 1 file changed, 43 insertions(+), 48 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 0892ab5a526f4..6443781cc5efc 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -595,11 +595,9 @@ private void doDeleteShardSnapshots(SnapshotId snapshotId, long repositoryStateI private void asyncCleanupUnlinkedRootAndIndicesBlobs(Map foundIndices, Map rootBlobs, RepositoryData updatedRepoData, ActionListener listener) { - final Map staleIndices = staleIndices(updatedRepoData, foundIndices); - final Map staleRootBlobs = staleRootBlobs(updatedRepoData, rootBlobs); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap( listener, - l -> cleanupStaleBlobs(staleIndices, staleRootBlobs, ActionListener.map(l, ignored -> null)))); + l -> cleanupStaleBlobs(foundIndices, rootBlobs, updatedRepoData, ActionListener.map(l, ignored -> null)))); } private void asyncCleanupUnlinkedShardLevelBlobs(SnapshotId snapshotId, Collection deleteResults, @@ -716,14 +714,16 @@ private List resolveFilesToDelete(SnapshotId snapshotId, Collection staleIndices, Map staleRootBlobs, - ActionListener listener) { + private void cleanupStaleBlobs(Map foundIndices, Map rootBlobs, + RepositoryData newRepoData, ActionListener listener) { final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { DeleteResult deleteResult = DeleteResult.ZERO; for (DeleteResult result : deleteResults) { @@ -734,11 +734,12 @@ protected void cleanupStaleBlobs(Map staleIndices, Map { - Map deletedBlobs = cleanupStaleRootFiles(staleRootBlobs); - return new DeleteResult(deletedBlobs.size(), deletedBlobs.values().stream().mapToLong(BlobMetaData::length).sum()); + List deletedBlobs = cleanupStaleRootFiles(staleRootBlobs(newRepoData, rootBlobs.keySet())); + return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum()); })); - executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(staleIndices))); + final Set survivingIndexIds = newRepoData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); + executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(foundIndices, survivingIndexIds))); } /** @@ -761,73 +762,65 @@ public 
void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen Map rootBlobs = blobContainer().listBlobs(); final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); - final Map staleIndices = staleIndices(repositoryData, foundIndices); - final Map staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs); - if (staleIndices.isEmpty() && staleRootBlobs.isEmpty()) { + final Set survivingIndexIds = + repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); + final List staleRootBlobs = staleRootBlobs(repositoryData, rootBlobs.keySet()); + if (survivingIndexIds.equals(foundIndices.keySet()) && staleRootBlobs.isEmpty()) { // Nothing to clean up we return listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); } else { // write new index-N blob to ensure concurrent operations will fail writeIndexGen(repositoryData, repositoryStateId, writeShardGens, - ActionListener.wrap(v -> cleanupStaleBlobs(staleIndices, staleRootBlobs, ActionListener.map(listener, - RepositoryCleanupResult::new)), listener::onFailure)); + ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, rootBlobs, repositoryData, + ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); } } catch (Exception e) { listener.onFailure(e); } } - protected Map staleIndices(RepositoryData repositoryData, Map foundIndices) { - final Set survivingIndexIds = - repositoryData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - return foundIndices.entrySet().stream() - .filter(index -> false == survivingIndexIds.contains(index.getKey())) - .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); - } - // Finds all blobs directly under the repository root path that are not referenced by the current RepositoryData - protected Map staleRootBlobs(RepositoryData repositoryData, Map rootBlobs) { + private List staleRootBlobs(RepositoryData repositoryData, Set rootBlobNames) { final Set allSnapshotIds = repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); - return rootBlobs.entrySet().stream().filter( + return rootBlobNames.stream().filter( blob -> { - final String blobName = blob.getKey(); - if (FsBlobContainer.isTempBlobName(blobName)) { + if (FsBlobContainer.isTempBlobName(blob)) { return true; } - if (blobName.endsWith(".dat")) { + if (blob.endsWith(".dat")) { final String foundUUID; - if (blobName.startsWith(SNAPSHOT_PREFIX)) { - foundUUID = blobName.substring(SNAPSHOT_PREFIX.length(), blobName.length() - ".dat".length()); + if (blob.startsWith(SNAPSHOT_PREFIX)) { + foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); assert snapshotFormat.blobName(foundUUID).equals(blob); - } else if (blobName.startsWith(METADATA_PREFIX)) { - foundUUID = blobName.substring(METADATA_PREFIX.length(), blobName.length() - ".dat".length()); + } else if (blob.startsWith(METADATA_PREFIX)) { + foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); assert globalMetaDataFormat.blobName(foundUUID).equals(blob); } else { return false; } return allSnapshotIds.contains(foundUUID) == false; - } else if (blobName.startsWith(INDEX_FILE_PREFIX)) { + } else if (blob.startsWith(INDEX_FILE_PREFIX)) { // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen - return 
repositoryData.getGenId() > Long.parseLong(blobName.substring(INDEX_FILE_PREFIX.length())); + return repositoryData.getGenId() > Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); } return false; } - ).collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + ).collect(Collectors.toList()); } - private Map cleanupStaleRootFiles(Map blobsToDelete) { + private List cleanupStaleRootFiles(List blobsToDelete) { if (blobsToDelete.isEmpty()) { return blobsToDelete; } try { - logger.info("[{}] Found stale root level blobs {}. Cleaning them up", metadata.name(), blobsToDelete.keySet()); - blobContainer().deleteBlobsIgnoringIfNotExists(new ArrayList<>(blobsToDelete.keySet())); + logger.info("[{}] Found stale root level blobs {}. Cleaning them up", metadata.name(), blobsToDelete); + blobContainer().deleteBlobsIgnoringIfNotExists(blobsToDelete); return blobsToDelete; } catch (IOException e) { logger.warn(() -> new ParameterizedMessage( "[{}] The following blobs are no longer part of any snapshot [{}] but failed to remove them", - metadata.name(), blobsToDelete.keySet()), e); + metadata.name(), blobsToDelete), e); } catch (Exception e) { // TODO: We shouldn't be blanket catching and suppressing all exceptions here and instead handle them safely upstream. // Currently this catch exists as a stop gap solution to tackle unexpected runtime exceptions from implementations @@ -835,22 +828,24 @@ private Map cleanupStaleRootFiles(Map staleIndices) { + private DeleteResult cleanupStaleIndices(Map foundIndices, Set survivingIndexIds) { DeleteResult deleteResult = DeleteResult.ZERO; try { - for (Map.Entry indexEntry : staleIndices.entrySet()) { + for (Map.Entry indexEntry : foundIndices.entrySet()) { final String indexSnId = indexEntry.getKey(); try { - logger.debug("[{}] Found stale index [{}]. Cleaning it up", metadata.name(), indexSnId); - deleteResult = deleteResult.add(indexEntry.getValue().delete()); - logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexSnId); + if (survivingIndexIds.contains(indexSnId) == false) { + logger.debug("[{}] Found stale index [{}]. Cleaning it up", metadata.name(), indexSnId); + deleteResult = deleteResult.add(indexEntry.getValue().delete()); + logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexSnId); + } } catch (IOException e) { logger.warn(() -> new ParameterizedMessage( - "[{}] index {} is no longer part of any snapshots in the repository, " + - "but failed to clean up their index folders", metadata.name(), indexSnId), e); + "[{}] index {} is no longer part of any snapshots in the repository, " + + "but failed to clean up their index folders", metadata.name(), indexSnId), e); } } } catch (Exception e) { From 74c3d58a8ef59a62bfa90f1d299619708820ef8c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 17 Feb 2020 13:02:46 +0200 Subject: [PATCH 113/142] Revert "Cleanup WIP" This reverts commit b593d66d67095fd072c4bea23350f47ceeb00bc5. 
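The staleRootBlobs() rule that the patches above keep reshuffling and then restoring can be read as the following standalone sketch. It is an illustration of the rule as restored by the reverts, not code from the patches: the "pending-" prefix is an assumption standing in for FsBlobContainer.isTempBlobName(), and the surviving snapshot UUIDs and current index generation are passed in explicitly instead of being read from RepositoryData.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class StaleRootBlobsSketch {
    static final String SNAPSHOT_PREFIX = "snap-";
    static final String METADATA_PREFIX = "meta-";
    static final String INDEX_FILE_PREFIX = "index-";
    static final String TEMP_PREFIX = "pending-"; // assumption for what FsBlobContainer.isTempBlobName() matches

    // a root blob is stale when nothing in the current RepositoryData references it
    static List<String> staleRootBlobs(Set<String> rootBlobNames, Set<String> liveSnapshotUuids, long currentGeneration) {
        return rootBlobNames.stream().filter(blob -> {
            if (blob.startsWith(TEMP_PREFIX)) {
                return true; // leftover temporary blobs are always stale
            }
            if (blob.endsWith(".dat")) {
                final String uuid;
                if (blob.startsWith(SNAPSHOT_PREFIX)) {
                    uuid = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length());
                } else if (blob.startsWith(METADATA_PREFIX)) {
                    uuid = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length());
                } else {
                    return false; // unrecognised *.dat blobs are left alone
                }
                return liveSnapshotUuids.contains(uuid) == false;
            }
            if (blob.startsWith(INDEX_FILE_PREFIX)) {
                // generations older than the current index-N are stale
                return currentGeneration > Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length()));
            }
            return false; // anything else is kept
        }).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> stale = staleRootBlobs(
                Set.of("index-4", "index-5", "snap-abc.dat", "meta-xyz.dat", "pending-upload"),
                Set.of("abc"), 5L);
        System.out.println(stale); // index-4, meta-xyz.dat and pending-upload, in some order
    }
}

The current index-N generation itself is still kept by this rule, which is why the comparison is strict (see the TODO in the diff about keeping index-(N-1) around in #writeIndexGen).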
--- .../blobstore/BlobStoreRepository.java | 6 +- .../encrypted/EncryptedRepository.java | 104 +----------------- 2 files changed, 9 insertions(+), 101 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 6443781cc5efc..14d48b14dac4f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -215,7 +215,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp private final CounterMetric restoreRateLimitingTimeInNanos = new CounterMetric(); - protected final ChecksumBlobStoreFormat globalMetaDataFormat; + private final ChecksumBlobStoreFormat globalMetaDataFormat; private final ChecksumBlobStoreFormat indexMetaDataFormat; @@ -508,7 +508,7 @@ public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId, boolea * @param rootBlobs Blobs at the repository root * @return RepositoryData */ - protected RepositoryData safeRepositoryData(long repositoryStateId, Map rootBlobs) { + private RepositoryData safeRepositoryData(long repositoryStateId, Map rootBlobs) { final long generation = latestGeneration(rootBlobs.keySet()); final long genToLoad; if (bestEffortConsistency) { @@ -967,7 +967,7 @@ public IndexMetaData getSnapshotIndexMetaData(final SnapshotId snapshotId, final } } - protected BlobPath indicesPath() { + private BlobPath indicesPath() { return basePath().add("indices"); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index d88048b1b2e71..73b16f61a2f1c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.StepListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; @@ -20,7 +19,6 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; -import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -32,7 +30,6 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryCleanupResult; -import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -56,7 +53,6 @@ import java.util.Base64; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -64,7 +60,6 @@ 
import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Supplier; -import java.util.stream.Collectors; public final class EncryptedRepository extends BlobStoreRepository { static final Logger logger = LogManager.getLogger(EncryptedRepository.class); @@ -212,98 +207,11 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { - if (isReadOnly()) { - listener.onFailure(new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository")); - return; - } - StepListener cleanupDelegatedRepositoryStep = new StepListener<>(); - super.cleanup(repositoryStateId, writeShardGens, cleanupDelegatedRepositoryStep); - cleanupDelegatedRepositoryStep.whenComplete(delegatedRepositoryCleanupResult -> { - RepositoryData repositoryData = safeRepositoryData(repositoryStateId, blobContainer().listBlobs()); - // list all encryption metadata indices blob container - Map staleMetadataIndices = - ((EncryptedBlobContainer) blobStore().blobContainer(indicesPath())).encryptionMetadataBlobContainer.children(); - repositoryData.getIndices().values().stream().map(IndexId::getId).forEach(survivingIndexId -> { - // stale blob containers are those which are not surviving - staleMetadataIndices.remove(survivingIndexId); - }); - // list all encryption metadata root blobs - Map staleRootMetadataBlobs = - ((EncryptedBlobContainer) blobContainer()).encryptionMetadataBlobContainer.listBlobs(); - Iterator> it = staleRootMetadataBlobs.entrySet().iterator(); -// staleRootMetadataBlobs.entrySet().removeIf(rootMetadata -> { -// String metadataBlobName = rootMetadata.getKey(); -// // unrecognized metadata blob, do NOT remove -// if (metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS) { -// return false; -// } -// String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); -// }); -// while (it.hasNext()) { -// String metadataBlobName = it.next().getKey(); -// // unrecognized metadata blob, do NOT remove -// if (metadataBlobName.length() < METADATA_UID_LENGTH_IN_CHARS) { -// it.remove(); -// } else { -// String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); -// if (FsBlobContainer.isTempBlobName(blobName)) { -// // this must be removed -// } else if (blobName.endsWith(".dat")) { -// final String foundUUID; -// if (blobName.startsWith(SNAPSHOT_PREFIX)) { -// foundUUID = blobName.substring(SNAPSHOT_PREFIX.length(), blobName.length() - ".dat".length()); -// assert snapshotFormat.blobName(foundUUID).equals(blobName); -// } else if (blobName.startsWith(METADATA_PREFIX)) { -// foundUUID = blobName.substring(METADATA_PREFIX.length(), blobName.length() - ".dat".length()); -// assert globalMetaDataFormat.blobName(foundUUID).equals(blobName); -// } else { -// -// return true; -// } -// return survivingSnapshotIds.contains(foundUUID); -// -// } -// } -// } -// Set repositoryData.getSnapshotIds().stream().map(SnapshotId::getUUID).collect(Collectors.toSet()); -// return foundRootBlobNames.stream().filter( -// blob -> { -// if (FsBlobContainer.isTempBlobName(blob)) { -// return false; -// } else if (blob.endsWith(".dat")) { -// final String foundUUID; -// if (blob.startsWith(SNAPSHOT_PREFIX)) { -// foundUUID = blob.substring(SNAPSHOT_PREFIX.length(), blob.length() - ".dat".length()); -// assert snapshotFormat.blobName(foundUUID).equals(blob); 
-// } else if (blob.startsWith(METADATA_PREFIX)) { -// foundUUID = blob.substring(METADATA_PREFIX.length(), blob.length() - ".dat".length()); -// assert globalMetaDataFormat.blobName(foundUUID).equals(blob); -// } else { -// return true; -// } -// return survivingSnapshotIds.contains(foundUUID); -// } else if (blob.startsWith(INDEX_FILE_PREFIX)) { -// // TODO: Include the current generation here once we remove keeping index-(N-1) around from #writeIndexGen -// return repositoryData.getGenId() <= Long.parseLong(blob.substring(INDEX_FILE_PREFIX.length())); -// } else { -// return true; -// } -// } -// ).collect(Collectors.toSet()); -// final Set survivingRootBlobNames = getSurvivingRootBlobNames(repositoryData, foundRootBlobs.keySet()); -// if (survivingIndexIds.containsAll(foundIndices.keySet()) && survivingRootBlobNames.containsAll(foundRootBlobs.keySet())) { -// // Nothing to clean up we return -// listener.onResponse(new RepositoryCleanupResult(DeleteResult.ZERO)); -// } else { -// // write new index-N blob to ensure concurrent operations will fail -// writeIndexGen(repositoryData, repositoryStateId, writeShardGens, -// ActionListener.wrap(v -> cleanupStaleBlobs(foundIndices, survivingIndexIds, foundRootBlobs, survivingRootBlobNames, -// ActionListener.map(listener, RepositoryCleanupResult::new)), listener::onFailure)); -// -// } - - }, listener::onFailure); - + super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { + EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); + cleanUpOrphanedMetadataRecursively(encryptedBlobContainer); + listener.onResponse(repositoryCleanupResult); + }, listener::onFailure)); } private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encryptedBlobContainer) throws IOException{ @@ -390,8 +298,8 @@ public BlobContainer blobContainer(BlobPath path) { return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeySupplier, metadataEncryption, encryptionNonceSupplier, metadataIdentifierSupplier); } - } + } private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; From 1889abae91a34e21ce67473b5f5c62bbb024e5cd Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 17 Feb 2020 20:09:07 +0200 Subject: [PATCH 114/142] Append repository generation to metadata blob name --- .../blobstore/BlobStoreRepository.java | 2 +- .../encrypted/EncryptedRepository.java | 109 ++++++++++++++---- ...ryptedFSBlobStoreRepositoryIntegTests.java | 2 +- 3 files changed, 90 insertions(+), 23 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 14d48b14dac4f..019c819117372 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1052,7 +1052,7 @@ public void endVerification(String seed) { // Tracks the latest known repository generation in a best-effort way to detect inconsistent listing of root level index-N blobs // and concurrent modifications. 
- private final AtomicLong latestKnownRepoGen = new AtomicLong(RepositoryData.UNKNOWN_REPO_GEN); + protected final AtomicLong latestKnownRepoGen = new AtomicLong(RepositoryData.UNKNOWN_REPO_GEN); @Override public void getRepositoryData(ActionListener listener) { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 73b16f61a2f1c..585ea621fea9a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -45,6 +46,8 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; @@ -73,7 +76,7 @@ public final class EncryptedRepository extends BlobStoreRepository { static final int DATA_KEY_LENGTH_IN_BITS = 256; static final long PACKET_START_COUNTER = Long.MIN_VALUE; static final int MAX_PACKET_LENGTH_IN_BYTES = 8 << 20; // 8MB - static final int METADATA_UID_LENGTH_IN_BYTES = 9; // the length of unique tag appended to the metadata blob name to render it unique + static final int METADATA_UID_LENGTH_IN_BYTES = 9; // the length of random tag part of the metadata blob name static final int METADATA_UID_LENGTH_IN_CHARS = 12; // base64 encoding with no padding // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value @@ -115,7 +118,7 @@ public final class EncryptedRepository extends BlobStoreRepository { // password is changed) it does not incur updates to the encrypted blob, but only recreating a new metadata blob. // However, the encrypted blob's contents is prepended a fixed length identifier which is used to locate the corresponding metadata. // This identifier is static for a given encrypted blob, i.e. it will not change when the metadata is recreated. - private final Supplier metadataIdentifierSupplier; + private final Supplier metadataIdentifierSupplier; private final Supplier licenseStateSupplier; // the salted hash of this repository's password on the local node. The password is fixed for the lifetime of the repository. 
private final String repositoryPasswordSaltedHash; @@ -137,12 +140,12 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry // don't use a {@code SecureRandom} though, it would be an unnecessary entropy drain this.encryptionNonceSupplier = () -> Randomness.get().nextInt(); // the metadata used to decrypt the encrypted blob resides in a different blob, one for each encrypted data blob - // the metadata blob name is formed from the encrypted data blob name by appending a random tag, enough to make it unique - // in the unusual case of data blob overwrite (encrypted data overwrite does not imply metadata overwrite) + // the metadata blob name is formed from the encrypted data blob name by appending a random tag and the repository generation, + // so as to the metadata blob name unique in the unusual cases of data blob overwrite this.metadataIdentifierSupplier = () -> { byte[] randomMetadataNameTag = new byte[METADATA_UID_LENGTH_IN_BYTES]; Randomness.get().nextBytes(randomMetadataNameTag); - return randomMetadataNameTag; + return new MetadataIdentifier(randomMetadataNameTag, latestKnownRepoGen.get()); }; this.licenseStateSupplier = licenseStateSupplier; // the salted password hash for this encrypted repository password, on the local node (this is constant) @@ -152,6 +155,70 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.passwordHashVerifier = new HashVerifier(password); } + static class MetadataIdentifier { + + final byte[] id; + final long repositoryGeneration; + + MetadataIdentifier(byte[] id, long repositoryGeneration) { + if (Objects.requireNonNull(id).length != METADATA_UID_LENGTH_IN_BYTES) { + throw new IllegalStateException("invalid metadata id"); + } + this.id = id; + this.repositoryGeneration = repositoryGeneration; + } + + byte[] asByteArray() { + ByteBuffer byteBuffer = ByteBuffer.allocate(byteLength()).order(ByteOrder.LITTLE_ENDIAN); + byteBuffer.put(id); + byteBuffer.putLong(METADATA_UID_LENGTH_IN_BYTES, repositoryGeneration); + return byteBuffer.array(); + } + + String asString() { + StringBuilder sb = new StringBuilder(); + sb.append('.'); + sb.append(new String(Base64.getUrlEncoder().withoutPadding().encode(id), StandardCharsets.UTF_8)); + sb.append('.'); + sb.append(repositoryGeneration); + return sb.toString(); + } + + static int byteLength() { + return METADATA_UID_LENGTH_IN_BYTES + Long.BYTES; + } + + static String formMetadataBlobName(String blobName, MetadataIdentifier metaId) { + return blobName + metaId.asString(); + } + + static Tuple parseFromMetadataBlobName(String metadataBlobName) { + int generationPos = metadataBlobName.lastIndexOf('.'); + if (generationPos <= 0 || generationPos == metadataBlobName.length() - 1) { + throw new IllegalArgumentException("Unrecognized metadata blob name"); + } + long generation = Long.parseLong(metadataBlobName.substring(generationPos + 1)); + int idPos = metadataBlobName.lastIndexOf('.', generationPos - 1); + if (idPos <= 0 || generationPos - idPos != METADATA_UID_LENGTH_IN_CHARS) { + throw new IllegalArgumentException("Unrecognized metadata blob name"); + } + byte[] id = Base64.getUrlDecoder().decode(metadataBlobName.substring(idPos + 1, generationPos)); + MetadataIdentifier metaId = new MetadataIdentifier(id, generation); + return new Tuple<>(metadataBlobName.substring(0, idPos), metaId); + } + + static MetadataIdentifier fromByteArray(byte[] idAsByteArray) { + if (Objects.requireNonNull(idAsByteArray).length != byteLength()) { + throw new 
IllegalArgumentException("Unrecognized metadata blob name"); + } + ByteBuffer byteBuffer = ByteBuffer.wrap(idAsByteArray).order(ByteOrder.LITTLE_ENDIAN); + byte[] id = new byte[METADATA_UID_LENGTH_IN_BYTES]; + byteBuffer.get(id); + long generation = byteBuffer.getLong(METADATA_UID_LENGTH_IN_BYTES); + return new MetadataIdentifier(id, generation); + } + } + /** * The repository hook method which populates the snapshot metadata with the salted password hash of the repository on the (master) * node that starts of the snapshot operation. All the other actions associated with the same snapshot operation will first verify @@ -276,10 +343,10 @@ private static class EncryptedBlobStore implements BlobStore { private final Supplier dataEncryptionKeySupplier; private final PasswordBasedEncryption metadataEncryption; private final Supplier encryptionNonceSupplier; - private final Supplier metadataIdentifierSupplier; + private final Supplier metadataIdentifierSupplier; EncryptedBlobStore(BlobStoreRepository delegatedBlobStoreRepository, Supplier dataEncryptionKeySupplier, PasswordBasedEncryption metadataEncryption, Supplier encryptionNonceSupplier, - Supplier metadataIdentifierSupplier) { + Supplier metadataIdentifierSupplier) { this.delegatedBlobStore = delegatedBlobStoreRepository.blobStore(); this.delegatedBasePath = delegatedBlobStoreRepository.basePath(); this.dataEncryptionKeySupplier = dataEncryptionKeySupplier; @@ -308,12 +375,13 @@ private static class EncryptedBlobContainer implements BlobContainer { private final Supplier dataEncryptionKeySupplier; private final PasswordBasedEncryption metadataEncryption; private final Supplier encryptionNonceSupplier; - private final Supplier metadataIdentifierSupplier; + private final Supplier metadataIdentifierSupplier; private final BlobContainer delegatedBlobContainer; private final BlobContainer encryptionMetadataBlobContainer; + EncryptedBlobContainer(BlobStore delegatedBlobStore, BlobPath delegatedBasePath, BlobPath path, Supplier dataEncryptionKeySupplier, PasswordBasedEncryption metadataEncryption, - Supplier encryptionNonceSupplier, Supplier metadataIdentifierSupplier) { + Supplier encryptionNonceSupplier, Supplier metadataIdentifierSupplier) { this.delegatedBlobStore = delegatedBlobStore; this.delegatedBasePath = delegatedBasePath; this.path = path; @@ -354,15 +422,15 @@ public BlobPath path() { public InputStream readBlob(String blobName) throws IOException { final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); // read the metadata identifier (fixed length) which is prepended to the encrypted blob - final byte[] metadataIdentifier = encryptedDataInputStream.readNBytes(METADATA_UID_LENGTH_IN_BYTES); - if (metadataIdentifier.length != METADATA_UID_LENGTH_IN_BYTES) { + final byte[] metaId = encryptedDataInputStream.readNBytes(MetadataIdentifier.byteLength()); + if (metaId.length != MetadataIdentifier.byteLength()) { throw new IOException("Failure to read encrypted blob metadata identifier"); } + final MetadataIdentifier metadataIdentifier = MetadataIdentifier.fromByteArray(metaId); // the metadata blob name is the name of the data blob followed by the base64 encoding (URL safe) of the metadata identifier - final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), - StandardCharsets.UTF_8); + final String metadataBlobName = MetadataIdentifier.formMetadataBlobName(blobName, metadataIdentifier); // read the encrypted metadata contents - BytesReference 
encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); + final BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); final BlobEncryptionMetadata metadata; try { // decrypt and parse metadata @@ -405,7 +473,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b final SecretKey dataEncryptionKey = dataEncryptionKeySupplier.get(); final int nonce = encryptionNonceSupplier.get(); // this is the metadata required to decrypt back the (soon to be) encrypted blob - BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); + final BlobEncryptionMetadata metadata = new BlobEncryptionMetadata(nonce, PACKET_LENGTH_IN_BYTES, dataEncryptionKey); // encrypt the metadata final byte[] encryptedMetadata; try { @@ -415,9 +483,8 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } // the metadata identifier is a sufficiently long random byte array so as to make it practically unique // the goal is to avoid overwriting metadata blobs even if the encrypted data blobs are overwritten - final byte[] metadataIdentifier = metadataIdentifierSupplier.get(); - final String metadataBlobName = blobName + new String(Base64.getUrlEncoder().withoutPadding().encode(metadataIdentifier), - StandardCharsets.UTF_8); + final MetadataIdentifier metadataIdentifier = metadataIdentifierSupplier.get(); + final String metadataBlobName = MetadataIdentifier.formMetadataBlobName(blobName, metadataIdentifier); // first write the encrypted metadata to a UNIQUE blob name try (ByteArrayInputStream encryptedMetadataInputStream = new ByteArrayInputStream(encryptedMetadata)) { encryptionMetadataBlobContainer.writeBlob(metadataBlobName, encryptedMetadataInputStream, encryptedMetadata.length, true @@ -425,9 +492,9 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } // afterwards write the encrypted data blob // prepended to the encrypted data blob is the unique identifier (fixed length) of the metadata blob - final long encryptedBlobSize = metadataIdentifier.length + EncryptionPacketsInputStream.getEncryptionLength(blobSize, - PACKET_LENGTH_IN_BYTES); - try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataIdentifier), + final long encryptedBlobSize = (long) MetadataIdentifier.byteLength() + + EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); + try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataIdentifier.asByteArray()), new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES), true)) { delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 4b85855f50fd6..558b0e637d4ea 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -61,7 +61,7 @@ protected 
long blobLengthFromDiskLength(BlobMetaData blobMetaData) { return blobMetaData.length(); } else { return DecryptionPacketsInputStream.getDecryptionLength(blobMetaData.length() - - EncryptedRepository.METADATA_UID_LENGTH_IN_BYTES, EncryptedRepository.PACKET_LENGTH_IN_BYTES); + EncryptedRepository.MetadataIdentifier.byteLength(), EncryptedRepository.PACKET_LENGTH_IN_BYTES); } } From 3d82aa4e6d8ebd9f0ce2176cf54b974e0fe6cd6a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 18 Feb 2020 14:51:26 +0200 Subject: [PATCH 115/142] deleteBlobsIgnoringIfNotExists refactor for new metadata blob name --- .../encrypted/EncryptedRepository.java | 78 ++++++++++++------- 1 file changed, 50 insertions(+), 28 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 585ea621fea9a..f36114c131657 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -514,7 +514,7 @@ public DeleteResult delete() throws IOException { DeleteResult deleteResult = delegatedBlobContainer.delete(); // then delete metadata try { - encryptionMetadataBlobContainer.delete(); + deleteResult = deleteResult.add(encryptionMetadataBlobContainer.delete()); } catch (IOException e) { // the encryption metadata blob container might not exist at all logger.warn("Failure to delete metadata blob container " + encryptionMetadataBlobContainer.path(), e); @@ -525,46 +525,77 @@ public DeleteResult delete() throws IOException { @Override public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOException { Objects.requireNonNull(blobNames); - // first delete the encrypted data blob - delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(blobNames); - // then delete metadata + + // find all the blob names that must be deleted Set blobNamesSet = new HashSet<>(blobNames); - List metadataBlobsToDelete = new ArrayList<>(blobNames.size()); - final Set allMetadataBlobs; + Set blobNamesToDelete = new HashSet<>(); + for (String existingBlobName : delegatedBlobContainer.listBlobs().keySet()) { + if (blobNamesSet.contains(existingBlobName)) { + blobNamesToDelete.add(existingBlobName); + } + } + + // find all the metadata blob names that must be deleted + Map> blobNamesToMetadataNamesToDelete = new HashMap<>(blobNamesToDelete.size()); + Set allMetadataBlobs = new HashSet<>(); try { allMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); } catch (IOException e) { - // the encryption metadata blob container might not exist at all + // the metadata blob container might not even exist + // the encrypted data is the "anchor" for encrypted blobs, if those are removed, the encrypted blob as a whole is + // considered removed, even if, technically, the metadata is still lingering (it should later be removed by cleanup) + // therefore this tolerates metadata delete failures, when data deletes are successful logger.warn("Failure to list blobs of metadata blob container " + encryptionMetadataBlobContainer.path(), e); - return; } for (String metadataBlobName : allMetadataBlobs) { - boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; - if (invalidMetadataName) { + final String blobNameForMetadata; + try { + 
blobNameForMetadata = MetadataIdentifier.parseFromMetadataBlobName(metadataBlobName).v1(); + } catch (IllegalArgumentException e) { + // ignore invalid metadata blob names, which most likely have been created externally continue; } - String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); - if (blobNamesSet.contains(blobName)) { - metadataBlobsToDelete.add(metadataBlobName); + // group metadata blob names to their associated blob name + if (blobNamesToDelete.contains(blobNameForMetadata)) { + blobNamesToMetadataNamesToDelete.putIfAbsent(blobNameForMetadata, new ArrayList<>(1)).add(metadataBlobName); } } + // Metadata deletes when there are multiple for the same blob is un-safe, so don't try it now. + // It is unsafe because metadata "appears" before the data and there could be an overwrite in progress for which only + // the metadata, but not the encrypted data, shows up. + List metadataBlobNamesToDelete = new ArrayList<>(blobNamesToMetadataNamesToDelete.size()); + blobNamesToMetadataNamesToDelete.entrySet().forEach(entry -> { + if (entry.getValue().size() == 1) { + metadataBlobNamesToDelete.add(entry.getValue().get(0)); + } + }); + + // then delete the encrypted data blobs + delegatedBlobContainer.deleteBlobsIgnoringIfNotExists(new ArrayList<>(blobNamesToDelete)); + + // lastly delete metadata blobs try { - encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(metadataBlobsToDelete); + encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(metadataBlobNamesToDelete); } catch (IOException e) { - logger.warn("Failure to delete metadata blobs " + metadataBlobsToDelete + " from blob container " + logger.warn("Failure to delete metadata blobs " + metadataBlobNamesToDelete + " from blob container " + encryptionMetadataBlobContainer.path(), e); } } @Override public Map listBlobs() throws IOException { - // the encrypted data blob container is the source-of-truth for list operations - // the metadata blob container mirrors its structure, but in some failure cases it might contain - // additional orphaned metadata blobs - // can list blobs that cannot be decrypted (because metadata is missing or corrupted) + // The encrypted data blobs "anchor" the metadata-data blob pair, i.e. the encrypted blob "exists" if only the data exists. + // In all circumstances, barring an "external" access to the repository, the metadata associated to the data must exist. return delegatedBlobContainer.listBlobs(); } + @Override + public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + // The encrypted data blobs "anchor" the metadata-data blob pair, i.e. the encrypted blob "exists" if only the data exists. + // In all circumstances, barring an "external" access to the repository, the metadata associated to the data must exist. 
+ return delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); + } + @Override public Map children() throws IOException { // the encrypted data blob container is the source-of-truth for child container operations @@ -586,15 +617,6 @@ public Map children() throws IOException { return result; } - @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { - // the encrypted data blob container is the source-of-truth for list operations - // the metadata blob container mirrors its structure, but in some failure cases it might contain - // additional orphaned metadata blobs - // can list blobs that cannot be decrypted (because metadata is missing or corrupted) - return delegatedBlobContainer.listBlobsByPrefix(blobNamePrefix); - } - public void cleanUpOrphanedMetadata() throws IOException { // delete encryption metadata blobs which don't pair with any data blobs Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); From b085c12fa3e03d1861719e4c84d230f2df264b7e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 18 Feb 2020 14:59:50 +0200 Subject: [PATCH 116/142] nothing --- .../repositories/encrypted/EncryptedRepository.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index f36114c131657..615a23e06feb9 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -318,6 +318,7 @@ protected void doClose() { @Override protected long readSnapshotIndexLatestBlob() throws IOException { + // the index.latest blob is not encrypted EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); return Numbers.bytesToLong(Streams.readFully(encryptedBlobContainer.delegatedBlobContainer.readBlob(INDEX_LATEST_BLOB)) .toBytesRef()); @@ -330,6 +331,7 @@ protected void writeSnapshotIndexLatestBlob(long newGen) throws IOException { bStream.writeLong(newGen); genBytes = bStream.bytes(); } + // the index.latest blob is not encrypted EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); try (InputStream stream = genBytes.streamInput()) { encryptedBlobContainer.delegatedBlobContainer.writeBlobAtomic(INDEX_LATEST_BLOB, stream, genBytes.length(), false); From 901b04a664d0401be7f69f1949200464d8a5ed8d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 18 Feb 2020 19:06:37 +0200 Subject: [PATCH 117/142] cleanUnreferencedEncryptionMetadata --- .../encrypted/EncryptedRepository.java | 52 +++++++++++++++++-- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 615a23e06feb9..47452a2482f93 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -338,6 +338,49 @@ protected void writeSnapshotIndexLatestBlob(long newGen) throws IOException { } } + 
private DeleteResult cleanUnreferencedEncryptionMetadata(EncryptedBlobContainer blobContainer) throws IOException { + Map allMetadataBlobs = blobContainer.encryptionMetadataBlobContainer.listBlobs(); + Map allDataBlobs = blobContainer.delegatedBlobContainer.listBlobs(); + // map from the data blob name to all the associated metadata + Map>> metaDataByBlobName = new HashMap<>(); + List metadataBlobsToDelete = new ArrayList<>(); + for (String metadataBlobName : allMetadataBlobs.keySet()) { + final Tuple blobNameAndMetaId; + try { + blobNameAndMetaId = MetadataIdentifier.parseFromMetadataBlobName(metadataBlobName); + } catch (IllegalArgumentException e) { + // ignore invalid metadata blob names, which most likely have been created externally + continue; + } + if (false == allDataBlobs.containsKey(blobNameAndMetaId.v1()) && blobNameAndMetaId.v2().repositoryGeneration < latestKnownRepoGen.get()) { + // the data blob for this metadata blob is not going to appear, the repo moved to a new generation, which means that a + // "parent" blob of it appeared + metadataBlobsToDelete.add(blobNameAndMetaId.v1()); + } + // group metadata blobs by their associated blob name + metaDataByBlobName.putIfAbsent(blobNameAndMetaId.v1(), new ArrayList<>()).add(new Tuple<>(blobNameAndMetaId.v2(), + metadataBlobName)); + } + metaDataByBlobName.entrySet().forEach(entry -> { + if (entry.getValue().size() > 1) { + // if there are multiple versions of the metadata, then remove ones created in olden repository generations + // since overwrites cannot happen across repository generations + long maxRepositoryGeneration = + entry.getValue().stream().map(meta -> meta.v1().repositoryGeneration).max(Long::compare).get(); + entry.getValue().forEach(meta -> { + if (meta.v1().repositoryGeneration < maxRepositoryGeneration) { + metadataBlobsToDelete.add(meta.v2()); + } + }); + } + }); + logger.info("[{}] Found unreferenced metadata blobs {} at path {}. 
Cleaning them up", metadata.name(), metadataBlobsToDelete, + blobContainer.encryptionMetadataBlobContainer.path()); + blobContainer().deleteBlobsIgnoringIfNotExists(metadataBlobsToDelete); + return new DeleteResult(metadataBlobsToDelete.size(), + metadataBlobsToDelete.stream().mapToLong(name -> allMetadataBlobs.get(name).length()).sum()); + } + private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; @@ -539,9 +582,9 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce // find all the metadata blob names that must be deleted Map> blobNamesToMetadataNamesToDelete = new HashMap<>(blobNamesToDelete.size()); - Set allMetadataBlobs = new HashSet<>(); + Set allMetadataBlobNames = new HashSet<>(); try { - allMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); + allMetadataBlobNames = encryptionMetadataBlobContainer.listBlobs().keySet(); } catch (IOException e) { // the metadata blob container might not even exist // the encrypted data is the "anchor" for encrypted blobs, if those are removed, the encrypted blob as a whole is @@ -549,7 +592,7 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce // therefore this tolerates metadata delete failures, when data deletes are successful logger.warn("Failure to list blobs of metadata blob container " + encryptionMetadataBlobContainer.path(), e); } - for (String metadataBlobName : allMetadataBlobs) { + for (String metadataBlobName : allMetadataBlobNames) { final String blobNameForMetadata; try { blobNameForMetadata = MetadataIdentifier.parseFromMetadataBlobName(metadataBlobName).v1(); @@ -570,6 +613,9 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce if (entry.getValue().size() == 1) { metadataBlobNamesToDelete.add(entry.getValue().get(0)); } + // technically, duplicate metadata written during olden repository generations could be removed here as well, + // but this code should not be aware of what a repository generation is, so let the metadata linger, it will + // be garbage collected by cleanup }); // then delete the encrypted data blobs From 0ba402a97d0aa2ff1c153c29000cdd5a5e01c849 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 18 Feb 2020 22:48:00 +0200 Subject: [PATCH 118/142] Refactor cleanup for the new metadata name --- .../blobstore/BlobStoreRepository.java | 2 +- .../encrypted/EncryptedRepository.java | 131 +++++++++--------- 2 files changed, 66 insertions(+), 67 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 019c819117372..ee190b7cc5770 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -967,7 +967,7 @@ public IndexMetaData getSnapshotIndexMetaData(final SnapshotId snapshotId, final } } - private BlobPath indicesPath() { + protected BlobPath indicesPath() { return basePath().add("indices"); } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 47452a2482f93..bdd3e9b3cc75c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java 
+++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -9,6 +9,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.StepListener; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.service.ClusterService; @@ -37,6 +40,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.threadpool.ThreadPool; import javax.crypto.AEADBadTagException; import javax.crypto.KeyGenerator; @@ -60,6 +64,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Supplier; @@ -274,22 +279,49 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { - super.cleanup(repositoryStateId, writeShardGens, ActionListener.wrap(repositoryCleanupResult -> { - EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); - cleanUpOrphanedMetadataRecursively(encryptedBlobContainer); - listener.onResponse(repositoryCleanupResult); - }, listener::onFailure)); - } + final StepListener baseCleanupStep = new StepListener<>(); + final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); - private void cleanUpOrphanedMetadataRecursively(EncryptedBlobContainer encryptedBlobContainer) throws IOException{ - encryptedBlobContainer.cleanUpOrphanedMetadata(); - for (BlobContainer childEncryptedBlobContainer : encryptedBlobContainer.children().values()) { - try { - cleanUpOrphanedMetadataRecursively((EncryptedBlobContainer) childEncryptedBlobContainer); - } catch(IOException e) { - logger.warn("Failure to clean-up blob container [" + childEncryptedBlobContainer.path() + "]", e); - } - } + super.cleanup(repositoryStateId, writeShardGens, baseCleanupStep); + + baseCleanupStep.whenComplete(baseCleanupResult -> { + final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { + DeleteResult deleteResult = new DeleteResult(baseCleanupResult.blobs(), baseCleanupResult.bytes()); + for (DeleteResult result : deleteResults) { + deleteResult = deleteResult.add(result); + } + listener.onResponse(new RepositoryCleanupResult(deleteResult)); + }, listener::onFailure), 2); + + // clean unreferenced metadata blobs on the root blob container + executor.execute(ActionRunnable.supply(groupedListener, () -> { + EncryptedBlobContainer encryptedBlobContainer = (EncryptedBlobContainer) blobContainer(); + return cleanUnreferencedEncryptionMetadata(encryptedBlobContainer); + })); + + // clean indices blob containers + executor.execute(ActionRunnable.supply(groupedListener, () -> { + EncryptedBlobContainer indicesBlobContainer = (EncryptedBlobContainer) blobStore().blobContainer(indicesPath()); + Map metadataIndices = indicesBlobContainer.encryptionMetadataBlobContainer.children(); + Map dataIndices = indicesBlobContainer.delegatedBlobContainer.children(); + 
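                // The loop below only touches containers that exist under the metadata "indices" path but have no
                // counterpart under the data "indices" path: for such a container it infers a repository generation
                // from any parseable metadata blob name inside it (findFirstGeneration) and deletes it only when that
                // generation is strictly older than latestKnownRepoGen, so metadata written by an in-flight snapshot
                // on the current generation is never removed.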
DeleteResult deleteResult = DeleteResult.ZERO; + for (Map.Entry metadataIndexContainer : metadataIndices.entrySet()) { + if (false == dataIndices.containsKey(metadataIndexContainer.getKey())) { + // the index metadata blob container exists but the encrypted data blob container does not + Long indexGeneration = findFirstGeneration(metadataIndexContainer.getValue()); + if (indexGeneration != null && indexGeneration < latestKnownRepoGen.get()) { + logger.debug("[{}] Found stale metadata index container [{}]. Cleaning it up", metadata.name(), + metadataIndexContainer.getValue().path()); + deleteResult = deleteResult.add(metadataIndexContainer.getValue().delete()); + logger.debug("[{}] Cleaned up stale metadata index container [{}]", metadata.name(), + metadataIndexContainer.getValue().path()); + } + } + } + return deleteResult; + })); + + }, listener::onFailure); } @Override @@ -381,6 +413,24 @@ private DeleteResult cleanUnreferencedEncryptionMetadata(EncryptedBlobContainer metadataBlobsToDelete.stream().mapToLong(name -> allMetadataBlobs.get(name).length()).sum()); } + // aux "ugly" function which infers the repository generation under which an index blob container has been created + private Long findFirstGeneration(BlobContainer metadataBlobContainer) throws IOException { + for (String metaBlobName : metadataBlobContainer.listBlobs().keySet()) { + try { + return MetadataIdentifier.parseFromMetadataBlobName(metaBlobName).v2().repositoryGeneration; + } catch (IllegalArgumentException e) { + // ignored, let's find another meta blob name we can parse + } + } + for (BlobContainer child : metadataBlobContainer.children().values()) { + Long generation = findFirstGeneration(child); + if (generation != null) { + return generation; + } + } + return null; + }; + private static class EncryptedBlobStore implements BlobStore { private final BlobStore delegatedBlobStore; @@ -664,57 +714,6 @@ public Map children() throws IOException { } return result; } - - public void cleanUpOrphanedMetadata() throws IOException { - // delete encryption metadata blobs which don't pair with any data blobs - Set foundEncryptedBlobs = delegatedBlobContainer.listBlobs().keySet(); - final Set foundMetadataBlobs; - try { - foundMetadataBlobs = encryptionMetadataBlobContainer.listBlobs().keySet(); - } catch (IOException e) { - logger.warn("Failure to list blobs of metadata blob container " + encryptionMetadataBlobContainer.path(), e); - return; - } - List orphanedMetadataBlobs = new ArrayList<>(); - for (String metadataBlobName : foundMetadataBlobs) { - // also remove unrecognized blobs in the metadata blob container (mainly because it's tedious in the general - // case to tell between bogus and legit stale metadata, and it would require reading the blobs, which is not worth it) - boolean invalidMetadataName = metadataBlobName.length() <= METADATA_UID_LENGTH_IN_CHARS; - if (invalidMetadataName) { - orphanedMetadataBlobs.add(metadataBlobName); - continue; - } - String blobName = metadataBlobName.substring(0, metadataBlobName.length() - METADATA_UID_LENGTH_IN_CHARS); - if (false == foundEncryptedBlobs.contains(blobName)) { - orphanedMetadataBlobs.add(metadataBlobName); - } - } - try { - encryptionMetadataBlobContainer.deleteBlobsIgnoringIfNotExists(orphanedMetadataBlobs); - } catch (IOException e) { - logger.warn("Failure to delete orphaned metadata blobs " + orphanedMetadataBlobs + " from blob container " - + encryptionMetadataBlobContainer.path(), e); - } - // delete encryption metadata blob containers which don't pair with any 
data blob containers - Set foundEncryptedBlobContainers = delegatedBlobContainer.children().keySet(); - final Map foundMetadataBlobContainers; - try { - foundMetadataBlobContainers = encryptionMetadataBlobContainer.children(); - } catch (IOException e) { - logger.warn("Failure to list child blob containers for metadata blob container " + encryptionMetadataBlobContainer.path(), - e); - return; - } - for (Map.Entry metadataBlobContainer : foundMetadataBlobContainers.entrySet()) { - if (false == foundEncryptedBlobContainers.contains(metadataBlobContainer.getKey())) { - try { - metadataBlobContainer.getValue().delete(); - } catch (IOException e) { - logger.warn("Failure to delete orphaned metadata blob container " + metadataBlobContainer.getValue().path(), e); - } - } - } - } } private static String computeSaltedPBKDF2Hash(SecureRandom secureRandom, char[] password) { From 0d7077de725a9e15bdf8a074f49842983f082091 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 19 Feb 2020 21:28:53 +0200 Subject: [PATCH 119/142] Cleanup verify readonly --- .../repositories/encrypted/EncryptedRepository.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index bdd3e9b3cc75c..c431ec188e049 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -279,6 +279,10 @@ public void snapshotShard(Store store, MapperService mapperService, SnapshotId s @Override public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { + if (isReadOnly()) { + listener.onFailure(new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository")); + return; + } final StepListener baseCleanupStep = new StepListener<>(); final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); From 6cf581273a4b69f9859a891cd8506aba06c22015 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 19 Feb 2020 22:01:32 +0200 Subject: [PATCH 120/142] RepositoryMetaVersion from merge --- .../encrypted/EncryptedRepository.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index c431ec188e049..ed52c03089dbe 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.StepListener; @@ -259,26 +260,26 @@ public Map adaptUserMetadata(Map userMetadata) { public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenerations, long startTime, String failure, int totalShards, List shardFailures, long 
repositoryStateId, boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, - boolean writeShardGens, ActionListener listener) { + Version repositoryMetaVersion, ActionListener listener) { validateRepositoryPasswordHash(userMetadata, listener::onFailure); if (userMetadata != null && userMetadata.containsKey(PASSWORD_HASH_RESERVED_USER_METADATA_KEY)) { userMetadata.remove(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); } super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, - includeGlobalState, clusterMetaData, userMetadata, writeShardGens, listener); + includeGlobalState, clusterMetaData, userMetadata, repositoryMetaVersion, listener); } @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, boolean writeShardGens, + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, Version repositoryMetaVersion, Map userMetadata, ActionListener listener) { validateRepositoryPasswordHash(userMetadata, listener::onFailure); - super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, writeShardGens, userMetadata, - listener); + super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, repositoryMetaVersion, + userMetadata, listener); } @Override - public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListener listener) { + public void cleanup(long repositoryStateId, Version repositoryMetaVersion, ActionListener listener) { if (isReadOnly()) { listener.onFailure(new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository")); return; @@ -286,7 +287,7 @@ public void cleanup(long repositoryStateId, boolean writeShardGens, ActionListen final StepListener baseCleanupStep = new StepListener<>(); final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); - super.cleanup(repositoryStateId, writeShardGens, baseCleanupStep); + super.cleanup(repositoryStateId, repositoryMetaVersion, baseCleanupStep); baseCleanupStep.whenComplete(baseCleanupResult -> { final GroupedActionListener groupedListener = new GroupedActionListener<>(ActionListener.wrap(deleteResults -> { From 9e8001bfd6da8a83c872e5e619061a84859950d9 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 19 Feb 2020 22:08:21 +0200 Subject: [PATCH 121/142] checkstyle --- .../repositories/encrypted/EncryptedRepository.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index ed52c03089dbe..01860bf26b50c 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -389,7 +389,8 @@ private DeleteResult cleanUnreferencedEncryptionMetadata(EncryptedBlobContainer // ignore invalid metadata blob names, which most likely have been created externally continue; } - if (false == allDataBlobs.containsKey(blobNameAndMetaId.v1()) && blobNameAndMetaId.v2().repositoryGeneration < latestKnownRepoGen.get()) { + if (false == 
allDataBlobs.containsKey(blobNameAndMetaId.v1()) && + blobNameAndMetaId.v2().repositoryGeneration < latestKnownRepoGen.get()) { // the data blob for this metadata blob is not going to appear, the repo moved to a new generation, which means that a // "parent" blob of it appeared metadataBlobsToDelete.add(blobNameAndMetaId.v1()); From 7ffaefb2bafbf433a6884467dbcf46776ffa8b21 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 20 Feb 2020 10:59:57 +0200 Subject: [PATCH 122/142] Test nit --- .../encrypted/ChainingInputStreamTests.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java index 17f69dd461ce3..83b96320c29aa 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/ChainingInputStreamTests.java @@ -575,11 +575,15 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { // mark does not budge assertThat(test.markIn, Matchers.is(currentIn)); // mark again - readLimit = randomInt(63); - test.mark(readLimit); + int readLimit2 = randomInt(63); + test.mark(readLimit2); assertThat(test.markIn, Matchers.is(currentIn)); verify(currentIn, never()).close(); - verify(currentIn).mark(Mockito.eq(readLimit)); + if (readLimit != readLimit2) { + verify(currentIn).mark(Mockito.eq(readLimit2)); + } else { + verify(currentIn, times(2)).mark(Mockito.eq(readLimit)); + } // read more while switching the component for (int i = 0; i < randomIntBetween(4, 16) || test.currentIn == currentIn; i++) { test.readNBytes(randomInt(63)); From 950686f415209746821efe82fe51a8f5afe0e5f5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 20 Feb 2020 16:26:41 +0200 Subject: [PATCH 123/142] Encrypted GCS --- plugins/repository-gcs/build.gradle | 15 +++ ...eCloudStorageBlobStoreRepositoryTests.java | 8 +- .../plugin/repository-encrypted/build.gradle | 3 +- .../encrypted/EncryptedRepository.java | 5 +- ...ryptedFSBlobStoreRepositoryIntegTests.java | 2 +- ...yptedGCSBlobStoreRepositoryIntegTests.java | 95 +++++++++++++++++++ 6 files changed, 122 insertions(+), 6 deletions(-) create mode 100644 x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 63bd92c9c9b11..f3a5a8dc288c8 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -124,3 +124,18 @@ check { // also execute the QA tests when testing the plugin dependsOn 'qa:google-cloud-storage:check' } + +// test jar is exported by the testArtifacts configuration to be used in the repository-encrypted module +configurations { + testArtifacts.extendsFrom testRuntime +} + +task testJar(type: Jar) { + appendix 'test' + from sourceSets.test.output +} + +artifacts { + archives jar + testArtifacts testJar +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 757df361bf3cf..1c296816a279e 100644 --- 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java
+++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java
@@ -106,11 +106,15 @@ protected Settings nodeSettings(int nodeOrdinal) {
         settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl());
         settings.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl() + "/token");
+        settings.setSecureSettings(nodeSecureSettings(nodeOrdinal));
+        return settings.build();
+    }
+
+    protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) {
         final MockSecureSettings secureSettings = new MockSecureSettings();
         final byte[] serviceAccount = TestUtils.createServiceAccount(random());
         secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace("test").getKey(), serviceAccount);
-        settings.setSecureSettings(secureSettings);
-        return settings.build();
+        return secureSettings;
     }
 
     public void testDeleteSingleItem() {
diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle
index 4f57de1efe44e..1e56c8de1ca35 100644
--- a/x-pack/plugin/repository-encrypted/build.gradle
+++ b/x-pack/plugin/repository-encrypted/build.gradle
@@ -9,9 +9,10 @@ esplugin {
 }
 
 dependencies {
-  // necessary for the license check
+  // necessary for the license check
   compileOnly project(path: xpackModule('core'), configuration: 'default')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
+  testCompile project(path: ':plugins:repository-gcs', configuration: 'testArtifacts')
 }
 
 integTest.enabled = false
diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java
index 01860bf26b50c..e0a1d741bce51 100644
--- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java
+++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java
@@ -205,7 +205,7 @@ static Tuple parseFromMetadataBlobName(String metada
         }
         long generation = Long.parseLong(metadataBlobName.substring(generationPos + 1));
         int idPos = metadataBlobName.lastIndexOf('.', generationPos - 1);
-        if (idPos <= 0 || generationPos - idPos != METADATA_UID_LENGTH_IN_CHARS) {
+        if (idPos <= 0 || generationPos - idPos != METADATA_UID_LENGTH_IN_CHARS + 1) {
            throw new IllegalArgumentException("Unrecognized metadata blob name");
         }
         byte[] id = Base64.getUrlDecoder().decode(metadataBlobName.substring(idPos + 1, generationPos));
@@ -658,7 +658,8 @@ public void deleteBlobsIgnoringIfNotExists(List blobNames) throws IOExce
         }
         // group metadata blob names to their associated blob name
         if (blobNamesToDelete.contains(blobNameForMetadata)) {
-            blobNamesToMetadataNamesToDelete.putIfAbsent(blobNameForMetadata, new ArrayList<>(1)).add(metadataBlobName);
+            blobNamesToMetadataNamesToDelete.computeIfAbsent(blobNameForMetadata, k -> new ArrayList<>(1))
+                .add(metadataBlobName);
         }
     }
     // Deleting metadata blobs when there are multiple for the same data blob is unsafe, so don't try it now.
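The "+ 1" in the parseFromMetadataBlobName check above accounts for the '.' separator: generationPos points at the dot before the repository generation and idPos at the dot before the url-safe Base64 metadata id, so the distance between the two dots is the id length in characters plus one separator. A minimal, self-contained sketch of that arithmetic follows; the blob name layout and the constant values are illustrative assumptions, not code taken from the repository.

import java.util.Base64;

public class MetadataBlobNameSketch {
    // assumed sizes, for illustration only (18 bytes encode to 24 url-safe Base64 chars without padding)
    static final int METADATA_UID_LENGTH_IN_BYTES = 18;
    static final int METADATA_UID_LENGTH_IN_CHARS = 24;

    public static void main(String[] args) {
        byte[] id = new byte[METADATA_UID_LENGTH_IN_BYTES]; // random in the real repository
        long generation = 7L;
        String metadataBlobName = "blobName"
                + "." + Base64.getUrlEncoder().withoutPadding().encodeToString(id)
                + "." + generation;
        int generationPos = metadataBlobName.lastIndexOf('.');
        int idPos = metadataBlobName.lastIndexOf('.', generationPos - 1);
        // the span between the two dots covers the leading '.' plus the id characters
        System.out.println(generationPos - idPos == METADATA_UID_LENGTH_IN_CHARS + 1); // prints true
    }
}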
diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 558b0e637d4ea..3c70aa8c44848 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -65,11 +65,11 @@ protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { } } + @Override protected Collection> nodePlugins() { return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class); } - @Override protected String repositoryType() { return EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME; diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java new file mode 100644 index 0000000000000..714f08ccb3d76 --- /dev/null +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.repositories.encrypted; + +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.gcs.GoogleCloudStorageBlobStoreRepositoryTests; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public class EncryptedGCSBlobStoreRepositoryIntegTests extends GoogleCloudStorageBlobStoreRepositoryTests { + + private static List repositoryNames; + + @BeforeClass + private static void preGenerateRepositoryNames() { + List names = new ArrayList<>(); + for (int i = 0; i < 32; i++) { + names.add("test-repo-" + i); + } + repositoryNames = Collections.synchronizedList(names); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + final MockSecureSettings secureSettings = nodeSecureSettings(nodeOrdinal); + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal), false) + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .setSecureSettings(secureSettings) + .build(); + } + + @Override + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + for (String repositoryName : repositoryNames) { + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
+ getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + } + return secureSettings; + } + + @Override + protected String randomRepositoryName() { + return repositoryNames.remove(0); + } + + protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { + if (BlobStoreRepository.INDEX_LATEST_BLOB.equals(blobMetaData.name())) { + // index.latest is not encrypted, hence the size on disk is equal to the content + return blobMetaData.length(); + } else { + return DecryptionPacketsInputStream.getDecryptionLength(blobMetaData.length() - + EncryptedRepository.MetadataIdentifier.byteLength(), EncryptedRepository.PACKET_LENGTH_IN_BYTES); + } + } + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class, TestGoogleCloudStoragePlugin.class); + } + + @Override + protected String repositoryType() { + return EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME; + } + + @Override + protected Settings repositorySettings() { + final Settings.Builder settings = Settings.builder(); + settings.put(super.repositorySettings()); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), "gcs"); + if (randomBoolean()) { + long size = 1 << randomInt(10); + settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); + } + return settings.build(); + } + +} From 710ef33dbf0b3c2f4253769a40f26ef3cf0520c9 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 20 Feb 2020 22:33:05 +0200 Subject: [PATCH 124/142] Encrypted Azure WIP --- plugins/repository-azure/build.gradle | 2 + .../azure/AzureBlobStoreRepositoryTests.java | 16 ++- ...tedAzureBlobStoreRepositoryIntegTests.java | 109 ++++++++++++++++++ plugins/repository-gcs/build.gradle | 16 +-- ...yptedGCSBlobStoreRepositoryIntegTests.java | 37 ++++-- ...eCloudStorageBlobStoreRepositoryTests.java | 5 +- plugins/repository-s3/build.gradle | 2 + .../elasticsearch/test/ESIntegTestCase.java | 8 +- .../plugin/repository-encrypted/build.gradle | 16 ++- .../encrypted/EncryptedRepository.java | 6 +- .../encrypted/EncryptedRepositoryPlugin.java | 8 +- ...ryptedFSBlobStoreRepositoryIntegTests.java | 2 +- 12 files changed, 184 insertions(+), 43 deletions(-) create mode 100644 plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java rename {x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted => plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs}/EncryptedGCSBlobStoreRepositoryIntegTests.java (68%) diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 5a11f8b02bf84..f012b1b27a7aa 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -30,6 +30,8 @@ dependencies { compile 'com.google.guava:guava:20.0' compile 'org.apache.commons:commons-lang3:3.4' testCompile project(':test:fixtures:azure-fixture') + // required by the test for the encrypted Azure repository + testCompile project(path: ':x-pack:plugin:repository-encrypted', configuration: 'testArtifacts') } dependencyLicenses { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index b23693fd268d4..d158b162cbfdf 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; +import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -74,19 +75,22 @@ protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { @Override protected Settings nodeSettings(int nodeOrdinal) { - final String key = Base64.getEncoder().encodeToString(randomAlphaOfLength(10).getBytes(StandardCharsets.UTF_8)); - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(AzureStorageSettings.ACCOUNT_SETTING.getConcreteSettingForNamespace("test").getKey(), "account"); - secureSettings.setString(AzureStorageSettings.KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), key); - final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + httpServerUrl(); return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint) - .setSecureSettings(secureSettings) .build(); } + @Override + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + final String key = Base64.getEncoder().encodeToString(randomAlphaOfLength(10).getBytes(StandardCharsets.UTF_8)); + secureSettings.setString(AzureStorageSettings.ACCOUNT_SETTING.getConcreteSettingForNamespace("test").getKey(), "account"); + secureSettings.setString(AzureStorageSettings.KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), key); + return secureSettings; + } + /** * AzureRepositoryPlugin that allows to set low values for the Azure's client retry policy * and for BlobRequestOptions#getSingleBlobPutThresholdInBytes(). diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java new file mode 100644 index 0000000000000..298a0456a00cf --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.azure; + +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.encrypted.DecryptionPacketsInputStream; +import org.elasticsearch.repositories.encrypted.EncryptedRepository; +import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; +import org.elasticsearch.repositories.encrypted.LocalStateEncryptedRepositoryPlugin; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public class EncryptedAzureBlobStoreRepositoryIntegTests extends AzureBlobStoreRepositoryTests { + + private static List repositoryNames; + + @BeforeClass + private static void preGenerateRepositoryNames() { + List names = new ArrayList<>(); + for (int i = 0; i < 32; i++) { + names.add("test-repo-" + i); + } + repositoryNames = Collections.synchronizedList(names); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .build(); + } + + @Override + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + for (String repositoryName : repositoryNames) { + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
+ getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + } + return secureSettings; + } + + @Override + protected String randomRepositoryName() { + return repositoryNames.remove(randomIntBetween(0, repositoryNames.size() - 1)); + } + + protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { + if (BlobStoreRepository.INDEX_LATEST_BLOB.equals(blobMetaData.name())) { + // index.latest is not encrypted, hence the size on disk is equal to the content + return blobMetaData.length(); + } else { + return DecryptionPacketsInputStream.getDecryptionLength(blobMetaData.length() - + EncryptedRepository.MetadataIdentifier.byteLength(), EncryptedRepository.PACKET_LENGTH_IN_BYTES); + } + } + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class, TestAzureRepositoryPlugin.class); + } + + @Override + protected String repositoryType() { + return EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME; + } + + @Override + protected Settings repositorySettings() { + final Settings.Builder settings = Settings.builder(); + settings.put(super.repositorySettings()); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), "azure"); + if (ESTestCase.randomBoolean()) { + long size = 1 << ESTestCase.randomInt(10); + settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); + } + return settings.build(); + } +} diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index f3a5a8dc288c8..6cab3eee5023c 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -54,6 +54,8 @@ dependencies { compile 'com.google.apis:google-api-services-storage:v1-rev20190426-1.28.0' testCompile project(':test:fixtures:gcs-fixture') + // required by the test for the encrypted GCS repository + testCompile project(path: ':x-pack:plugin:repository-encrypted', configuration: 'testArtifacts') } dependencyLicenses { @@ -125,17 +127,3 @@ check { dependsOn 'qa:google-cloud-storage:check' } -// test jar is exported by the testArtifacts configuration to be used in the repository-encrypted module -configurations { - testArtifacts.extendsFrom testRuntime -} - -task testJar(type: Jar) { - appendix 'test' - from sourceSets.test.output -} - -artifacts { - archives jar - testArtifacts testJar -} diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java similarity index 68% rename from x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java rename to plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java index 714f08ccb3d76..0c8d9cd81d791 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedGCSBlobStoreRepositoryIntegTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
+ * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.repositories.encrypted; +package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.MockSecureSettings; @@ -13,7 +26,11 @@ import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.repositories.gcs.GoogleCloudStorageBlobStoreRepositoryTests; +import org.elasticsearch.repositories.encrypted.DecryptionPacketsInputStream; +import org.elasticsearch.repositories.encrypted.EncryptedRepository; +import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; +import org.elasticsearch.repositories.encrypted.LocalStateEncryptedRepositoryPlugin; +import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; import java.util.ArrayList; @@ -37,11 +54,9 @@ private static void preGenerateRepositoryNames() { @Override protected Settings nodeSettings(int nodeOrdinal) { - final MockSecureSettings secureSettings = nodeSecureSettings(nodeOrdinal); return Settings.builder() - .put(super.nodeSettings(nodeOrdinal), false) + .put(super.nodeSettings(nodeOrdinal)) .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") - .setSecureSettings(secureSettings) .build(); } @@ -57,7 +72,7 @@ protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { @Override protected String randomRepositoryName() { - return repositoryNames.remove(0); + return repositoryNames.remove(randomIntBetween(0, repositoryNames.size() - 1)); } protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { @@ -85,8 +100,8 @@ protected Settings repositorySettings() { final Settings.Builder settings = Settings.builder(); settings.put(super.repositorySettings()); settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), "gcs"); - if (randomBoolean()) { - long size = 1 << randomInt(10); + if (ESTestCase.randomBoolean()) { + long size = 1 << ESTestCase.randomInt(10); settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); } return settings.build(); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 1c296816a279e..980680bc3ec03 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -105,13 +105,12 @@ protected Settings nodeSettings(int nodeOrdinal) { 
settings.put(super.nodeSettings(nodeOrdinal)); settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl()); settings.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl() + "/token"); - - settings.setSecureSettings(nodeSecureSettings(nodeOrdinal)); return settings.build(); } + @Override protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { - final MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); final byte[] serviceAccount = TestUtils.createServiceAccount(random()); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace("test").getKey(), serviceAccount); return secureSettings; diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 34660f6b2b54b..8489e519fee22 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -53,6 +53,8 @@ dependencies { compile 'javax.xml.bind:jaxb-api:2.2.2' testCompile project(':test:fixtures:s3-fixture') + // required by the test for the encrypted S3 repository + testCompile project(path: ':x-pack:plugin:repository-encrypted', configuration: 'testArtifacts') } dependencyLicenses { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a579cb76929de..6a334522af079 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -81,6 +81,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -1614,10 +1615,15 @@ protected Settings nodeSettings(int nodeOrdinal) { // randomly enable low-level search cancellation to make sure it does not alter results .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean()) .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes - .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file"); + .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file") + .setSecureSettings(nodeSecureSettings(nodeOrdinal)); return builder.build(); } + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + return new MockSecureSettings(); + } + protected Path nodeConfigPath(int nodeOrdinal) { return null; } diff --git a/x-pack/plugin/repository-encrypted/build.gradle b/x-pack/plugin/repository-encrypted/build.gradle index 1e56c8de1ca35..bef629206701a 100644 --- a/x-pack/plugin/repository-encrypted/build.gradle +++ b/x-pack/plugin/repository-encrypted/build.gradle @@ -12,7 +12,21 @@ dependencies { // necessary for the license check compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - testCompile project(path: ':plugins:repository-gcs', configuration: 'testArtifacts') } integTest.enabled = false + +// test jar is exported by the testArtifacts configuration to be used in the cloud repositories (S3, GCS, Azure) tests +configurations { + testArtifacts.extendsFrom testRuntime +} + 
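The ESIntegTestCase change above introduces a nodeSecureSettings(int) hook and wires it into the node settings through setSecureSettings(nodeSecureSettings(nodeOrdinal)), so repository test classes can contribute secure settings without overwriting each other's. A rough sketch of the intended override pattern follows; the subclass name and the setting key are invented for illustration.

import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.test.ESIntegTestCase;

public abstract class ExampleSecureSettingsIntegTestCase extends ESIntegTestCase {

    @Override
    protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) {
        // start from the parent's (possibly empty) secure settings and layer more on top,
        // mirroring how the encrypted repository tests add their per-repository passwords
        MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal);
        secureSettings.setString("example.secure.setting", "example-value"); // invented key
        return secureSettings;
    }
}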
+task testJar(type: Jar) { + appendix 'test' + from sourceSets.test.output +} + +artifacts { + archives jar + testArtifacts testJar +} diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index e0a1d741bce51..1f10841cbff31 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -87,7 +87,7 @@ public final class EncryptedRepository extends BlobStoreRepository { // this can be changed freely (can be made a repository parameter) without adjusting // the {@link #CURRENT_ENCRYPTION_VERSION_NUMBER}, as long as it stays under the value // of {@link #MAX_PACKET_LENGTH_IN_BYTES} - static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB + public static final int PACKET_LENGTH_IN_BYTES = 64 * (1 << 10); // 64KB - "public" because used in tests static final String SALTED_PASSWORD_HASH_ALGO = "PBKDF2WithHmacSHA512"; static final int SALTED_PASSWORD_HASH_ITER_COUNT = 10000; static final int SALTED_PASSWORD_HASH_KEY_LENGTH_IN_BITS = 512; @@ -161,7 +161,7 @@ protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry this.passwordHashVerifier = new HashVerifier(password); } - static class MetadataIdentifier { + public static class MetadataIdentifier { final byte[] id; final long repositoryGeneration; @@ -190,7 +190,7 @@ String asString() { return sb.toString(); } - static int byteLength() { + public static int byteLength() { return METADATA_UID_LENGTH_IN_BYTES + Long.BYTES; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index fdc17029d7e06..956022de30804 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -35,12 +35,14 @@ public class EncryptedRepositoryPlugin extends Plugin implements RepositoryPlugin { static final Logger logger = LogManager.getLogger(EncryptedRepositoryPlugin.class); - static final String REPOSITORY_TYPE_NAME = "encrypted"; + public static final String REPOSITORY_TYPE_NAME = "encrypted"; static final String CIPHER_ALGO = "AES"; static final String RAND_ALGO = "SHA1PRNG"; - static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", + // "public" because used in integ tests for other repository types + public static final Setting.AffixSetting ENCRYPTION_PASSWORD_SETTING = Setting.affixKeySetting("repository.encrypted.", "password", key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); - static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); + // "public" because used in integ tests for other repository types + public static final Setting DELEGATE_TYPE = new Setting<>("delegate_type", "", Function.identity()); protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } diff --git 
a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 3c70aa8c44848..1137288c7b3c9 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -52,7 +52,7 @@ protected Settings nodeSettings(int nodeOrdinal) { @Override protected String randomRepositoryName() { - return repositoryNames.remove(0); + return repositoryNames.remove(randomIntBetween(0, repositoryNames.size() - 1)); } protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { From b88ec6d8ed344692bc7b704759a5de50dbe7e4ce Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 21 Feb 2020 00:08:25 +0200 Subject: [PATCH 125/142] Prepending meta id is WRONG! --- .../encrypted/ChainingInputStream.java | 56 +++++++++++-------- .../encrypted/EncryptedRepository.java | 7 ++- ...ryptedFSBlobStoreRepositoryIntegTests.java | 16 ++++-- 3 files changed, 47 insertions(+), 32 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index 2b0208f467906..bee2aff9d2442 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -5,9 +5,11 @@ */ package org.elasticsearch.repositories.encrypted; +import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Objects; +import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -74,40 +76,35 @@ public abstract class ChainingInputStream extends InputStream { /** * Returns a new {@link ChainingInputStream} that concatenates the bytes to be read from the first - * input stream with the bytes from the second input stream. + * input stream with the bytes from the second input stream. The stream arguments must support + * {@code mark} and {@code reset}. 
* - * @param first the input stream supplying the first bytes of the returned {@link ChainingInputStream} - * @param second the input stream supplying the bytes after the {@code first} input stream has been exhausted (and closed) - * @param markSupported whether the returned {@link ChainingInputStream} supports mark and reset + * @param firstComponentSupplier the input stream supplying the first bytes of the returned {@link ChainingInputStream} + * @param secondComponentSupplier the input stream supplying the bytes after the {@code first} input stream has been exhausted (and + * closed) */ - public static InputStream chain(InputStream first, InputStream second, boolean markSupported) { - Objects.requireNonNull(first); - Objects.requireNonNull(second); - if (markSupported && false == first.markSupported()) { - throw new IllegalArgumentException("The first input stream does not support mark"); - } - if (markSupported && false == second.markSupported()) { - throw new IllegalArgumentException("The second input stream does not support mark"); - } + public static InputStream chain(Supplier firstComponentSupplier, Supplier secondComponentSupplier) { + Objects.requireNonNull(firstComponentSupplier); + Objects.requireNonNull(secondComponentSupplier); + final int FIRST_TAG = 1; + final int SECOND_TAG = 2; return new ChainingInputStream() { - @Override InputStream nextComponent(InputStream currentComponentIn) { if (currentComponentIn == null) { - return first; - } else if (currentComponentIn == first) { - return second; - } else if (currentComponentIn == second){ + return new TaggedFilterInputStream(Objects.requireNonNull(firstComponentSupplier.get(), + "First component supplier returned null. Return the empty stream instead."), FIRST_TAG); + } else if (false == currentComponentIn instanceof TaggedFilterInputStream) { + throw new IllegalStateException("Unexpected component input stream"); + } else if (((TaggedFilterInputStream) currentComponentIn).getTag() == FIRST_TAG) { + return new TaggedFilterInputStream(Objects.requireNonNull(secondComponentSupplier.get(), + "Second component supplier returned null. 
Return the empty stream instead."), SECOND_TAG); + } else if (((TaggedFilterInputStream) currentComponentIn).getTag() == SECOND_TAG) { return null; } else { throw new IllegalStateException("Unexpected component input stream"); } } - - @Override - public boolean markSupported() { - return markSupported; - } }; } @@ -411,4 +408,17 @@ private boolean nextIn() throws IOException { return true; } + private static class TaggedFilterInputStream extends FilterInputStream { + + final int tag; + + TaggedFilterInputStream(InputStream in, int tag) { + super(in); + this.tag = tag; + } + + public int getTag() { + return tag; + } + } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 1f10841cbff31..8add6072c3c6f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -466,8 +466,8 @@ public BlobContainer blobContainer(BlobPath path) { return new EncryptedBlobContainer(delegatedBlobStore, delegatedBasePath, path, dataEncryptionKeySupplier, metadataEncryption, encryptionNonceSupplier, metadataIdentifierSupplier); } - } + private static class EncryptedBlobContainer implements BlobContainer { private final BlobStore delegatedBlobStore; @@ -595,8 +595,9 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b // prepended to the encrypted data blob is the unique identifier (fixed length) of the metadata blob final long encryptedBlobSize = (long) MetadataIdentifier.byteLength() + EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); - try (InputStream encryptedInputStream = ChainingInputStream.chain(new ByteArrayInputStream(metadataIdentifier.asByteArray()), - new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES), true)) { + try (InputStream encryptedInputStream = + ChainingInputStream.chain(() -> new ByteArrayInputStream(metadataIdentifier.asByteArray()), + () -> new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES))) { delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } } diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 1137288c7b3c9..5184c8968d582 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -38,18 +38,22 @@ private static void preGenerateRepositoryNames() { @Override protected Settings nodeSettings(int nodeOrdinal) { - final MockSecureSettings secureSettings = new MockSecureSettings(); - for (String repositoryName : repositoryNames) { - secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
- getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); - } return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") - .setSecureSettings(secureSettings) .build(); } + @Override + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + for (String repositoryName : repositoryNames) { + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. + getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + } + return secureSettings; + } + @Override protected String randomRepositoryName() { return repositoryNames.remove(randomIntBetween(0, repositoryNames.size() - 1)); From 426bd0533d56617fcad96afcd29aa887d93e9922 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 21 Feb 2020 17:00:31 +0200 Subject: [PATCH 126/142] ChainingInputStream#chain fixes EncryptedAzureBlobStoreRepository --- .../encrypted/ChainingInputStream.java | 99 +++++++++++++------ .../encrypted/EncryptedRepository.java | 4 +- 2 files changed, 69 insertions(+), 34 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index bee2aff9d2442..d68ef0217a730 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -5,11 +5,13 @@ */ package org.elasticsearch.repositories.encrypted; +import java.io.Closeable; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.io.SequenceInputStream; +import java.util.Arrays; import java.util.Objects; -import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -77,34 +79,80 @@ public abstract class ChainingInputStream extends InputStream { /** * Returns a new {@link ChainingInputStream} that concatenates the bytes to be read from the first * input stream with the bytes from the second input stream. The stream arguments must support - * {@code mark} and {@code reset}. + * the {@code mark} and {@code reset} operations; otherwise use {@link SequenceInputStream}. 
* - * @param firstComponentSupplier the input stream supplying the first bytes of the returned {@link ChainingInputStream} - * @param secondComponentSupplier the input stream supplying the bytes after the {@code first} input stream has been exhausted (and - * closed) + * @param first the input stream supplying the first bytes of the returned {@link ChainingInputStream} + * @param second the input stream supplying the bytes after the {@code first} input stream has been exhausted */ - public static InputStream chain(Supplier firstComponentSupplier, Supplier secondComponentSupplier) { - Objects.requireNonNull(firstComponentSupplier); - Objects.requireNonNull(secondComponentSupplier); - final int FIRST_TAG = 1; - final int SECOND_TAG = 2; + public static InputStream chain(InputStream first, InputStream second) { + if (false == Objects.requireNonNull(first).markSupported()) { + throw new IllegalArgumentException("The first component input stream does not support mark"); + } + if (false == Objects.requireNonNull(second).markSupported()) { + throw new IllegalArgumentException("The second component input stream does not support mark"); + } + final InputStream firstComponent = new FilterInputStream(first) { + @Override + public void close() { + // silence close + // "first" can be reused, and the {@code ChainingInputStream} eagerly closes components after every use + // "first" is closed when the returned {@code ChainingInputStream} is closed + } + }; + final InputStream secondComponent = new FilterInputStream(second) { + @Override + public void close() { + // silence close + // "second" can be reused, and the {@code ChainingInputStream} eagerly closes components after every use + // "second" is closed when the returned {@code ChainingInputStream} is closed + } + }; + // be sure to remember the start of components because they might be reused + firstComponent.mark(Integer.MAX_VALUE); + secondComponent.mark(Integer.MAX_VALUE); + return new ChainingInputStream() { + @Override - InputStream nextComponent(InputStream currentComponentIn) { + InputStream nextComponent(InputStream currentComponentIn) throws IOException { if (currentComponentIn == null) { - return new TaggedFilterInputStream(Objects.requireNonNull(firstComponentSupplier.get(), - "First component supplier returned null. Return the empty stream instead."), FIRST_TAG); - } else if (false == currentComponentIn instanceof TaggedFilterInputStream) { - throw new IllegalStateException("Unexpected component input stream"); - } else if (((TaggedFilterInputStream) currentComponentIn).getTag() == FIRST_TAG) { - return new TaggedFilterInputStream(Objects.requireNonNull(secondComponentSupplier.get(), - "Second component supplier returned null. 
Return the empty stream instead."), SECOND_TAG); - } else if (((TaggedFilterInputStream) currentComponentIn).getTag() == SECOND_TAG) { + // when returning the next component, start from its beginning + firstComponent.reset(); + return firstComponent; + } else if (currentComponentIn == firstComponent) { + // when returning the next component, start from its beginning + secondComponent.reset(); + return secondComponent; + } else if (currentComponentIn == secondComponent) { return null; } else { throw new IllegalStateException("Unexpected component input stream"); } } + + @Override + public void close() throws IOException { + IOException IOExceptions = null; + try { + super.close(); + } catch (IOException e) { + IOExceptions = e; + } + for (Closeable closeable : Arrays.asList(first, second)) { + try { + closeable.close(); + } catch (IOException e) { + if (IOExceptions != null) { + IOExceptions.addSuppressed(e); + } else { + IOExceptions = e; + } + } + } + if (IOExceptions != null) { + throw IOExceptions; + } + } }; } @@ -408,17 +456,4 @@ private boolean nextIn() throws IOException { return true; } - private static class TaggedFilterInputStream extends FilterInputStream { - - final int tag; - - TaggedFilterInputStream(InputStream in, int tag) { - super(in); - this.tag = tag; - } - - public int getTag() { - return tag; - } - } } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 8add6072c3c6f..5c28dd9ed141f 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -596,8 +596,8 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b final long encryptedBlobSize = (long) MetadataIdentifier.byteLength() + EncryptionPacketsInputStream.getEncryptionLength(blobSize, PACKET_LENGTH_IN_BYTES); try (InputStream encryptedInputStream = - ChainingInputStream.chain(() -> new ByteArrayInputStream(metadataIdentifier.asByteArray()), - () -> new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES))) { + ChainingInputStream.chain(new ByteArrayInputStream(metadataIdentifier.asByteArray()), + new EncryptionPacketsInputStream(inputStream, dataEncryptionKey, nonce, PACKET_LENGTH_IN_BYTES))) { delegatedBlobContainer.writeBlob(blobName, encryptedInputStream, encryptedBlobSize, failIfAlreadyExists); } } From 10e84d32e44d362700e3f4aa2cf0c61ba82a0087 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 21 Feb 2020 17:23:04 +0200 Subject: [PATCH 127/142] License code after refactor --- .../java/org/elasticsearch/license/XPackLicenseState.java | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 96b94dfd88ddf..b3f34247425e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -486,8 +486,7 @@ public boolean isWatcherAllowed() { * license operation mode (i.e. it's free for all). 
*/ public synchronized boolean isEncryptedSnapshotAllowed() { - final Status currentStatus = status; - return currentStatus.active && isEncryptedSnapshotAllowedForOperationMode(currentStatus.mode); + return isAllowedByLicense(OperationMode.PLATINUM); } /** @@ -679,10 +678,6 @@ public static boolean isCcrAllowedForOperationMode(final OperationMode operation return isAllowedByOperationMode(operationMode, OperationMode.PLATINUM, true); } - public static boolean isEncryptedSnapshotAllowedForOperationMode(final OperationMode operationMode) { - return isAllowedByOperationMode(operationMode, OperationMode.PLATINUM, true); - } - public static boolean isAllowedByOperationMode( final OperationMode operationMode, final OperationMode minimumMode, final boolean allowTrial) { if (allowTrial && OperationMode.TRIAL == operationMode) { From 94b3847715f4de5109243eb68bb9eb7d6983f123 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 24 Feb 2020 01:07:22 +0200 Subject: [PATCH 128/142] EncryptedS3BlobStoreRepositoryIntegTests and MockHttpServer --- .../azure/AzureBlobStoreRepositoryTests.java | 11 +- ...tedAzureBlobStoreRepositoryIntegTests.java | 7 +- ...yptedGCSBlobStoreRepositoryIntegTests.java | 7 +- ...eCloudStorageBlobStoreRepositoryTests.java | 12 +- ...ryptedS3BlobStoreRepositoryIntegTests.java | 110 ++++++++++++++++++ .../s3/S3BlobStoreRepositoryTests.java | 15 ++- ...ESMockAPIBasedRepositoryIntegTestCase.java | 10 +- .../elasticsearch/test/ESIntegTestCase.java | 8 +- .../encrypted/EncryptedRepository.java | 58 +++++---- ...ryptedFSBlobStoreRepositoryIntegTests.java | 9 +- 10 files changed, 184 insertions(+), 63 deletions(-) create mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/EncryptedS3BlobStoreRepositoryIntegTests.java diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index d158b162cbfdf..3adbf5858c348 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; -import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -77,14 +76,14 @@ protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { protected Settings nodeSettings(int nodeOrdinal) { final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + httpServerUrl(); return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint) - .build(); + .put(super.nodeSettings(nodeOrdinal)) + .put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint) + .setSecureSettings(nodeSecureSettings(nodeOrdinal)) + .build(); } - @Override protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { - MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + MockSecureSettings secureSettings = new MockSecureSettings(); final String key = 
Base64.getEncoder().encodeToString(randomAlphaOfLength(10).getBytes(StandardCharsets.UTF_8)); secureSettings.setString(AzureStorageSettings.ACCOUNT_SETTING.getConcreteSettingForNamespace("test").getKey(), "account"); secureSettings.setString(AzureStorageSettings.KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), key); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java index 298a0456a00cf..51ec89292bd2b 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/EncryptedAzureBlobStoreRepositoryIntegTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -56,7 +57,7 @@ private static void preGenerateRepositoryNames() { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), License.LicenseType.TRIAL.getTypeName()) .build(); } @@ -65,7 +66,7 @@ protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); for (String repositoryName : repositoryNames) { secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
- getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + getConcreteSettingForNamespace(repositoryName).getKey(), "password" + repositoryName); } return secureSettings; } @@ -99,7 +100,7 @@ protected String repositoryType() { protected Settings repositorySettings() { final Settings.Builder settings = Settings.builder(); settings.put(super.repositorySettings()); - settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), "azure"); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), AzureRepository.TYPE); if (ESTestCase.randomBoolean()) { long size = 1 << ESTestCase.randomInt(10); settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java index 0c8d9cd81d791..103f79b0b5f8e 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGCSBlobStoreRepositoryIntegTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -56,7 +57,7 @@ private static void preGenerateRepositoryNames() { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), License.LicenseType.TRIAL.getTypeName()) .build(); } @@ -65,7 +66,7 @@ protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); for (String repositoryName : repositoryNames) { secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
- getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + getConcreteSettingForNamespace(repositoryName).getKey(), "password" + repositoryName); } return secureSettings; } @@ -99,7 +100,7 @@ protected String repositoryType() { protected Settings repositorySettings() { final Settings.Builder settings = Settings.builder(); settings.put(super.repositorySettings()); - settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), "gcs"); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), GoogleCloudStorageRepository.TYPE); if (ESTestCase.randomBoolean()) { long size = 1 << ESTestCase.randomInt(10); settings.put("chunk_size", new ByteSizeValue(size, ByteSizeUnit.KB)); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 980680bc3ec03..cb9a0cab5e632 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -101,16 +101,16 @@ protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { @Override protected Settings nodeSettings(int nodeOrdinal) { - final Settings.Builder settings = Settings.builder(); - settings.put(super.nodeSettings(nodeOrdinal)); - settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl()); - settings.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl() + "/token"); + final Settings.Builder settings = Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl()) + .put(TOKEN_URI_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl() + "/token") + .setSecureSettings(nodeSecureSettings(nodeOrdinal)); return settings.build(); } - @Override protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { - final MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + final MockSecureSettings secureSettings = new MockSecureSettings(); final byte[] serviceAccount = TestUtils.createServiceAccount(random()); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace("test").getKey(), serviceAccount); return secureSettings; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/EncryptedS3BlobStoreRepositoryIntegTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/EncryptedS3BlobStoreRepositoryIntegTests.java new file mode 100644 index 0000000000000..0da596fa8d4ca --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/EncryptedS3BlobStoreRepositoryIntegTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.s3; + +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.encrypted.DecryptionPacketsInputStream; +import org.elasticsearch.repositories.encrypted.EncryptedRepository; +import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; +import org.elasticsearch.repositories.encrypted.LocalStateEncryptedRepositoryPlugin; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public class EncryptedS3BlobStoreRepositoryIntegTests extends S3BlobStoreRepositoryTests { + + private static List repositoryNames; + + @BeforeClass + private static void preGenerateRepositoryNames() { + List names = new ArrayList<>(); + for (int i = 0; i < 32; i++) { + names.add("test-repo-" + i); + } + repositoryNames = Collections.synchronizedList(names); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), License.LicenseType.TRIAL.getTypeName()) + .build(); + } + + @Override + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { + MockSecureSettings secureSettings = super.nodeSecureSettings(nodeOrdinal); + for (String repositoryName : repositoryNames) { + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. 
+ getConcreteSettingForNamespace(repositoryName).getKey(), "password" + repositoryName); + } + return secureSettings; + } + + @Override + protected String randomRepositoryName() { + return repositoryNames.remove(randomIntBetween(0, repositoryNames.size() - 1)); + } + + protected long blobLengthFromDiskLength(BlobMetaData blobMetaData) { + if (BlobStoreRepository.INDEX_LATEST_BLOB.equals(blobMetaData.name())) { + // index.latest is not encrypted, hence the size on disk is equal to the content + return blobMetaData.length(); + } else { + return DecryptionPacketsInputStream.getDecryptionLength(blobMetaData.length() - + EncryptedRepository.MetadataIdentifier.byteLength(), EncryptedRepository.PACKET_LENGTH_IN_BYTES); + } + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Arrays.asList(LocalStateEncryptedRepositoryPlugin.class, TestS3RepositoryPlugin.class); + } + + @Override + protected String repositoryType() { + return EncryptedRepositoryPlugin.REPOSITORY_TYPE_NAME; + } + + @Override + protected Settings repositorySettings() { + final Settings.Builder settings = Settings.builder(); + settings.put(super.repositorySettings()); + settings.put(EncryptedRepositoryPlugin.DELEGATE_TYPE.getKey(), S3Repository.TYPE); + return settings.build(); + } + + @Override + public void testEnforcedCooldownPeriod() { + // this test is not applicable for the encrypted repository because it verifies behavior which pertains to snapshots that must + // be created before the encrypted repository was introduced, hence no such encrypted snapshots can possibly exist + } + +} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index c28f6fbb66e7b..f8ae1ec594eca 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -118,10 +118,6 @@ protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { @Override protected Settings nodeSettings(int nodeOrdinal) { - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(S3ClientSettings.ACCESS_KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), "access"); - secureSettings.setString(S3ClientSettings.SECRET_KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), "secret"); - final Settings.Builder builder = Settings.builder() .put(ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.getKey(), 0) // We have tests that verify an exact wait time .put(S3ClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), httpServerUrl()) @@ -130,7 +126,7 @@ protected Settings nodeSettings(int nodeOrdinal) { // Disable request throttling because some random values in tests might generate too many failures for the S3 client .put(S3ClientSettings.USE_THROTTLE_RETRIES_SETTING.getConcreteSettingForNamespace("test").getKey(), false) .put(super.nodeSettings(nodeOrdinal)) - .setSecureSettings(secureSettings); + .setSecureSettings(nodeSecureSettings(nodeOrdinal)); if (signerOverride != null) { builder.put(S3ClientSettings.SIGNER_OVERRIDE.getConcreteSettingForNamespace("test").getKey(), signerOverride); @@ -141,8 +137,15 @@ protected Settings nodeSettings(int nodeOrdinal) { return builder.build(); } + protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { +
MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(S3ClientSettings.ACCESS_KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), "access"); + secureSettings.setString(S3ClientSettings.SECRET_KEY_SETTING.getConcreteSettingForNamespace("test").getKey(), "secret"); + return secureSettings; + } + public void testEnforcedCooldownPeriod() throws IOException { - final String repoName = createRepository(randomName(), Settings.builder().put(repositorySettings()) + final String repoName = createRepository(randomRepositoryName(), Settings.builder().put(repositorySettings()) .put(S3Repository.COOLDOWN_PERIOD.getKey(), TEST_COOLDOWN_PERIOD).build()); final SnapshotId fakeOldSnapshot = client().admin().cluster().prepareCreateSnapshot(repoName, "snapshot-old") diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index ce953389d0f4a..85bfc9e83a84e 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -79,12 +79,12 @@ protected interface BlobStoreHttpHandler extends HttpHandler { public static void startHttpServer() throws Exception { httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.setExecutor(r -> { - try { - r.run(); - } catch (Throwable t) { + Thread runThread = new Thread(r); + runThread.setUncaughtExceptionHandler((t, e) -> { log.error("Error in execution on mock http server IO thread", t); - throw t; - } + Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(t, e); + }); + runThread.start(); }); httpServer.start(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 8c78370649ebd..24ab6803e6e7c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -81,7 +81,6 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -1618,15 +1617,10 @@ protected Settings nodeSettings(int nodeOrdinal) { // randomly enable low-level search cancellation to make sure it does not alter results .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean()) .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes - .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file") - .setSecureSettings(nodeSecureSettings(nodeOrdinal)); + .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file"); return builder.build(); } - protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { - return new MockSecureSettings(); - } - protected Path nodeConfigPath(int nodeOrdinal) { return null; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 5c28dd9ed141f..f198b71983da3 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -521,33 +521,45 @@ public BlobPath path() { */ @Override public InputStream readBlob(String blobName) throws IOException { + // TODO this requires two concurrent readBlob operations and it's technically possible that the storage server has concurrent + // connections handling limit which gets saturated with only the first connection of the pair, thereby hampering progress, + // when connections start timing out on read until the pair connection for an existing live connection succeeds final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); - // read the metadata identifier (fixed length) which is prepended to the encrypted blob - final byte[] metaId = encryptedDataInputStream.readNBytes(MetadataIdentifier.byteLength()); - if (metaId.length != MetadataIdentifier.byteLength()) { - throw new IOException("Failure to read encrypted blob metadata identifier"); - } - final MetadataIdentifier metadataIdentifier = MetadataIdentifier.fromByteArray(metaId); - // the metadata blob name is the name of the data blob followed by the base64 encoding (URL safe) of the metadata identifier - final String metadataBlobName = MetadataIdentifier.formMetadataBlobName(blobName, metadataIdentifier); - // read the encrypted metadata contents - final BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); - final BlobEncryptionMetadata metadata; try { - // decrypt and parse metadata - metadata = BlobEncryptionMetadata.deserializeMetadata(BytesReference.toBytes(encryptedMetadataBytes), - metadataEncryption::decrypt); - } catch (IOException e) { - // friendlier exception message - String failureMessage = "Failure to decrypt metadata for blob [" + blobName + "]"; - if (e.getCause() instanceof AEADBadTagException) { - failureMessage = failureMessage + ". 
The repository password is probably wrong."; + // read the metadata identifier (fixed length) which is prepended to the encrypted blob + final byte[] metaId = encryptedDataInputStream.readNBytes(MetadataIdentifier.byteLength()); + if (metaId.length != MetadataIdentifier.byteLength()) { + throw new IOException("Failure to read encrypted blob metadata identifier"); + } + final MetadataIdentifier metadataIdentifier = MetadataIdentifier.fromByteArray(metaId); + // the metadata blob name is the name of the data blob followed by the base64 encoding (URL safe) of the metadata identifier + final String metadataBlobName = MetadataIdentifier.formMetadataBlobName(blobName, metadataIdentifier); + // read the encrypted metadata contents + final BytesReference encryptedMetadataBytes = Streams.readFully(encryptionMetadataBlobContainer.readBlob(metadataBlobName)); + final BlobEncryptionMetadata metadata; + try { + // decrypt and parse metadata + metadata = BlobEncryptionMetadata.deserializeMetadata(BytesReference.toBytes(encryptedMetadataBytes), + metadataEncryption::decrypt); + } catch (IOException e) { + // friendlier exception message + String failureMessage = "Failure to decrypt metadata for blob [" + blobName + "]"; + if (e.getCause() instanceof AEADBadTagException) { + failureMessage = failureMessage + ". The repository password is probably wrong."; + } + throw new IOException(failureMessage, e); + } + // read and decrypt the data blob + return new DecryptionPacketsInputStream(encryptedDataInputStream, metadata.getDataEncryptionKey(), metadata.getNonce(), + metadata.getPacketLengthInBytes()); + } catch (Exception e) { + try { + encryptedDataInputStream.close(); + } catch (IOException closeEx) { + e.addSuppressed(closeEx); } - throw new IOException(failureMessage, e); + throw e; } - // read and decrypt the data blob - return new DecryptionPacketsInputStream(encryptedDataInputStream, metadata.getDataEncryptionKey(), metadata.getNonce(), - metadata.getPacketLengthInBytes()); } /** diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java index 5184c8968d582..7dfbda5c15d85 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedFSBlobStoreRepositoryIntegTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; @@ -40,16 +41,16 @@ private static void preGenerateRepositoryNames() { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial") + .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), License.LicenseType.TRIAL.getTypeName()) + .setSecureSettings(nodeSecureSettings(nodeOrdinal)) .build(); } - @Override protected MockSecureSettings nodeSecureSettings(int nodeOrdinal) { - MockSecureSettings secureSettings = 
super.nodeSecureSettings(nodeOrdinal); + MockSecureSettings secureSettings = new MockSecureSettings(); for (String repositoryName : repositoryNames) { secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. - getConcreteSettingForNamespace(repositoryName).getKey(), "passwordPassword"); + getConcreteSettingForNamespace(repositoryName).getKey(), "password" + repositoryName); } return secureSettings; } From 6708354c331f2267040f71a78a7b0cfcb4c06905 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 24 Feb 2020 01:10:43 +0200 Subject: [PATCH 129/142] Nit --- plugins/repository-gcs/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 6cab3eee5023c..21b222887b4a1 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -126,4 +126,3 @@ check { // also execute the QA tests when testing the plugin dependsOn 'qa:google-cloud-storage:check' } - From 12a074be2688f83a19abc12d56f7ceb235245907 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 24 Feb 2020 01:21:02 +0200 Subject: [PATCH 130/142] bad logger --- .../blobstore/ESMockAPIBasedRepositoryIntegTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 85bfc9e83a84e..24c1c2cd1246d 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -81,7 +81,7 @@ public static void startHttpServer() throws Exception { httpServer.setExecutor(r -> { Thread runThread = new Thread(r); runThread.setUncaughtExceptionHandler((t, e) -> { - log.error("Error in execution on mock http server IO thread", t); + log.error("Error in execution on mock http server IO thread", e); Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(t, e); }); runThread.start(); From a8c931ca382ecb4af8731609e353d835c475efed Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 24 Feb 2020 17:14:32 +0200 Subject: [PATCH 131/142] ESMockAPIBasedRepositoryIntegTestCase Executor Service --- ...ESMockAPIBasedRepositoryIntegTestCase.java | 22 +++++++++---------- .../encrypted/EncryptedRepository.java | 2 ++ .../encrypted/EncryptedRepositoryPlugin.java | 5 ++++- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 24c1c2cd1246d..97b1d66e8231d 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -22,7 +22,6 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.apache.http.HttpStatus; -import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; @@ -32,6 +31,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mocksocket.MockHttpServer; import org.elasticsearch.test.BackgroundIndexer; import org.junit.After; @@ -46,6 +47,9 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -71,21 +75,16 @@ protected interface BlobStoreHttpHandler extends HttpHandler { private static final byte[] BUFFER = new byte[1024]; private static HttpServer httpServer; + private static ExecutorService executorService; private Map handlers; - private static final Logger log = LogManager.getLogger(); - @BeforeClass public static void startHttpServer() throws Exception { httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); - httpServer.setExecutor(r -> { - Thread runThread = new Thread(r); - runThread.setUncaughtExceptionHandler((t, e) -> { - log.error("Error in execution on mock http server IO thread", e); - Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(t, e); - }); - runThread.start(); - }); + ThreadFactory threadFactory = EsExecutors.daemonThreadFactory("[" + ESMockAPIBasedRepositoryIntegTestCase.class.getName() + "]"); + executorService = EsExecutors.newScaling(ESMockAPIBasedRepositoryIntegTestCase.class.getName(), 0, 2, 60, TimeUnit.SECONDS, + threadFactory, new ThreadContext(Settings.EMPTY)); + httpServer.setExecutor(executorService); httpServer.start(); } @@ -98,6 +97,7 @@ public void setUpHttpServer() { @AfterClass public static void stopHttpServer() { httpServer.stop(0); + executorService.shutdown(); httpServer = null; } diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index f198b71983da3..f50e3ffd65866 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -263,6 +263,8 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera Version repositoryMetaVersion, ActionListener listener) { validateRepositoryPasswordHash(userMetadata, listener::onFailure); if (userMetadata != null && userMetadata.containsKey(PASSWORD_HASH_RESERVED_USER_METADATA_KEY)) { + // remove the repository password hash from the snapshot metadata, after all repository password verifications + // have completed, so that the hash is not displayed in the API response to the user userMetadata.remove(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); } super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java index 956022de30804..cab86a67b4aa5 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryPlugin.java @@ -94,10 +94,13 @@ public Repository create(RepositoryMetaData metaData, Function Date: Mon, 24 Feb 2020 17:34:16 +0200 Subject: [PATCH 132/142] Explain that readBlob requires two concurrent connections --- .../repositories/encrypted/EncryptedRepository.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index f50e3ffd65866..37fc34aa75200 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -523,9 +523,11 @@ public BlobPath path() { */ @Override public InputStream readBlob(String blobName) throws IOException { - // TODO this requires two concurrent readBlob operations and it's technically possible that the storage server has concurrent - // connections handling limit which gets saturated with only the first connection of the pair, thereby hampering progress, - // when connections start timing out on read until the pair connection for an existing live connection succeeds + // this requires two concurrent readBlob connections, so it's possible that, under lab conditions, the storage service + // is saturated only by the first read connection of the pair, so that the second read connection (for the metadata) cannot be + // fulfilled. In this case the second connection will time out, which triggers the closing of the first one, thereby + // allowing other connection pairs to complete. In this situation the restore process should still slowly make headway, albeit + // with read-timeout exceptions final InputStream encryptedDataInputStream = delegatedBlobContainer.readBlob(blobName); try { // read the metadata identifier (fixed length) which is prepended to the encrypted blob From 28dd9e105ed05775a6c1492b6985bdbf41889602 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:04:01 +0200 Subject: [PATCH 133/142] Add EncryptedRepository#getEncryptedBlobByteLength --- .../repositories/encrypted/EncryptedRepository.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 37fc34aa75200..f3a45dd6f38f7 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -132,6 +132,16 @@ public final class EncryptedRepository extends BlobStoreRepository { // repository password on the local node private final HashVerifier passwordHashVerifier; + /** + * Returns the byte length (i.e. 
the storage size) of an encrypted blob, given the length of the blob's plaintext contents. + * + * @see EncryptionPacketsInputStream#getEncryptionLength(long, int) + */ + public static long getEncryptedBlobByteLength(long plaintextBlobByteLength) { + return ((long) MetadataIdentifier.byteLength()) + EncryptionPacketsInputStream.getEncryptionLength(plaintextBlobByteLength, + PACKET_LENGTH_IN_BYTES); + } + protected EncryptedRepository(RepositoryMetaData metadata, NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, BlobStoreRepository delegatedRepository, Supplier licenseStateSupplier, char[] password) throws NoSuchAlgorithmException { From b9ba309c616193706781bb151532d0ff5d11e001 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:05:04 +0200 Subject: [PATCH 134/142] Log unrecognized blobs during cleanup --- .../repositories/encrypted/EncryptedRepository.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index f3a45dd6f38f7..8874f6b2ffd33 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -399,6 +399,7 @@ private DeleteResult cleanUnreferencedEncryptionMetadata(EncryptedBlobContainer blobNameAndMetaId = MetadataIdentifier.parseFromMetadataBlobName(metadataBlobName); } catch (IllegalArgumentException e) { // ignore invalid metadata blob names, which most likely have been created externally + logger.warn("Unrecognized blob name for metadata [" + metadataBlobName + "]", e); continue; } if (false == allDataBlobs.containsKey(blobNameAndMetaId.v1()) && From ef4c86054c89ad97a61854b2ecea6cf0adaa8573 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:05:56 +0200 Subject: [PATCH 135/142] Fix EncryptedRepository#cleanup bug --- .../repositories/encrypted/EncryptedRepository.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 8874f6b2ffd33..e8570588957e7 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -409,8 +409,8 @@ private DeleteResult cleanUnreferencedEncryptionMetadata(EncryptedBlobContainer metadataBlobsToDelete.add(blobNameAndMetaId.v1()); } // group metadata blobs by their associated blob name - metaDataByBlobName.putIfAbsent(blobNameAndMetaId.v1(), new ArrayList<>()).add(new Tuple<>(blobNameAndMetaId.v2(), - metadataBlobName)); + metaDataByBlobName.computeIfAbsent(blobNameAndMetaId.v1(), k -> new ArrayList<>(1)) + .add(new Tuple<>(blobNameAndMetaId.v2(), metadataBlobName)); } metaDataByBlobName.entrySet().forEach(entry -> { if (entry.getValue().size() > 1) { From 40fa0eb4369d932d28756d149f86e382637bc90a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:07:18 +0200 Subject: [PATCH 136/142] Add 
EncryptedGoogleCloudStorageThirdPartyTests --- .../qa/google-cloud-storage/build.gradle | 8 ++ ...ptedGoogleCloudStorageThirdPartyTests.java | 111 ++++++++++++++++++ .../GoogleCloudStorageThirdPartyTests.java | 4 +- .../AbstractThirdPartyRepositoryTestCase.java | 30 +++-- 4 files changed, 138 insertions(+), 15 deletions(-) create mode 100644 plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGoogleCloudStorageThirdPartyTests.java diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index 5f6d90f84416b..9568b5d7a8fe0 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -34,6 +34,9 @@ apply plugin: 'elasticsearch.test.fixtures' // TODO think about flattening qa:google-cloud-storage project into parent dependencies { testCompile project(path: ':plugins:repository-gcs') + // required by the third-party test for the encrypted GCS repository + testCompile project(path: ':x-pack:plugin:repository-encrypted') + testCompile project(path: ':x-pack:plugin:core') } testFixtures.useFixture(':test:fixtures:gcs-fixture') @@ -101,6 +104,7 @@ task thirdPartyTest(type: Test) { } include '**/GoogleCloudStorageThirdPartyTests.class' + include '**/EncryptedGoogleCloudStorageThirdPartyTests.class' systemProperty 'tests.security.manager', false systemProperty 'test.google.bucket', gcsBucket systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed @@ -126,10 +130,14 @@ processTestResources { integTest { dependsOn project(':plugins:repository-gcs').bundlePlugin + dependsOn project(':x-pack:plugin:core').bundlePlugin + dependsOn project(':x-pack:plugin:repository-encrypted').bundlePlugin } testClusters.integTest { plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile) + plugin file(project(':x-pack:plugin:core').bundlePlugin.archiveFile) + plugin file(project(':x-pack:plugin:repository-encrypted').bundlePlugin.archiveFile) keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGoogleCloudStorageThirdPartyTests.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGoogleCloudStorageThirdPartyTests.java new file mode 100644 index 0000000000000..011c5d7061928 --- /dev/null +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/EncryptedGoogleCloudStorageThirdPartyTests.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.gcs; + +import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.SecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.encrypted.EncryptedRepository; +import org.elasticsearch.repositories.encrypted.EncryptedRepositoryPlugin; +import org.elasticsearch.xpack.core.XPackPlugin; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class EncryptedGoogleCloudStorageThirdPartyTests extends GoogleCloudStorageThirdPartyTests { + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(XPackPlugin.class, EncryptedRepositoryPlugin.class, GoogleCloudStoragePlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder() + .put(super.nodeSettings()) + .put("xpack.license.self_generated.type", "trial") + .build(); + } + + @Override + protected SecureSettings credentials() { + MockSecureSettings secureSettings = (MockSecureSettings) super.credentials(); + secureSettings.setString(EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING. + getConcreteSettingForNamespace("test-encrypted-repo").getKey(), "password-test-repo"); + return secureSettings; + } + + @Override + protected void createRepository(final String repoName) { + AcknowledgedResponse putRepositoryResponse = client().admin().cluster() + .preparePutRepository("test-encrypted-repo") + .setType("encrypted") + .setSettings(Settings.builder() + .put("delegate_type", "gcs") + .put("bucket", System.getProperty("test.google.bucket")) + .put("base_path", System.getProperty("test.google.base", "") + + "/" + EncryptedGoogleCloudStorageThirdPartyTests.class.getName() ) + ).get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + } + + @Override + protected void assertCleanupResponse(CleanupRepositoryResponse response, long bytes, long blobs) { + // TODO cleanup of root blobs does not count the encryption metadata blobs, but the cleanup of blob containers ("indices" folder) + // does count them; ideally there should be consistency, one way or the other + assertThat(response.result().blobs(), equalTo(1L + 2L + 1L /* one metadata blob */)); + // the cleanup stats of the encrypted repository currently include only some of the metadata blobs (as per above), which are + // themselves cumbersome to size; but the bytes count is stable + assertThat(response.result().bytes(), equalTo(244L)); + } + + @Override + protected void assertBlobsByPrefix(BlobPath path, String prefix, Map<String, BlobMetaData> blobs) throws Exception { + // blobs are larger after encryption + Map<String, BlobMetaData> blobsWithSizeAfterEncryption = new HashMap<>(); + blobs.forEach((name, meta) -> { + blobsWithSizeAfterEncryption.put(name, new BlobMetaData() { + @Override + public String name() { + return meta.name(); + } + + @Override + public long length() { + return EncryptedRepository.getEncryptedBlobByteLength(meta.length()); + } + }); + }); + super.assertBlobsByPrefix(path, prefix, blobsWithSizeAfterEncryption); + } + + @Override + protected String getTestRepoName() { + return "test-encrypted-repo"; + } + +} diff --git 
a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageThirdPartyTests.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageThirdPartyTests.java index 0096198791e39..03b477dadbf1a 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageThirdPartyTests.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageThirdPartyTests.java @@ -69,11 +69,11 @@ protected SecureSettings credentials() { @Override protected void createRepository(final String repoName) { - AcknowledgedResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("test-repo") + AcknowledgedResponse putRepositoryResponse = client().admin().cluster().preparePutRepository(repoName) .setType("gcs") .setSettings(Settings.builder() .put("bucket", System.getProperty("test.google.bucket")) - .put("base_path", System.getProperty("test.google.base", "/")) + .put("base_path", System.getProperty("test.google.base", "") + "/" + GoogleCloudStorageThirdPartyTests.class.getName() ) ).get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java index efc0e653edf5c..e632b4d2a2b6c 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -69,14 +69,14 @@ protected Settings nodeSettings() { @Override public void setUp() throws Exception { super.setUp(); - createRepository("test-repo"); + createRepository(getTestRepoName()); deleteAndAssertEmpty(getRepository().basePath()); } @Override public void tearDown() throws Exception { deleteAndAssertEmpty(getRepository().basePath()); - client().admin().cluster().prepareDeleteRepository("test-repo").get(); + client().admin().cluster().prepareDeleteRepository(getTestRepoName()).get(); super.tearDown(); } @@ -112,7 +112,7 @@ public void testCreateSnapshot() { logger.info("--> snapshot"); CreateSnapshotResponse createSnapshotResponse = client().admin() .cluster() - .prepareCreateSnapshot("test-repo", snapshotName) + .prepareCreateSnapshot(getTestRepoName(), snapshotName) .setWaitForCompletion(true) .setIndices("test-idx-*", "-test-idx-3") .get(); @@ -122,17 +122,17 @@ public void testCreateSnapshot() { assertThat(client().admin() .cluster() - .prepareGetSnapshots("test-repo") + .prepareGetSnapshots(getTestRepoName()) .setSnapshots(snapshotName) .get() - .getSnapshots("test-repo") + .getSnapshots(getTestRepoName()) .get(0) .state(), equalTo(SnapshotState.SUCCESS)); assertTrue(client().admin() .cluster() - .prepareDeleteSnapshot("test-repo", snapshotName) + .prepareDeleteSnapshot(getTestRepoName(), snapshotName) .get() .isAcknowledged()); } @@ -188,7 +188,7 @@ public void testCleanup() throws Exception { logger.info("--> snapshot"); CreateSnapshotResponse createSnapshotResponse = client().admin() .cluster() - .prepareCreateSnapshot("test-repo", snapshotName) + .prepareCreateSnapshot(getTestRepoName(), snapshotName) .setWaitForCompletion(true) .setIndices("test-idx-*", "-test-idx-3") .get(); @@ -198,16 +198,16 @@ public void 
testCleanup() throws Exception { assertThat(client().admin() .cluster() - .prepareGetSnapshots("test-repo") + .prepareGetSnapshots(getTestRepoName()) .setSnapshots(snapshotName) .get() - .getSnapshots("test-repo") + .getSnapshots(getTestRepoName()) .get(0) .state(), equalTo(SnapshotState.SUCCESS)); final BlobStoreRepository repo = - (BlobStoreRepository) getInstanceFromNode(RepositoriesService.class).repository("test-repo"); + (BlobStoreRepository) getInstanceFromNode(RepositoriesService.class).repository(getTestRepoName()); final Executor genericExec = repo.threadPool().executor(ThreadPool.Names.GENERIC); logger.info("--> creating a dangling index folder"); @@ -215,7 +215,7 @@ public void testCleanup() throws Exception { createDanglingIndex(repo, genericExec); logger.info("--> deleting a snapshot to trigger repository cleanup"); - client().admin().cluster().deleteSnapshot(new DeleteSnapshotRequest("test-repo", snapshotName)).actionGet(); + client().admin().cluster().deleteSnapshot(new DeleteSnapshotRequest(getTestRepoName(), snapshotName)).actionGet(); assertConsistentRepository(repo, genericExec); @@ -223,7 +223,7 @@ public void testCleanup() throws Exception { createDanglingIndex(repo, genericExec); logger.info("--> Execute repository cleanup"); - final CleanupRepositoryResponse response = client().admin().cluster().prepareCleanupRepository("test-repo").get(); + final CleanupRepositoryResponse response = client().admin().cluster().prepareCleanupRepository(getTestRepoName()).get(); assertCleanupResponse(response, 3L, 1L); } @@ -285,6 +285,10 @@ private Set listChildren(BlobPath path) { } protected BlobStoreRepository getRepository() { - return (BlobStoreRepository) getInstanceFromNode(RepositoriesService.class).repository("test-repo"); + return (BlobStoreRepository) getInstanceFromNode(RepositoriesService.class).repository(getTestRepoName()); + } + + protected String getTestRepoName() { + return "test-repo"; } } From b7e140cd0bfce171e00c57b03c3e43ef30dec26c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:17:51 +0200 Subject: [PATCH 137/142] use ThreadPool#terminate in ESMockAPIBasedRepository... 
--- .../blobstore/ESMockAPIBasedRepositoryIntegTestCase.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 97b1d66e8231d..dd9fa6ba54fdb 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mocksocket.MockHttpServer; import org.elasticsearch.test.BackgroundIndexer; +import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -97,7 +98,7 @@ public void setUpHttpServer() { @AfterClass public static void stopHttpServer() { httpServer.stop(0); - executorService.shutdown(); + ThreadPool.terminate(executorService, 10, TimeUnit.SECONDS); httpServer = null; } From fe505e523821f030dea243c677ce7d20f496f9fa Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:25:56 +0200 Subject: [PATCH 138/142] Revert to wrapping the HTTP IO executor executor in ESMockAPIBasedRepositoryInteg... --- .../ESMockAPIBasedRepositoryIntegTestCase.java | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index dd9fa6ba54fdb..25822df60da88 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -22,6 +22,7 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.apache.http.HttpStatus; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; @@ -79,13 +80,24 @@ protected interface BlobStoreHttpHandler extends HttpHandler { private static ExecutorService executorService; private Map handlers; + private static final Logger log = LogManager.getLogger(); + @BeforeClass public static void startHttpServer() throws Exception { httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); ThreadFactory threadFactory = EsExecutors.daemonThreadFactory("[" + ESMockAPIBasedRepositoryIntegTestCase.class.getName() + "]"); executorService = EsExecutors.newScaling(ESMockAPIBasedRepositoryIntegTestCase.class.getName(), 0, 2, 60, TimeUnit.SECONDS, threadFactory, new ThreadContext(Settings.EMPTY)); - httpServer.setExecutor(executorService); + httpServer.setExecutor(r -> { + executorService.execute(() -> { + try { + r.run(); + } catch (Throwable t) { + log.error("Error in execution on mock http server IO thread", t); + throw t; + } + }); + }); httpServer.start(); } From 8e036f0826959582a9141d43fc9a054815a49fcb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 13:58:59 +0200 Subject: [PATCH 139/142] Use 
IOUtils.close --- .../encrypted/ChainingInputStream.java | 33 ++++++------------- .../DecryptionPacketsInputStream.java | 12 +++++-- .../EncryptionPacketsInputStream.java | 12 +++++-- 3 files changed, 30 insertions(+), 27 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java index d68ef0217a730..0ee406950ffc6 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/ChainingInputStream.java @@ -5,18 +5,17 @@ */ package org.elasticsearch.repositories.encrypted; -import java.io.Closeable; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.core.internal.io.IOUtils; + import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.SequenceInputStream; -import java.util.Arrays; import java.util.Objects; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.Nullable; - /** * A {@code ChainingInputStream} concatenates multiple component input streams into a * single input stream. @@ -132,25 +131,13 @@ InputStream nextComponent(InputStream currentComponentIn) throws IOException { @Override public void close() throws IOException { - IOException IOExceptions = null; + Exception superException = null; try { super.close(); - } catch (IOException e) { - IOExceptions = e; - } - for (Closeable closeable : Arrays.asList(first, second)) { - try { - closeable.close(); - } catch (IOException e) { - if (IOExceptions != null) { - IOExceptions.addSuppressed(e); - } else { - IOExceptions = e; - } - } - } - if (IOExceptions != null) { - throw IOExceptions; + } catch (Exception e) { + superException = e; + } finally { + IOUtils.close(superException, first, second); } } }; diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java index 7ccecf801ebb5..2d77a21020d22 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/DecryptionPacketsInputStream.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.core.internal.io.IOUtils; + import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; @@ -125,8 +127,14 @@ public void reset() throws IOException { @Override public void close() throws IOException { - super.close(); - source.close(); + Exception superException = null; + try { + super.close(); + } catch (IOException e) { + superException = e; + } finally { + IOUtils.close(superException, source); + } } private int decrypt(PrefixInputStream packetInputStream) throws IOException { diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java 
b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java index 27178d07ac534..0c39da326077d 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptionPacketsInputStream.java @@ -6,6 +6,8 @@ package org.elasticsearch.repositories.encrypted; +import org.elasticsearch.core.internal.io.IOUtils; + import javax.crypto.Cipher; import javax.crypto.CipherInputStream; import javax.crypto.NoSuchPaddingException; @@ -173,8 +175,14 @@ public void reset() throws IOException { @Override public void close() throws IOException { - super.close(); - source.close(); + Exception superException = null; + try { + super.close(); + } catch (IOException e) { + superException = e; + } finally { + IOUtils.close(superException, source); + } } private static Cipher getPacketEncryptionCipher(SecretKey secretKey, byte[] packetIv) throws IOException { From 262b3e97da17942846c5acce5b3af4ca3019e8c4 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 14:30:42 +0200 Subject: [PATCH 140/142] Refactor validateRepositoryPasswordHash --- .../encrypted/EncryptedRepository.java | 41 ++++++++++++------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index e8570588957e7..4c34d086f575a 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -67,7 +67,6 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; import java.util.function.Supplier; public final class EncryptedRepository extends BlobStoreRepository { @@ -271,11 +270,15 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera int totalShards, List shardFailures, long repositoryStateId, boolean includeGlobalState, MetaData clusterMetaData, Map userMetadata, Version repositoryMetaVersion, ActionListener listener) { - validateRepositoryPasswordHash(userMetadata, listener::onFailure); - if (userMetadata != null && userMetadata.containsKey(PASSWORD_HASH_RESERVED_USER_METADATA_KEY)) { + try { + validateRepositoryPasswordHash(userMetadata); // remove the repository password hash from the snapshot metadata, after all repository password verifications // have completed, so that the hash is not displayed in the API response to the user + userMetadata = new HashMap<>(userMetadata); userMetadata.remove(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); + } catch (Exception passValidationException) { + listener.onFailure(passValidationException); + return; } super.finalizeSnapshot(snapshotId, shardGenerations, startTime, failure, totalShards, shardFailures, repositoryStateId, includeGlobalState, clusterMetaData, userMetadata, repositoryMetaVersion, listener); @@ -285,7 +288,12 @@ public void finalizeSnapshot(SnapshotId snapshotId, ShardGenerations shardGenera public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, 
IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, Version repositoryMetaVersion, Map userMetadata, ActionListener listener) { - validateRepositoryPasswordHash(userMetadata, listener::onFailure); + try { + validateRepositoryPasswordHash(userMetadata); + } catch (Exception passValidationException) { + listener.onFailure(passValidationException); + return; + } super.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, repositoryMetaVersion, userMetadata, listener); } @@ -771,25 +779,28 @@ private static String computeSaltedPBKDF2Hash(byte[] salt, char[] password) { } /** - * Called before every snapshot operation on every node to validate that the snapshot metadata contains a password hash - * that matches up with the repository password on the local node. + * Called before the shard snapshot and finalize operations, on the data and master nodes. This validates that the repository + * password hash of the master node that started the snapshot operation matches with the repository password on the data nodes. * * @param snapshotUserMetadata the snapshot metadata to verify - * @param exception the exception handler to call when the repository password check fails + * @throws RepositoryException if the repository password on the local node mismatches or cannot be verified from the + * master's password hash from {@code snapshotUserMetadata} */ - private void validateRepositoryPasswordHash(Map snapshotUserMetadata, Consumer exception) { - Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); + private void validateRepositoryPasswordHash(Map snapshotUserMetadata) throws RepositoryException { + if (snapshotUserMetadata == null) { + throw new RepositoryException(metadata.name(), "Unexpected fatal internal error", + new IllegalStateException("Null snapshot metadata")); + } + final Object repositoryPasswordHash = snapshotUserMetadata.get(PASSWORD_HASH_RESERVED_USER_METADATA_KEY); if (repositoryPasswordHash == null || (false == repositoryPasswordHash instanceof String)) { - exception.accept(new RepositoryException(metadata.name(), "Unexpected fatal internal error", - new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String"))); - return; + throw new RepositoryException(metadata.name(), "Unexpected fatal internal error", + new IllegalStateException("Snapshot metadata does not contain the repository password hash as a String")); } if (false == passwordHashVerifier.verify((String) repositoryPasswordHash)) { - exception.accept(new RepositoryException(metadata.name(), + throw new RepositoryException(metadata.name(), "Repository password mismatch. 
The local node's value of the keystore secure setting [" + EncryptedRepositoryPlugin.ENCRYPTION_PASSWORD_SETTING.getConcreteSettingForNamespace(metadata.name()).getKey() + - "] is different from the elected master node, which started the snapshot operation")); - return; + "] is different from the elected master node, which started the snapshot operation"); } } From a7e2dacb1eb0c76b9709066e4e6fd24a3752f650 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 25 Feb 2020 15:15:46 +0200 Subject: [PATCH 141/142] Log password validation exceptions --- .../repositories/encrypted/EncryptedRepository.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java index 4c34d086f575a..e577c110116a0 100644 --- a/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java +++ b/x-pack/plugin/repository-encrypted/src/main/java/org/elasticsearch/repositories/encrypted/EncryptedRepository.java @@ -824,20 +824,24 @@ boolean verify(String saltedHash) { Objects.requireNonNull(saltedHash); // first check if this exact hash has been checked before if (saltedHash.equals(lastVerifiedHash.get())) { + logger.debug("The repository salted password hash [" + saltedHash + "] is locally cached as VALID"); return true; } String[] parts = saltedHash.split(":"); + // the hash has an invalid format if (parts == null || parts.length != 2) { - // the hash has an invalid format + logger.error("Unrecognized format for the repository password hash [" + saltedHash + "]"); return false; } String salt = parts[0]; + logger.debug("Computing repository password hash"); String computedHash = computeSaltedPBKDF2Hash(Base64.getUrlDecoder().decode(salt.getBytes(StandardCharsets.UTF_8)), password); if (false == computedHash.equals(saltedHash)) { return false; } // remember last successfully verified hash lastVerifiedHash.set(computedHash); + logger.debug("Repository password hash [" + saltedHash + "] validated successfully and is now locally cached"); return true; } From 856cfe38f0bb2a4bd5e7b3fbc7c39a608ed99dd6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 5 Mar 2020 22:46:55 +0200 Subject: [PATCH 142/142] BlobPathTests --- .../java/org/elasticsearch/common/blobstore/BlobPathTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java index 48caddf31a505..ec846ca30690e 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java @@ -38,5 +38,4 @@ public void testBuildAsString() { path = path.add("d/"); assertThat(path.buildAsString(), is("a/b/c/d/")); } - }
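For context on the salted password hash handled in PATCH 140 and PATCH 141: HashVerifier#verify splits the stored value on ":" into a URL-safe Base64 salt and a PBKDF2 hash, recomputes the hash from the local repository password via computeSaltedPBKDF2Hash, compares the full "salt:hash" string, and caches the last successfully verified hash so repeated snapshot operations skip the PBKDF2 cost. The snippet below is a minimal, self-contained sketch of that same salt-then-PBKDF2 pattern, not the plugin's implementation; the algorithm name, iteration count, key length, and salt size are illustrative assumptions rather than the plugin's actual constants.

import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Base64;

import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;

public class SaltedPbkdf2HashSketch {

    // Illustrative parameters only; the plugin's actual algorithm, iteration count and key length may differ.
    private static final String ALGORITHM = "PBKDF2WithHmacSHA512";
    private static final int ITERATIONS = 10_000;
    private static final int KEY_LENGTH_BITS = 512;

    // Produces a "salt:hash" pair, both parts URL-safe Base64, matching the split(":") format that verify() expects.
    static String computeSaltedHash(byte[] salt, char[] password) throws Exception {
        PBEKeySpec spec = new PBEKeySpec(password, salt, ITERATIONS, KEY_LENGTH_BITS);
        byte[] hash = SecretKeyFactory.getInstance(ALGORITHM).generateSecret(spec).getEncoded();
        Base64.Encoder encoder = Base64.getUrlEncoder().withoutPadding();
        return encoder.encodeToString(salt) + ":" + encoder.encodeToString(hash);
    }

    // Recomputes the hash with the decoded salt and compares the complete salted string,
    // mirroring the computedHash.equals(saltedHash) check in HashVerifier#verify.
    static boolean verify(String saltedHash, char[] password) throws Exception {
        String[] parts = saltedHash.split(":");
        if (parts.length != 2) {
            return false; // unrecognized format
        }
        byte[] salt = Base64.getUrlDecoder().decode(parts[0].getBytes(StandardCharsets.UTF_8));
        return computeSaltedHash(salt, password).equals(saltedHash);
    }

    public static void main(String[] args) throws Exception {
        byte[] salt = new byte[32];
        new SecureRandom().nextBytes(salt);
        char[] password = "repository-password".toCharArray();
        String saltedHash = computeSaltedHash(salt, password);
        System.out.println("same password verifies:  " + verify(saltedHash, password));
        System.out.println("wrong password verifies: " + verify(saltedHash, "wrong-password".toCharArray()));
    }
}

A production scheme would typically also embed the iteration count in the stored string so it can be raised later without invalidating existing hashes; the sketch omits that for brevity.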