Merge branch 'master' into si/protection-header
gwbrown committed Aug 11, 2020
2 parents 3924661 + 9059cfb commit 8e5b29e
Showing 20 changed files with 105 additions and 78 deletions.
4 changes: 2 additions & 2 deletions build.gradle
@@ -174,8 +174,8 @@ tasks.register("verifyVersions") {
* after the backport of the backcompat code is complete.
*/

-boolean bwc_tests_enabled = false
-final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/57936" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = true
+final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
@@ -110,7 +110,7 @@ public void testParsingFromEsResponse() throws IOException {
assertThat(result.order(), equalTo(esIMD.order()));
assertThat(result.version(), equalTo(esIMD.version()));

-BytesArray mappingSource = new BytesArray(esIMD.mappings().uncompressed());
+BytesReference mappingSource = esIMD.mappings().uncompressed();
Map<String, Object> expectedMapping =
XContentHelper.convertToMap(mappingSource, true, xContentBuilder.contentType()).v2();
assertThat(result.mappings().sourceAsMap(), equalTo(expectedMapping.get("_doc")));
@@ -25,7 +25,6 @@
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -336,7 +335,7 @@ public static void toXContent(AliasMetadata aliasMetadata, XContentBuilder build
if (binary) {
builder.field("filter", aliasMetadata.filter.compressed());
} else {
builder.field("filter", XContentHelper.convertToMap(new BytesArray(aliasMetadata.filter().uncompressed()), true).v2());
builder.field("filter", XContentHelper.convertToMap(aliasMetadata.filter().uncompressed(), true).v2());
}
}
if (aliasMetadata.indexRouting() != null) {
@@ -22,6 +22,7 @@
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -33,6 +34,7 @@
import org.elasticsearch.indices.InvalidAliasNameException;

import java.io.IOException;
+import java.io.InputStream;
import java.util.function.Function;

import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
@@ -125,11 +127,13 @@ public void validateAliasFilter(String alias, String filter, QueryShardContext q
* provided {@link org.elasticsearch.index.query.QueryShardContext}
* @throws IllegalArgumentException if the filter is not valid
*/
-public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext,
-                                NamedXContentRegistry xContentRegistry) {
+public void validateAliasFilter(String alias, BytesReference filter, QueryShardContext queryShardContext,
+                                NamedXContentRegistry xContentRegistry) {
assert queryShardContext != null;
-try (XContentParser parser = XContentFactory.xContent(filter)
-        .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter)) {
+
+try (InputStream inputStream = filter.streamInput();
+     XContentParser parser = XContentFactory.xContentType(inputStream).xContent()
+         .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter.streamInput())) {
validateAliasFilter(parser, queryShardContext);
} catch (Exception e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
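Aside (not part of the commit): XContentFactory.xContentType(inputStream) sniffs the leading bytes of a mark/reset-capable stream to detect the content type. Note the two call sites in this commit differ slightly — here a second, fresh filter.streamInput() is handed to createParser, while the IndicesService change further down reuses the sniffed stream after the reset; both patterns appear to be accepted in the codebase.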
@@ -35,7 +35,6 @@
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
import org.elasticsearch.cluster.routing.allocation.IndexMetadataUpdater;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
@@ -345,7 +344,7 @@ public static APIBlock readFrom(StreamInput input) throws IOException {

public static final String INDEX_STATE_FILE_PREFIX = "state-";

-static final Version SYSTEM_INDEX_FLAG_ADDED = Version.V_8_0_0;
+static final Version SYSTEM_INDEX_FLAG_ADDED = Version.V_7_10_0;

private final int routingNumShards;
private final int routingFactor;
@@ -1316,15 +1315,15 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build
if (binary) {
builder.value(mmd.source().compressed());
} else {
-builder.map(XContentHelper.convertToMap(new BytesArray(mmd.source().uncompressed()), true).v2());
+builder.map(XContentHelper.convertToMap(mmd.source().uncompressed(), true).v2());
}
}
builder.endArray();
} else {
builder.startObject(KEY_MAPPINGS);
MappingMetadata mmd = indexMetadata.mapping();
if (mmd != null) {
Map<String, Object> mapping = XContentHelper.convertToMap(new BytesArray(mmd.source().uncompressed()), false).v2();
Map<String, Object> mapping = XContentHelper.convertToMap(mmd.source().uncompressed(), false).v2();
if (mapping.size() == 1 && mapping.containsKey(mmd.type())) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(mmd.type());
@@ -25,7 +25,6 @@
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -392,7 +391,7 @@ private static void toInnerXContent(IndexTemplateMetadata indexTemplateMetadata,

CompressedXContent m = indexTemplateMetadata.mappings();
if (m != null) {
Map<String, Object> documentMapping = XContentHelper.convertToMap(new BytesArray(m.uncompressed()), true).v2();
Map<String, Object> documentMapping = XContentHelper.convertToMap(m.uncompressed(), true).v2();
if (includeTypeName == false) {
documentMapping = reduceMapping(documentMapping);
}
@@ -23,7 +23,6 @@
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -169,7 +168,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
}
if (this.mappings != null) {
Map<String, Object> uncompressedMapping =
-    XContentHelper.convertToMap(new BytesArray(this.mappings.uncompressed()), true, XContentType.JSON).v2();
+    XContentHelper.convertToMap(this.mappings.uncompressed(), true, XContentType.JSON).v2();
if (uncompressedMapping.size() > 0) {
builder.field(MAPPINGS.getPreferredName());
builder.map(reduceMapping(uncompressedMapping));
@@ -19,7 +19,6 @@

package org.elasticsearch.common.compress;

-import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
@@ -94,21 +93,17 @@ public CompressedXContent(BytesReference data) throws IOException {
if (compressor != null) {
// already compressed...
this.bytes = BytesReference.toBytes(data);
-this.crc32 = crc32(new BytesArray(uncompressed()));
+this.crc32 = crc32(uncompressed());
} else {
-BytesStreamOutput out = new BytesStreamOutput();
-try (OutputStream compressedOutput = CompressorFactory.COMPRESSOR.streamOutput(out)) {
-    data.writeTo(compressedOutput);
-}
-this.bytes = BytesReference.toBytes(out.bytes());
+this.bytes = BytesReference.toBytes(CompressorFactory.COMPRESSOR.compress(data));
this.crc32 = crc32(data);
}
assertConsistent();
}

private void assertConsistent() {
assert CompressorFactory.compressor(new BytesArray(bytes)) != null;
-assert this.crc32 == crc32(new BytesArray(uncompressed()));
+assert this.crc32 == crc32(uncompressed());
}

public CompressedXContent(byte[] data) throws IOException {
@@ -130,16 +125,16 @@ public BytesReference compressedReference() {
}

/** Return the uncompressed bytes. */
-public byte[] uncompressed() {
+public BytesReference uncompressed() {
try {
-    return BytesReference.toBytes(CompressorFactory.uncompress(new BytesArray(bytes)));
+    return CompressorFactory.uncompress(new BytesArray(bytes));
} catch (IOException e) {
throw new IllegalStateException("Cannot decompress compressed string", e);
}
}

public String string() {
-    return new BytesRef(uncompressed()).utf8ToString();
+    return uncompressed().utf8ToString();
}

public static CompressedXContent readCompressedString(StreamInput in) throws IOException {
@@ -167,7 +162,7 @@ public boolean equals(Object o) {
return false;
}

-    return Arrays.equals(uncompressed(), that.uncompressed());
+    return uncompressed().equals(that.uncompressed());
}

@Override
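To make the CompressedXContent signature change concrete, a minimal caller-side sketch (the sample mapping JSON and variable names are illustrative, not from this commit; the usual bytes/compress/xcontent imports are assumed):

    // uncompressed() now returns a BytesReference rather than byte[], so call
    // sites drop the new BytesArray(...) wrapping seen on the removed lines above.
    CompressedXContent mapping = new CompressedXContent("{\"properties\":{}}"); // throws IOException
    BytesReference source = mapping.uncompressed();
    Map<String, Object> asMap = XContentHelper.convertToMap(source, true).v2();
    String json = mapping.string(); // now implemented as uncompressed().utf8ToString()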
@@ -39,4 +39,20 @@ public interface Compressor {
* output. Closing the returned {@link StreamOutput} will close the provided stream output.
*/
StreamOutput streamOutput(OutputStream out) throws IOException;

+/**
+ * Decompress bytes into a newly allocated buffer.
+ *
+ * @param bytesReference bytes to decompress
+ * @return decompressed bytes
+ */
+BytesReference uncompress(BytesReference bytesReference) throws IOException;
+
+/**
+ * Compress bytes into a newly allocated buffer.
+ *
+ * @param bytesReference bytes to compress
+ * @return compressed bytes
+ */
+BytesReference compress(BytesReference bytesReference) throws IOException;
}
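A round-trip sketch of the two new interface methods, exercised through CompressorFactory.COMPRESSOR (the DEFLATE implementation changed below; the input bytes are illustrative):

    BytesReference original = new BytesArray("{\"field\":\"value\"}");
    BytesReference compressed = CompressorFactory.COMPRESSOR.compress(original);
    BytesReference restored = CompressorFactory.COMPRESSOR.uncompress(compressed);
    assert restored.equals(original); // BytesReference equality compares content

Since both methods return newly allocated buffers (per the Javadoc above), the results can be retained after the call even if the implementation reuses internal scratch state.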
@@ -21,7 +21,6 @@

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

@@ -71,14 +70,7 @@ private static boolean isAncient(BytesReference bytes) {
*/
public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException {
Compressor compressor = compressor(Objects.requireNonNull(bytes, "the BytesReference must not be null"));
-BytesReference uncompressed;
-if (compressor != null) {
-    uncompressed = uncompress(bytes, compressor);
-} else {
-    uncompressed = bytes;
-}
-
-return uncompressed;
+return compressor == null ? bytes : compressor.uncompress(bytes);
}

/** Decompress the provided {@link BytesReference}. */
@@ -87,10 +79,6 @@ public static BytesReference uncompress(BytesReference bytes) throws IOException
if (compressor == null) {
throw new NotCompressedException();
}
-    return uncompress(bytes, compressor);
-}
-
-private static BytesReference uncompress(BytesReference bytes, Compressor compressor) throws IOException {
-    return Streams.readFully(compressor.streamInput(bytes.streamInput()));
+    return compressor.uncompress(bytes);
}
}
@@ -20,6 +20,7 @@
package org.elasticsearch.common.compress;

import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -36,6 +37,7 @@
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
+import java.util.zip.InflaterOutputStream;

/**
* {@link Compressor} implementation based on the DEFLATE compression algorithm.
@@ -129,4 +131,41 @@ public void close() throws IOException {
}
};
}

+// Reusable Inflater reference. Note: This is not used for the decompressing stream wrapper because we don't have strong guarantees
+// about the scope in which the stream wrapper is used.
+private static final ThreadLocal<Inflater> inflaterRef = ThreadLocal.withInitial(() -> new Inflater(true));
+
+private static final ThreadLocal<BytesStreamOutput> baos = ThreadLocal.withInitial(BytesStreamOutput::new);
+
+@Override
+public BytesReference uncompress(BytesReference bytesReference) throws IOException {
+    final BytesStreamOutput buffer = baos.get();
+    final Inflater inflater = inflaterRef.get();
+    inflater.reset();
+    try (InflaterOutputStream ios = new InflaterOutputStream(buffer, inflater)) {
+        bytesReference.slice(HEADER.length, bytesReference.length() - HEADER.length).writeTo(ios);
+    }
+    final BytesReference res = buffer.copyBytes();
+    buffer.reset();
+    return res;
+}
+
+// Reusable Deflater reference. Note: This is not used for the compressing stream wrapper because we don't have strong guarantees
+// about the scope in which the stream wrapper is used.
+private static final ThreadLocal<Deflater> deflaterRef = ThreadLocal.withInitial(() -> new Deflater(LEVEL, true));
+
+@Override
+public BytesReference compress(BytesReference bytesReference) throws IOException {
+    final BytesStreamOutput buffer = baos.get();
+    final Deflater deflater = deflaterRef.get();
+    deflater.reset();
+    buffer.write(HEADER);
+    try (DeflaterOutputStream dos = new DeflaterOutputStream(buffer, deflater, true)) {
+        bytesReference.writeTo(dos);
+    }
+    final BytesReference res = buffer.copyBytes();
+    buffer.reset();
+    return res;
+}
}
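A note on the design above: Deflater and Inflater wrap native zlib state that is relatively costly to allocate, so one instance is kept per thread and reset() before each use; as the in-code comments say, the stream-wrapper code paths deliberately don't share them because the wrapper's lifetime isn't bounded. Sharing the per-thread BytesStreamOutput is safe because copyBytes() snapshots the contents before reset(), so the returned BytesReference doesn't alias the thread-local buffer.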
@@ -138,6 +138,7 @@

import java.io.Closeable;
import java.io.IOException;
+import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
@@ -1491,9 +1492,10 @@ interface IndexDeletionAllowedPredicate {
public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String> resolvedExpressions) {
/* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch
* of dependencies we pass in a function that can perform the parsing. */
CheckedFunction<byte[], QueryBuilder, IOException> filterParser = bytes -> {
try (XContentParser parser = XContentFactory.xContent(bytes)
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, bytes)) {
CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser = bytes -> {
try (InputStream inputStream = bytes.streamInput();
XContentParser parser = XContentFactory.xContentType(inputStream).xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream)) {
return parseInnerQueryBuilder(parser);
}
};
@@ -67,8 +67,6 @@
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.io.Streams;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
@@ -1303,12 +1301,8 @@ private void doGetRepositoryData(ActionListener<RepositoryData> listener) {
private void cacheRepositoryData(BytesReference updated, long generation) {
if (cacheRepositoryData && bestEffortConsistency == false) {
final BytesReference serialized;
-BytesStreamOutput out = new BytesStreamOutput();
try {
-    try (StreamOutput tmp = CompressorFactory.COMPRESSOR.streamOutput(out)) {
-        updated.writeTo(tmp);
-    }
-    serialized = out.bytes();
+    serialized = CompressorFactory.COMPRESSOR.compress(updated);
final int len = serialized.length();
if (len > ByteSizeUnit.KB.toBytes(500)) {
logger.debug("Not caching repository data of size [{}] for repository [{}] because it is larger than 500KB in" +
@@ -427,7 +427,7 @@ public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException {
* The list of filtering aliases should be obtained by calling Metadata.filteringAliases.
* Returns {@code null} if no filtering is required.</p>
*/
public static QueryBuilder parseAliasFilter(CheckedFunction<byte[], QueryBuilder, IOException> filterParser,
public static QueryBuilder parseAliasFilter(CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser,
IndexMetadata metadata, String... aliasNames) {
if (aliasNames == null || aliasNames.length == 0) {
return null;
@@ -38,7 +38,7 @@ public class DeflateCompressedXContentTests extends ESTestCase {

private void assertEquals(CompressedXContent s1, CompressedXContent s2) {
Assert.assertEquals(s1, s2);
-assertArrayEquals(s1.uncompressed(), s2.uncompressed());
+assertEquals(s1.uncompressed(), s2.uncompressed());
assertEquals(s1.hashCode(), s2.hashCode());
}
