
Remove changes from other branches
carlosdelest committed Jan 10, 2024
1 parent 6ec089e commit 85eeec0
Showing 8 changed files with 11 additions and 155 deletions.
@@ -55,7 +55,6 @@
 
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -572,7 +571,7 @@ public IndexMetadata randomCreate(String name) {
             @Override
             public IndexMetadata randomChange(IndexMetadata part) {
                 IndexMetadata.Builder builder = IndexMetadata.builder(part);
-                switch (randomIntBetween(0, 3)) {
+                switch (randomIntBetween(0, 2)) {
                     case 0:
                         builder.settings(Settings.builder().put(part.getSettings()).put(randomSettings(Settings.EMPTY)));
                         break;
@@ -586,34 +585,11 @@ public IndexMetadata randomChange(IndexMetadata part) {
                     case 2:
                         builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0));
                         break;
-                    case 3:
-                        builder.fieldsForModels(randomFieldsForModels());
-                        break;
                     default:
                         throw new IllegalArgumentException("Shouldn't be here");
                 }
                 return builder.build();
             }
-
-            /**
-             * Generates a random fieldsForModels map
-             */
-            private Map<String, Set<String>> randomFieldsForModels() {
-                if (randomBoolean()) {
-                    return null;
-                }
-
-                Map<String, Set<String>> fieldsForModels = new HashMap<>();
-                for (int i = 0; i < randomIntBetween(0, 5); i++) {
-                    Set<String> fields = new HashSet<>();
-                    for (int j = 0; j < randomIntBetween(1, 4); j++) {
-                        fields.add(randomAlphaOfLengthBetween(4, 10));
-                    }
-                    fieldsForModels.put(randomAlphaOfLengthBetween(4, 10), fields);
-                }
-
-                return fieldsForModels;
-            }
         });
     }

@@ -40,7 +40,6 @@
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -83,8 +82,6 @@ public void testIndexMetadataSerialization() throws IOException {
         IndexMetadataStats indexStats = randomBoolean() ? randomIndexStats(numShard) : null;
         Double indexWriteLoadForecast = randomBoolean() ? randomDoubleBetween(0.0, 128, true) : null;
         Long shardSizeInBytesForecast = randomBoolean() ? randomLongBetween(1024, 10240) : null;
-        Map<String, Set<String>> fieldsForModels = randomFieldsForModels(true);
-
         IndexMetadata metadata = IndexMetadata.builder("foo")
             .settings(indexSettings(numShard, numberOfReplicas).put("index.version.created", 1))
             .creationDate(randomLong())
@@ -108,7 +105,6 @@ public void testIndexMetadataSerialization() throws IOException {
             .stats(indexStats)
             .indexWriteLoadForecast(indexWriteLoadForecast)
             .shardSizeInBytesForecast(shardSizeInBytesForecast)
-            .fieldsForModels(fieldsForModels)
             .build();
         assertEquals(system, metadata.isSystem());
 
@@ -142,7 +138,6 @@ public void testIndexMetadataSerialization() throws IOException {
         assertEquals(metadata.getStats(), fromXContentMeta.getStats());
         assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad());
        assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes());
-        assertEquals(metadata.getFieldsForModels(), fromXContentMeta.getFieldsForModels());
 
         final BytesStreamOutput out = new BytesStreamOutput();
         metadata.writeTo(out);
@@ -164,9 +159,8 @@ public void testIndexMetadataSerialization() throws IOException {
             assertEquals(metadata.getCustomData(), deserialized.getCustomData());
             assertEquals(metadata.isSystem(), deserialized.isSystem());
             assertEquals(metadata.getStats(), deserialized.getStats());
-            assertEquals(metadata.getForecastedWriteLoad(), deserialized.getForecastedWriteLoad());
-            assertEquals(metadata.getForecastedShardSizeInBytes(), deserialized.getForecastedShardSizeInBytes());
-            assertEquals(metadata.getFieldsForModels(), deserialized.getFieldsForModels());
+            assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad());
+            assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes());
         }
     }
 
@@ -550,37 +544,10 @@ public void testPartialIndexReceivesDataFrozenTierPreference() {
         }
     }
 
-    public void testFieldsForModels() {
-        Settings.Builder settings = indexSettings(IndexVersion.current(), randomIntBetween(1, 8), 0);
-        IndexMetadata idxMeta1 = IndexMetadata.builder("test").settings(settings).build();
-        assertThat(idxMeta1.getFieldsForModels(), equalTo(Map.of()));
-
-        Map<String, Set<String>> fieldsForModels = randomFieldsForModels(false);
-        IndexMetadata idxMeta2 = IndexMetadata.builder(idxMeta1).fieldsForModels(fieldsForModels).build();
-        assertThat(idxMeta2.getFieldsForModels(), equalTo(fieldsForModels));
-    }
-
     private static Settings indexSettingsWithDataTier(String dataTier) {
         return indexSettings(IndexVersion.current(), 1, 0).put(DataTier.TIER_PREFERENCE, dataTier).build();
     }
 
-    private static Map<String, Set<String>> randomFieldsForModels(boolean allowNull) {
-        if (allowNull && randomBoolean()) {
-            return null;
-        }
-
-        Map<String, Set<String>> fieldsForModels = new HashMap<>();
-        for (int i = 0; i < randomIntBetween(0, 5); i++) {
-            Set<String> fields = new HashSet<>();
-            for (int j = 0; j < randomIntBetween(1, 4); j++) {
-                fields.add(randomAlphaOfLengthBetween(4, 10));
-            }
-            fieldsForModels.put(randomAlphaOfLengthBetween(4, 10), fields);
-        }
-
-        return fieldsForModels;
-    }
-
     private IndexMetadataStats randomIndexStats(int numberOfShards) {
         IndexWriteLoad.Builder indexWriteLoadBuilder = IndexWriteLoad.builder(numberOfShards);
         int numberOfPopulatedWriteLoads = randomIntBetween(0, numberOfShards);

@@ -16,7 +16,6 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import static java.util.Collections.emptyList;
@@ -36,21 +35,13 @@ public void testEmpty() {
         Collection<String> names = lookup.getMatchingFieldNames("foo");
         assertNotNull(names);
         assertThat(names, hasSize(0));
-
-        Map<String, Set<String>> fieldsForModels = lookup.getFieldsForModels();
-        assertNotNull(fieldsForModels);
-        assertTrue(fieldsForModels.isEmpty());
     }
 
     public void testAddNewField() {
         MockFieldMapper f = new MockFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f), emptyList(), Collections.emptyList());
         assertNull(lookup.get("bar"));
         assertEquals(f.fieldType(), lookup.get("foo"));
-
-        Map<String, Set<String>> fieldsForModels = lookup.getFieldsForModels();
-        assertNotNull(fieldsForModels);
-        assertTrue(fieldsForModels.isEmpty());
     }
 
     public void testAddFieldAlias() {
@@ -430,19 +421,6 @@ public void testRuntimeFieldNameOutsideContext() {
         }
     }
 
-    public void testInferenceModelFieldType() {
-        MockFieldMapper f = new MockFieldMapper(new MockInferenceModelFieldType("foo", "bar"));
-        FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f), emptyList(), Collections.emptyList());
-        assertEquals(f.fieldType(), lookup.get("foo"));
-        assertEquals(Collections.emptySet(), lookup.getFieldsForModel("baz"));
-        assertEquals(Collections.singleton("foo"), lookup.getFieldsForModel("bar"));
-
-        Map<String, Set<String>> fieldsForModels = lookup.getFieldsForModels();
-        assertNotNull(fieldsForModels);
-        assertEquals(1, fieldsForModels.size());
-        assertEquals(Collections.singleton("foo"), fieldsForModels.get("bar"));
-    }
-
     private static FlattenedFieldMapper createFlattenedMapper(String fieldName) {
         return new FlattenedFieldMapper.Builder(fieldName).build(MapperBuilderContext.root(false, false));
     }

@@ -26,7 +26,6 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.stream.Collectors;
 
 import static java.util.Collections.emptyList;
@@ -122,8 +121,6 @@ public void testEmptyMappingLookup() {
         assertEquals(0, mappingLookup.getMapping().getMetadataMappersMap().size());
         assertFalse(mappingLookup.fieldMappers().iterator().hasNext());
         assertEquals(0, mappingLookup.getMatchingFieldNames("*").size());
-        assertNotNull(mappingLookup.getFieldsForModels());
-        assertTrue(mappingLookup.getFieldsForModels().isEmpty());
     }
 
     public void testValidateDoesNotShadow() {
@@ -191,22 +188,6 @@ public MetricType getMetricType() {
         );
     }
 
-    public void testFieldsForModels() {
-        MockInferenceModelFieldType fieldType = new MockInferenceModelFieldType("test_field_name", "test_model_id");
-        MappingLookup mappingLookup = createMappingLookup(
-            Collections.singletonList(new MockFieldMapper(fieldType)),
-            emptyList(),
-            emptyList()
-        );
-        assertEquals(1, size(mappingLookup.fieldMappers()));
-        assertEquals(fieldType, mappingLookup.getFieldType("test_field_name"));
-
-        Map<String, Set<String>> fieldsForModels = mappingLookup.getFieldsForModels();
-        assertNotNull(fieldsForModels);
-        assertEquals(1, fieldsForModels.size());
-        assertEquals(Collections.singleton("test_field_name"), fieldsForModels.get("test_model_id"));
-    }
-
     private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
         try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
             CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);

This file was deleted.

@@ -1,9 +1,8 @@
 /*
  * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
  * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
  */
 
 package org.elasticsearch.xpack.core.inference.action;

@@ -1,9 +1,8 @@
 /*
  * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
  * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
 */
 
 package org.elasticsearch.xpack.core.inference.results;

@@ -33,8 +33,9 @@ public List<Route> routes() {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         String taskType = restRequest.param("task_type");
         String modelId = restRequest.param("model_id");
-        var request = InferenceAction.Request.parseRequest(modelId, taskType, restRequest.contentParser());
-
-        return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        try (var parser = restRequest.contentParser()) {
+            var request = InferenceAction.Request.parseRequest(modelId, taskType, parser);
+            return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        }
     }
 }
