serialize legacy as false for scan query for rolling downgrade/upgrade (apache#16793)

Fixes rolling downgrades/upgrades after apache#16659 by hard-coding "legacy":false in the scan query JSON, since it is a required property during deserialization on older data servers.
clintropolis authored and sreemanamala committed Aug 6, 2024
1 parent 00543b3 commit 4220ed8
Showing 179 changed files with 843 additions and 917 deletions.
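The compatibility problem comes down to Jackson deserialization on the older side of a mixed-version cluster: a pre-apache#16659 data server only accepts scan query JSON that carries a non-null "legacy" value, while a post-apache#16659 broker had stopped writing the field. A minimal sketch of that failure mode, assuming Jackson databind and a hypothetical OldScanQueryShape class rather than Druid's real ScanQuery:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LegacyFieldCompatCheck
{
  // Hypothetical stand-in for how an older data server reads the query; not Druid's actual ScanQuery.
  static class OldScanQueryShape
  {
    final boolean legacy;

    @JsonCreator
    public OldScanQueryShape(@JsonProperty("legacy") Boolean legacy)
    {
      if (legacy == null) {
        // Mirrors "legacy" being a required property during deserialization on older servers.
        throw new IllegalArgumentException("\"legacy\" must be provided");
      }
      this.legacy = legacy;
    }
  }

  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new ObjectMapper();

    try {
      // A newer broker that stopped writing the field: the older reader rejects the payload.
      mapper.readValue("{}", OldScanQueryShape.class);
    }
    catch (Exception e) {
      System.out.println("missing \"legacy\" fails: " + e.getClass().getSimpleName());
    }

    // Hard-coding "legacy":false on the writer keeps the older reader working.
    final OldScanQueryShape ok = mapper.readValue("{\"legacy\":false}", OldScanQueryShape.class);
    System.out.println("with \"legacy\":false -> legacy=" + ok.legacy);
  }
}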
@@ -301,15 +301,7 @@ public void testExplain() throws IOException
), SqlStatementResourceTest.makeOkRequest());

Assert.assertEquals(
"{PLAN=[{\"query\":"
+ "{\"queryType\":\"scan\","
+ "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},"
+ "\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},"
+ "\"resultFormat\":\"compactedList\","
+ "\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],"
+ "\"context\":{\"__resultFormat\":\"object\",\"executionMode\":\"ASYNC\",\"scanSignature\":\"[{\\\"name\\\":\\\"__time\\\",\\\"type\\\":\\\"LONG\\\"},{\\\"name\\\":\\\"cnt\\\",\\\"type\\\":\\\"LONG\\\"},{\\\"name\\\":\\\"dim1\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"dim2\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"dim3\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"m1\\\",\\\"type\\\":\\\"FLOAT\\\"},{\\\"name\\\":\\\"m2\\\",\\\"type\\\":\\\"DOUBLE\\\"},{\\\"name\\\":\\\"unique_dim1\\\",\\\"type\\\":\\\"COMPLEX<hyperUnique>\\\"}]\",\"sqlQueryId\":\"queryId\"},\"columnTypes\":[\"LONG\",\"LONG\",\"STRING\",\"STRING\",\"STRING\",\"FLOAT\",\"DOUBLE\",\"COMPLEX<hyperUnique>\"],\"granularity\":{\"type\":\"all\"}},\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}],\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"dim2\",\"outputColumn\":\"dim2\"},{\"queryColumn\":\"dim3\",\"outputColumn\":\"dim3\"},{\"queryColumn\":\"cnt\",\"outputColumn\":\"cnt\"},{\"queryColumn\":\"m1\",\"outputColumn\":\"m1\"},{\"queryColumn\":\"m2\",\"outputColumn\":\"m2\"},{\"queryColumn\":\"unique_dim1\",\"outputColumn\":\"unique_dim1\"}]}],"
+ " RESOURCES=[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}],"
+ " ATTRIBUTES={\"statementType\":\"SELECT\"}}",
"{PLAN=[{\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"context\":{\"__resultFormat\":\"object\",\"executionMode\":\"ASYNC\",\"scanSignature\":\"[{\\\"name\\\":\\\"__time\\\",\\\"type\\\":\\\"LONG\\\"},{\\\"name\\\":\\\"cnt\\\",\\\"type\\\":\\\"LONG\\\"},{\\\"name\\\":\\\"dim1\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"dim2\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"dim3\\\",\\\"type\\\":\\\"STRING\\\"},{\\\"name\\\":\\\"m1\\\",\\\"type\\\":\\\"FLOAT\\\"},{\\\"name\\\":\\\"m2\\\",\\\"type\\\":\\\"DOUBLE\\\"},{\\\"name\\\":\\\"unique_dim1\\\",\\\"type\\\":\\\"COMPLEX<hyperUnique>\\\"}]\",\"sqlQueryId\":\"queryId\"},\"columnTypes\":[\"LONG\",\"LONG\",\"STRING\",\"STRING\",\"STRING\",\"FLOAT\",\"DOUBLE\",\"COMPLEX<hyperUnique>\"],\"granularity\":{\"type\":\"all\"},\"legacy\":false},\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}],\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"dim2\",\"outputColumn\":\"dim2\"},{\"queryColumn\":\"dim3\",\"outputColumn\":\"dim3\"},{\"queryColumn\":\"cnt\",\"outputColumn\":\"cnt\"},{\"queryColumn\":\"m1\",\"outputColumn\":\"m1\"},{\"queryColumn\":\"m2\",\"outputColumn\":\"m2\"},{\"queryColumn\":\"unique_dim1\",\"outputColumn\":\"unique_dim1\"}]}], RESOURCES=[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}], ATTRIBUTES={\"statementType\":\"SELECT\"}}",
String.valueOf(SqlStatementResourceTest.getResultRowsFromResponse(response).get(0))
);
}
@@ -443,6 +443,18 @@ public List<ColumnType> getColumnTypes()
return columnTypes;
}

/**
* Prior to PR https://github.com/apache/druid/pull/16659 (Druid 31), data servers require
* the "legacy" parameter to be set to a non-null value. For compatibility with older data
* servers during rolling updates, we need to write out "false".
*/
@Deprecated
@JsonProperty("legacy")
public Boolean isLegacy()
{
return false;
}

@Override
public Ordering<ScanResultValue> getResultOrdering()
{
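On the write side, the hunk above is the whole mechanism: a constant, @Deprecated getter annotated with @JsonProperty("legacy") is enough for Jackson to emit "legacy":false on every serialization, with no backing field or setter. A stripped-down sketch of the same pattern, again with a hypothetical class rather than the real ScanQuery:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConstantLegacyPropertyDemo
{
  // Hypothetical shape of the newer query class; only the constant getter matters here.
  static class NewScanQueryShape
  {
    @JsonProperty("queryType")
    public String getQueryType()
    {
      return "scan";
    }

    // Constant getter: Jackson writes "legacy":false even though nothing stores the value.
    @Deprecated
    @JsonProperty("legacy")
    public Boolean isLegacy()
    {
      return false;
    }
  }

  public static void main(String[] args) throws Exception
  {
    // Output contains "legacy":false, e.g. {"queryType":"scan","legacy":false}
    System.out.println(new ObjectMapper().writeValueAsString(new NewScanQueryShape()));
  }
}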
@@ -47,12 +47,7 @@ public void testSerialization() throws Exception
+ "\"limit\":3}";

String current =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
+ "\"resultFormat\":\"list\","
+ "\"limit\":3,"
+ "\"columns\":[\"market\",\"quality\",\"index\"],"
+ "\"granularity\":{\"type\":\"all\"}}";
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},\"resultFormat\":\"list\",\"limit\":3,\"columns\":[\"market\",\"quality\",\"index\"],\"granularity\":{\"type\":\"all\"},\"legacy\":false}";

ScanQuery query = new ScanQuery(
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
@@ -80,13 +75,7 @@ public void testSerialization() throws Exception
public void testSerializationWithTimeOrder() throws Exception
{
String originalJson =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
+ "\"resultFormat\":\"list\","
+ "\"limit\":3,"
+ "\"order\":\"ascending\","
+ "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
+ "\"granularity\":{\"type\":\"all\"}}";
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},\"resultFormat\":\"list\",\"limit\":3,\"order\":\"ascending\",\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],\"granularity\":{\"type\":\"all\"},\"legacy\":false}";

ScanQuery expectedQuery = new ScanQuery(
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
@@ -118,13 +107,7 @@ public void testSerializationWithTimeOrder() throws Exception
public void testSerializationWithOrderBy() throws Exception
{
String originalJson =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
+ "\"resultFormat\":\"list\","
+ "\"limit\":3,"
+ "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],"
+ "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
+ "\"granularity\":{\"type\":\"all\"}}";
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},\"resultFormat\":\"list\",\"limit\":3,\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],\"granularity\":{\"type\":\"all\"},\"legacy\":false}";

ScanQuery expectedQuery = new ScanQuery(
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
@@ -19,15 +19,19 @@

package org.apache.druid.query.scan;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.Druids;
import org.apache.druid.query.Query;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
@@ -45,6 +49,7 @@

public class ScanQueryTest
{
private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();
private static QuerySegmentSpec intervalSpec;
private static ScanResultValue s1;
private static ScanResultValue s2;
@@ -94,6 +99,20 @@ public static void setup()
);
}

@Test
public void testSerdeAndLegacyBackwardsCompat() throws JsonProcessingException
{
ScanQuery query = Druids.newScanQueryBuilder()
.columns(ImmutableList.of("__time", "quality"))
.dataSource("source")
.intervals(intervalSpec)
.build();
Assert.assertFalse(query.isLegacy());
String json = JSON_MAPPER.writeValueAsString(query);
Assert.assertTrue(json.contains("\"legacy\":false"));
Assert.assertEquals(query, JSON_MAPPER.readValue(json, Query.class));
}

@Test(expected = IllegalArgumentException.class)
public void testAscendingScanQueryWithInvalidColumns()
{