diff --git a/base/bucket_n1ql_test.go b/base/bucket_n1ql_test.go
index 0c9f41808a..7fc9be4687 100644
--- a/base/bucket_n1ql_test.go
+++ b/base/bucket_n1ql_test.go
@@ -358,7 +358,7 @@ func TestCreateAndDropIndex(t *testing.T) {
 		t.Fatalf("Requires bucket to be N1QLStore")
 	}
 
-	createExpression := SyncPropertyName + ".sequence"
+	createExpression := SyncPropertyName + ".`sequence`"
 	err := n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
 	if err != nil {
 		t.Fatalf("Error creating index: %s", err)
@@ -389,7 +389,7 @@ func TestCreateDuplicateIndex(t *testing.T) {
 		t.Fatalf("Requires bucket to be N1QLStore")
 	}
 
-	createExpression := SyncPropertyName + ".sequence"
+	createExpression := SyncPropertyName + ".`sequence`"
 	err := n1qlStore.CreateIndex(ctx, "testIndexDuplicateSequence", createExpression, "", testN1qlOptions)
 	if err != nil {
 		t.Fatalf("Error creating index: %s", err)
@@ -425,7 +425,7 @@ func TestCreateAndDropIndexSpecialCharacters(t *testing.T) {
 		t.Fatalf("Requires bucket to be N1QLStore")
 	}
 
-	createExpression := SyncPropertyName + ".sequence"
+	createExpression := SyncPropertyName + ".`sequence`"
 	err := n1qlStore.CreateIndex(ctx, "testIndex-sequence", createExpression, "", testN1qlOptions)
 	if err != nil {
 		t.Fatalf("Error creating index: %s", err)
@@ -465,7 +465,7 @@ func TestDeferredCreateIndex(t *testing.T) {
 		DeferBuild: true,
 	}
 
-	createExpression := SyncPropertyName + ".sequence"
+	createExpression := SyncPropertyName + ".`sequence`"
 	err := n1qlStore.CreateIndex(ctx, indexName, createExpression, "", deferN1qlOptions)
 	if err != nil {
 		t.Fatalf("Error creating index: %s", err)
@@ -514,7 +514,7 @@ func TestBuildDeferredIndexes(t *testing.T) {
 	}
 
 	// Create a deferred and a non-deferred index
-	createExpression := SyncPropertyName + ".sequence"
+	createExpression := SyncPropertyName + ".`sequence`"
 	err := n1qlStore.CreateIndex(ctx, deferredIndexName, createExpression, "", deferN1qlOptions)
 	if err != nil {
 		t.Errorf("Error creating index: %s", err)
@@ -579,11 +579,8 @@ func TestCreateAndDropIndexErrors(t *testing.T) {
 	}
 
 	// Create index
-	createExpression = "_sync.sequence"
-	err = n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
-	if err != nil {
-		t.Fatalf("Error creating index: %s", err)
-	}
+	createExpression = "_sync.`sequence`"
+	require.NoError(t, n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions))
 
 	// Attempt to recreate duplicate index
 	err = n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
diff --git a/db/indexes.go b/db/indexes.go
index 337201b0b6..02b019a4e2 100644
--- a/db/indexes.go
+++ b/db/indexes.go
@@ -114,9 +114,9 @@ var (
 	indexExpressions = map[SGIndexType]string{
 		IndexAccess:     "ALL (ARRAY (op.name) FOR op IN OBJECT_PAIRS($sync.access) END)",
 		IndexRoleAccess: "ALL (ARRAY (op.name) FOR op IN OBJECT_PAIRS($sync.role_access) END)",
-		IndexChannels: "ALL (ARRAY [op.name, LEAST($sync.sequence,op.val.seq), IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] FOR op IN OBJECT_PAIRS($sync.channels) END), " +
-			"$sync.rev, $sync.sequence, $sync.flags",
-		IndexAllDocs: "$sync.sequence, $sync.rev, $sync.flags, $sync.deleted",
+		IndexChannels: "ALL (ARRAY [op.name, LEAST($sync.`sequence`,op.val.seq), IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] FOR op IN OBJECT_PAIRS($sync.channels) END), " +
+			"$sync.rev, $sync.`sequence`, $sync.flags",
+		IndexAllDocs: "$sync.`sequence`, $sync.rev, $sync.flags, $sync.deleted",
 		IndexTombstones: "$sync.tombstoned_at",
 		IndexSyncDocs:   "META().id",
 		IndexUser:       "META().id, name, email, disabled",
@@ -166,12 +166,12 @@ var (
 			"USE INDEX ($idx) " +
 			"WHERE ANY op in OBJECT_PAIRS($relativesync.role_access) SATISFIES op.name = 'foo' end " +
 			"LIMIT 1",
-		IndexChannels: "SELECT [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)][1] AS sequence " +
+		IndexChannels: "SELECT [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)][1] AS `sequence` " +
 			"FROM %s AS %s " +
 			"USE INDEX ($idx) " +
 			"UNNEST OBJECT_PAIRS($relativesync.channels) AS op " +
-			"WHERE [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] BETWEEN ['foo', 0] AND ['foo', 1] " +
-			"ORDER BY [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] " +
+			"WHERE [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] BETWEEN ['foo', 0] AND ['foo', 1] " +
+			"ORDER BY [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] " +
 			"LIMIT 1",
 	}
 )
diff --git a/db/query.go b/db/query.go
index a2511d134a..28c9685cd2 100644
--- a/db/query.go
+++ b/db/query.go
@@ -99,19 +99,19 @@ const (
 var QueryChannels = SGQuery{
 	name: QueryTypeChannels,
 	statement: fmt.Sprintf(
-		"SELECT [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][1] AS seq, "+
-			"[op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][2] AS rRev, "+
-			"[op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][3] AS rDel, "+
+		"SELECT [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][1] AS seq, "+
+			"[op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][2] AS rRev, "+
+			"[op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][3] AS rDel, "+
 			"$sync.rev AS rev, "+
 			"$sync.flags AS flags, "+
 			"META(%s).id AS id "+
 			"FROM %s AS %s "+
 			"USE INDEX ($idx) "+
 			"UNNEST OBJECT_PAIRS($relativesync.channels) AS op "+
-			"WHERE ([op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] "+
+			"WHERE ([op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] "+
 			"BETWEEN [$channelName, $startSeq] AND [$channelName, $endSeq]) "+
 			"%s"+
-			"ORDER BY [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)]",
+			"ORDER BY [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)]",
 		base.KeyspaceQueryAlias,
 		base.KeyspaceQueryToken,
 		base.KeyspaceQueryAlias,
 		activeOnlyFilter),
@@ -121,15 +121,15 @@ var QueryStarChannel = SGQuery{
 	name: QueryTypeChannelsStar,
 	statement: fmt.Sprintf(
-		"SELECT $sync.sequence AS seq, "+
+		"SELECT $sync.`sequence` AS seq, "+
 			"$sync.rev AS rev, "+
 			"$sync.flags AS flags, "+
 			"META(%s).id AS id "+
 			"FROM %s AS %s "+
 			"USE INDEX ($idx) "+
-			"WHERE $sync.sequence >= $startSeq AND $sync.sequence < $endSeq "+
+			"WHERE $sync.`sequence` >= $startSeq AND $sync.`sequence` < $endSeq "+
 			"AND META().id NOT LIKE '%s' %s"+
-			"ORDER BY $sync.sequence",
+			"ORDER BY $sync.`sequence`",
 		base.KeyspaceQueryAlias,
 		base.KeyspaceQueryToken,
 		base.KeyspaceQueryAlias,
 		SyncDocWildcard,
 		activeOnlyFilter),
@@ -139,13 +139,13 @@ var QuerySequences = SGQuery{
 	name: QueryTypeSequences,
 	statement: fmt.Sprintf(
-		"SELECT $sync.sequence AS seq, "+
+		"SELECT $sync.`sequence` AS seq, "+
 			"$sync.rev AS rev, "+
 			"$sync.flags AS flags, "+
 			"META(%s).id AS id "+
 			"FROM %s AS %s "+
 			"USE INDEX($idx) "+
-			"WHERE $sync.sequence IN $inSequences "+
+			"WHERE $sync.`sequence` IN $inSequences "+
 			"AND META().id NOT LIKE '%s'",
 		base.KeyspaceQueryAlias,
 		base.KeyspaceQueryToken,
 		base.KeyspaceQueryAlias,
@@ -338,11 +338,11 @@ var QueryAllDocs = SGQuery{
 	statement: fmt.Sprintf(
 		"SELECT META(%s).id as id, "+
 			"$sync.rev as r, "+
-			"$sync.sequence as s, "+
+			"$sync.`sequence` as s, "+
 			"$sync.channels as c "+
 			"FROM %s AS %s "+
 			"USE INDEX ($idx) "+
-			"WHERE $sync.sequence > 0 AND "+ // Required to use IndexAllDocs
+			"WHERE $sync.`sequence` > 0 AND "+ // Required to use IndexAllDocs
 			"META(%s).id NOT LIKE '%s' "+
 			"AND $sync IS NOT MISSING "+
 			"AND ($sync.flags IS MISSING OR BITTEST($sync.flags,1) = false)",
diff --git a/db/util_testing.go b/db/util_testing.go
index 94b2a802e2..337e52e6f4 100644
--- a/db/util_testing.go
+++ b/db/util_testing.go
@@ -212,9 +212,9 @@ func emptyAllDocsIndex(ctx context.Context, dataStore sgbucket.DataStore, tbp *b
 
 	// A stripped down version of db.Compact() that works on AllDocs instead of tombstones
 	statement := `SELECT META(ks).id AS id
-FROM ` + base.KeyspaceQueryToken + ` AS ks USE INDEX (sg_allDocs_x1)
-WHERE META(ks).xattrs._sync.sequence IS NOT MISSING
-	AND META(ks).id NOT LIKE '\\_sync:%'`
+FROM ` + base.KeyspaceQueryToken + ` AS ks USE INDEX (sg_allDocs_x1)`
+	statement += " WHERE META(ks).xattrs._sync.`sequence` IS NOT MISSING"
+	statement += ` AND META(ks).id NOT LIKE '\\_sync:%'`
 	results, err := n1qlStore.Query(ctx, statement, nil, base.RequestPlus, true)
 	if err != nil {
 		return 0, err
diff --git a/integration-test/start_server.sh b/integration-test/start_server.sh
index 3f0f2148d9..cc7c9dc101 100755
--- a/integration-test/start_server.sh
+++ b/integration-test/start_server.sh
@@ -58,11 +58,16 @@ docker stop ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} || true
 docker rm ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} || true
 
 # --volume: Makes and mounts a CBS folder for storing a CBCollect if needed
+# use dockerhub if no registry is specified, allows for pre-release images from alternative registries
+if [[ "${COUCHBASE_DOCKER_IMAGE_NAME}" != *"/"* ]]; then
+    COUCHBASE_DOCKER_IMAGE_NAME="couchbase/server:${COUCHBASE_DOCKER_IMAGE_NAME}"
+fi
+
 if [ "${MULTI_NODE:-}" == "true" ]; then
     ${DOCKER_COMPOSE} up -d --force-recreate --renew-anon-volumes --remove-orphans
 else
     # single node
-    docker run --rm -d --name ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} --volume "${WORKSPACE_ROOT}:/workspace" -p 8091-8096:8091-8096 -p 11207:11207 -p 11210:11210 -p 11211:11211 -p 18091-18094:18091-18094 "couchbase/server:${COUCHBASE_DOCKER_IMAGE_NAME}"
+    docker run --rm -d --name ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} --volume "${WORKSPACE_ROOT}:/workspace" -p 8091-8096:8091-8096 -p 11207:11207 -p 11210:11210 -p 11211:11211 -p 18091-18094:18091-18094 "${COUCHBASE_DOCKER_IMAGE_NAME}"
 fi
 
 # Test to see if Couchbase Server is up
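
Note (illustrative, not part of the patch): the backtick quoting added throughout this diff is needed because "sequence" is a reserved N1QL keyword on recent Couchbase Server releases, so the _sync.sequence property must be written as _sync.`sequence` in index and query expressions. The minimal Go sketch below shows that escaping rule in isolation; the helper name escapeN1QLIdentifier and the keyword set are hypothetical examples, not Sync Gateway or Couchbase SDK APIs.

// Illustrative sketch only. Demonstrates why the patch wraps "sequence"
// in backticks when building N1QL index/query expressions.
package main

import (
	"fmt"
	"strings"
)

// reservedN1QL is a tiny, intentionally incomplete example set; the real
// reserved-keyword list is much larger and version-dependent.
var reservedN1QL = map[string]bool{
	"sequence": true,
	"select":   true,
}

// escapeN1QLIdentifier wraps an identifier in backticks when it collides
// with a reserved keyword, mirroring the manual `sequence` escaping above.
func escapeN1QLIdentifier(ident string) string {
	if reservedN1QL[strings.ToLower(ident)] {
		return "`" + ident + "`"
	}
	return ident
}

func main() {
	// Mirrors createExpression := SyncPropertyName + ".`sequence`" from the tests.
	createExpression := "_sync." + escapeN1QLIdentifier("sequence")
	fmt.Println(createExpression) // prints: _sync.`sequence`
}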