Skip to content

Commit

Permalink
CBG-3615 escape sequence keyword in N1QL (#6586)
Browse files Browse the repository at this point in the history
* CBG-3615 Escape sequence keyword, pass tests with 7.6

- for integration tests, allow non-Docker Hub images if a registry
  path is specified in the image name

* use a wildcard match rather than a regex
  • Loading branch information
torcolvin authored Nov 29, 2023
1 parent ad74e7e commit 4c00dba
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 32 deletions.
17 changes: 7 additions & 10 deletions base/bucket_n1ql_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ func TestCreateAndDropIndex(t *testing.T) {
t.Fatalf("Requires bucket to be N1QLStore")
}

createExpression := SyncPropertyName + ".sequence"
createExpression := SyncPropertyName + ".`sequence`"
err := n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
if err != nil {
t.Fatalf("Error creating index: %s", err)
Expand Down Expand Up @@ -389,7 +389,7 @@ func TestCreateDuplicateIndex(t *testing.T) {
t.Fatalf("Requires bucket to be N1QLStore")
}

createExpression := SyncPropertyName + ".sequence"
createExpression := SyncPropertyName + ".`sequence`"
err := n1qlStore.CreateIndex(ctx, "testIndexDuplicateSequence", createExpression, "", testN1qlOptions)
if err != nil {
t.Fatalf("Error creating index: %s", err)
Expand Down Expand Up @@ -425,7 +425,7 @@ func TestCreateAndDropIndexSpecialCharacters(t *testing.T) {
t.Fatalf("Requires bucket to be N1QLStore")
}

createExpression := SyncPropertyName + ".sequence"
createExpression := SyncPropertyName + ".`sequence`"
err := n1qlStore.CreateIndex(ctx, "testIndex-sequence", createExpression, "", testN1qlOptions)
if err != nil {
t.Fatalf("Error creating index: %s", err)
Expand Down Expand Up @@ -465,7 +465,7 @@ func TestDeferredCreateIndex(t *testing.T) {
DeferBuild: true,
}

createExpression := SyncPropertyName + ".sequence"
createExpression := SyncPropertyName + ".`sequence`"
err := n1qlStore.CreateIndex(ctx, indexName, createExpression, "", deferN1qlOptions)
if err != nil {
t.Fatalf("Error creating index: %s", err)
Expand Down Expand Up @@ -514,7 +514,7 @@ func TestBuildDeferredIndexes(t *testing.T) {
}

// Create a deferred and a non-deferred index
createExpression := SyncPropertyName + ".sequence"
createExpression := SyncPropertyName + ".`sequence`"
err := n1qlStore.CreateIndex(ctx, deferredIndexName, createExpression, "", deferN1qlOptions)
if err != nil {
t.Errorf("Error creating index: %s", err)
Expand Down Expand Up @@ -579,11 +579,8 @@ func TestCreateAndDropIndexErrors(t *testing.T) {
}

// Create index
createExpression = "_sync.sequence"
err = n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
if err != nil {
t.Fatalf("Error creating index: %s", err)
}
createExpression = "_sync.`sequence`"
require.NoError(t, n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions))

// Attempt to recreate duplicate index
err = n1qlStore.CreateIndex(ctx, "testIndex_sequence", createExpression, "", testN1qlOptions)
Expand Down
12 changes: 6 additions & 6 deletions db/indexes.go
Original file line number Diff line number Diff line change
Expand Up @@ -114,9 +114,9 @@ var (
indexExpressions = map[SGIndexType]string{
IndexAccess: "ALL (ARRAY (op.name) FOR op IN OBJECT_PAIRS($sync.access) END)",
IndexRoleAccess: "ALL (ARRAY (op.name) FOR op IN OBJECT_PAIRS($sync.role_access) END)",
IndexChannels: "ALL (ARRAY [op.name, LEAST($sync.sequence,op.val.seq), IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] FOR op IN OBJECT_PAIRS($sync.channels) END), " +
"$sync.rev, $sync.sequence, $sync.flags",
IndexAllDocs: "$sync.sequence, $sync.rev, $sync.flags, $sync.deleted",
IndexChannels: "ALL (ARRAY [op.name, LEAST($sync.`sequence`,op.val.seq), IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] FOR op IN OBJECT_PAIRS($sync.channels) END), " +
"$sync.rev, $sync.`sequence`, $sync.flags",
IndexAllDocs: "$sync.`sequence`, $sync.rev, $sync.flags, $sync.deleted",
IndexTombstones: "$sync.tombstoned_at",
IndexSyncDocs: "META().id",
IndexUser: "META().id, name, email, disabled",
Expand Down Expand Up @@ -166,12 +166,12 @@ var (
"USE INDEX ($idx) " +
"WHERE ANY op in OBJECT_PAIRS($relativesync.role_access) SATISFIES op.name = 'foo' end " +
"LIMIT 1",
IndexChannels: "SELECT [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)][1] AS sequence " +
IndexChannels: "SELECT [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)][1] AS `sequence` " +
"FROM %s AS %s " +
"USE INDEX ($idx) " +
"UNNEST OBJECT_PAIRS($relativesync.channels) AS op " +
"WHERE [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] BETWEEN ['foo', 0] AND ['foo', 1] " +
"ORDER BY [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] " +
"WHERE [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null), IFMISSING(op.val.del,null)] BETWEEN ['foo', 0] AND ['foo', 1] " +
"ORDER BY [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] " +
"LIMIT 1",
}
)
Expand Down
24 changes: 12 additions & 12 deletions db/query.go
Original file line number Diff line number Diff line change
Expand Up @@ -99,19 +99,19 @@ const (
var QueryChannels = SGQuery{
name: QueryTypeChannels,
statement: fmt.Sprintf(
"SELECT [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][1] AS seq, "+
"[op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][2] AS rRev, "+
"[op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][3] AS rDel, "+
"SELECT [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][1] AS seq, "+
"[op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][2] AS rRev, "+
"[op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)][3] AS rDel, "+
"$sync.rev AS rev, "+
"$sync.flags AS flags, "+
"META(%s).id AS id "+
"FROM %s AS %s "+
"USE INDEX ($idx) "+
"UNNEST OBJECT_PAIRS($relativesync.channels) AS op "+
"WHERE ([op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] "+
"WHERE ([op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)] "+
"BETWEEN [$channelName, $startSeq] AND [$channelName, $endSeq]) "+
"%s"+
"ORDER BY [op.name, LEAST($sync.sequence, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)]",
"ORDER BY [op.name, LEAST($sync.`sequence`, op.val.seq),IFMISSING(op.val.rev,null),IFMISSING(op.val.del,null)]",
base.KeyspaceQueryAlias,
base.KeyspaceQueryToken, base.KeyspaceQueryAlias,
activeOnlyFilter),
Expand All @@ -121,15 +121,15 @@ var QueryChannels = SGQuery{
var QueryStarChannel = SGQuery{
name: QueryTypeChannelsStar,
statement: fmt.Sprintf(
"SELECT $sync.sequence AS seq, "+
"SELECT $sync.`sequence` AS seq, "+
"$sync.rev AS rev, "+
"$sync.flags AS flags, "+
"META(%s).id AS id "+
"FROM %s AS %s "+
"USE INDEX ($idx) "+
"WHERE $sync.sequence >= $startSeq AND $sync.sequence < $endSeq "+
"WHERE $sync.`sequence` >= $startSeq AND $sync.`sequence` < $endSeq "+
"AND META().id NOT LIKE '%s' %s"+
"ORDER BY $sync.sequence",
"ORDER BY $sync.`sequence`",
base.KeyspaceQueryAlias,
base.KeyspaceQueryToken, base.KeyspaceQueryAlias,
SyncDocWildcard, activeOnlyFilter),
Expand All @@ -139,13 +139,13 @@ var QueryStarChannel = SGQuery{
var QuerySequences = SGQuery{
name: QueryTypeSequences,
statement: fmt.Sprintf(
"SELECT $sync.sequence AS seq, "+
"SELECT $sync.`sequence` AS seq, "+
"$sync.rev AS rev, "+
"$sync.flags AS flags, "+
"META(%s).id AS id "+
"FROM %s AS %s "+
"USE INDEX($idx) "+
"WHERE $sync.sequence IN $inSequences "+
"WHERE $sync.`sequence` IN $inSequences "+
"AND META().id NOT LIKE '%s'",
base.KeyspaceQueryAlias,
base.KeyspaceQueryToken, base.KeyspaceQueryAlias,
Expand Down Expand Up @@ -338,11 +338,11 @@ var QueryAllDocs = SGQuery{
statement: fmt.Sprintf(
"SELECT META(%s).id as id, "+
"$sync.rev as r, "+
"$sync.sequence as s, "+
"$sync.`sequence` as s, "+
"$sync.channels as c "+
"FROM %s AS %s "+
"USE INDEX ($idx) "+
"WHERE $sync.sequence > 0 AND "+ // Required to use IndexAllDocs
"WHERE $sync.`sequence` > 0 AND "+ // Required to use IndexAllDocs
"META(%s).id NOT LIKE '%s' "+
"AND $sync IS NOT MISSING "+
"AND ($sync.flags IS MISSING OR BITTEST($sync.flags,1) = false)",
Expand Down
6 changes: 3 additions & 3 deletions db/util_testing.go
Original file line number Diff line number Diff line change
Expand Up @@ -212,9 +212,9 @@ func emptyAllDocsIndex(ctx context.Context, dataStore sgbucket.DataStore, tbp *b

// A stripped down version of db.Compact() that works on AllDocs instead of tombstones
statement := `SELECT META(ks).id AS id
FROM ` + base.KeyspaceQueryToken + ` AS ks USE INDEX (sg_allDocs_x1)
WHERE META(ks).xattrs._sync.sequence IS NOT MISSING
AND META(ks).id NOT LIKE '\\_sync:%'`
FROM ` + base.KeyspaceQueryToken + ` AS ks USE INDEX (sg_allDocs_x1)`
statement += " WHERE META(ks).xattrs._sync.`sequence` IS NOT MISSING"
statement += ` AND META(ks).id NOT LIKE '\\_sync:%'`
results, err := n1qlStore.Query(ctx, statement, nil, base.RequestPlus, true)
if err != nil {
return 0, err
Expand Down
7 changes: 6 additions & 1 deletion integration-test/start_server.sh
Original file line number Diff line number Diff line change
Expand Up @@ -58,11 +58,16 @@ docker stop ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} || true
docker rm ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} || true
# --volume: Makes and mounts a CBS folder for storing a CBCollect if needed

# use dockerhub if no registry is specified, allows for pre-release images from alternative registries
if [[ "${COUCHBASE_DOCKER_IMAGE_NAME}" != *"/"* ]]; then
COUCHBASE_DOCKER_IMAGE_NAME="couchbase/server:${COUCHBASE_DOCKER_IMAGE_NAME}"
fi

if [ "${MULTI_NODE:-}" == "true" ]; then
${DOCKER_COMPOSE} up -d --force-recreate --renew-anon-volumes --remove-orphans
else
# single node
docker run --rm -d --name ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} --volume "${WORKSPACE_ROOT}:/workspace" -p 8091-8096:8091-8096 -p 11207:11207 -p 11210:11210 -p 11211:11211 -p 18091-18094:18091-18094 "couchbase/server:${COUCHBASE_DOCKER_IMAGE_NAME}"
docker run --rm -d --name ${SG_TEST_COUCHBASE_SERVER_DOCKER_NAME} --volume "${WORKSPACE_ROOT}:/workspace" -p 8091-8096:8091-8096 -p 11207:11207 -p 11210:11210 -p 11211:11211 -p 18091-18094:18091-18094 "${COUCHBASE_DOCKER_IMAGE_NAME}"
fi

# Test to see if Couchbase Server is up
Expand Down

0 comments on commit 4c00dba

Please sign in to comment.