Merge #27809 #27858
27809: importccl: pre-read schemas in mysqldump import r=dt a=dt

This switches mysqldump import to read schemas during setup on the
gateway, rather than during sampling, similar to how pgdump operates
(i.e. in three passes over the input rather than two).

This simplifies handling foreign keys, which can sometimes appear in
a table definition before the table they reference, making them hard to
resolve correctly right away. Reading through the whole file to capture
all the schemas before evaluating them should make that a bit easier.
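
A minimal sketch of why pre-reading helps (the types and helper below are
illustrative toys, not code from this change): because every definition is
collected before any foreign key is resolved, a table may freely reference
one defined later in the dump.

package main

import "fmt"

// tableDef is a toy stand-in for a parsed CREATE TABLE statement.
type tableDef struct {
	name string
	refs []string // tables referenced by foreign keys
}

// resolveAfterFullRead reads every definition first, then resolves
// foreign keys, so a reference to a table that appears later in the
// file still resolves.
func resolveAfterFullRead(defs []tableDef) error {
	byName := make(map[string]bool, len(defs))
	for _, d := range defs {
		byName[d.name] = true
	}
	for _, d := range defs {
		for _, ref := range d.refs {
			if !byName[ref] {
				return fmt.Errorf("table %q references unknown table %q", d.name, ref)
			}
		}
	}
	return nil
}

func main() {
	// "child" appears before the "parent" it references, as can happen
	// in a mysqldump file; resolution succeeds because all definitions
	// were captured first.
	defs := []tableDef{
		{name: "child", refs: []string{"parent"}},
		{name: "parent"},
	}
	fmt.Println(resolveAfterFullRead(defs)) // <nil>
}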

In the future, a return to two passes could be possible, either if it
turns out that KVs can be produced correctly even when the schema is
later changed by a foreign key, or by oversampling raw rows of the input
*without* converting them during the read, extracting the schemas, and
then using those schemas to convert the sampled rows to KVs from which
the splits can be sampled.
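
A rough sketch of that two-pass idea, with toy types standing in for the
real row and schema machinery (an assumption about how it might look, not
code from this change):

package main

import "fmt"

// rawRow and schema are toy stand-ins for unconverted input rows and
// table schemas.
type rawRow struct{ table, data string }
type schema struct{ name string }

// twoPassSample sketches the idea: pass 1 collects schemas and buffers
// every oversample'th raw row without converting it; pass 2 converts
// only the buffered rows to keys once all schemas are known, yielding
// candidate split points.
func twoPassSample(input []rawRow, oversample int) []string {
	schemas := map[string]schema{}
	var sampled []rawRow
	for i, r := range input { // pass 1: no conversion yet
		if _, ok := schemas[r.table]; !ok {
			schemas[r.table] = schema{name: r.table}
		}
		if i%oversample == 0 {
			sampled = append(sampled, r)
		}
	}
	splits := make([]string, 0, len(sampled))
	for _, r := range sampled { // pass 2: convert sampled rows only
		splits = append(splits, fmt.Sprintf("/%s/%s", schemas[r.table].name, r.data))
	}
	return splits
}

func main() {
	in := []rawRow{{"t", "a"}, {"t", "b"}, {"t", "c"}, {"t", "d"}}
	fmt.Println(twoPassSample(in, 2)) // [/t/a /t/c]
}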

Release note: none.

27858: distsqlrun: explain(distsql) queries w/ fragment r=jordanlewis a=jordanlewis

Previously, explain(distsql) emitted a URL that used the query string to
send the compressed plan data to cockroachdb.github.io. This was not
ideal: query strings are transmitted to the server, and these query
strings might contain compressed, sensitive data.

Now, the compressed plan data is sent in the URL fragment, which the
browser keeps on the client side and does not transmit to the server.

Release note: None
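
The change itself is visible in the flow_diagram.go hunk below; as a
standalone sketch of the difference (using the standard net/url package,
with a placeholder payload standing in for the compressed diagram):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	plan := "compressed-plan-data" // placeholder for the compressed diagram

	// Before: payload in the query string, which the browser sends to
	// the server when the page is requested.
	before := url.URL{
		Scheme:   "https",
		Host:     "cockroachdb.github.io",
		Path:     "distsqlplan/decode.html",
		RawQuery: plan,
	}
	// After: payload in the fragment, which the browser keeps client-side.
	after := url.URL{
		Scheme:   "https",
		Host:     "cockroachdb.github.io",
		Path:     "distsqlplan/decode.html",
		Fragment: plan,
	}
	fmt.Println(before.String()) // https://cockroachdb.github.io/distsqlplan/decode.html?compressed-plan-data
	fmt.Println(after.String())  // https://cockroachdb.github.io/distsqlplan/decode.html#compressed-plan-data
}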

Co-authored-by: David Taylor <[email protected]>
Co-authored-by: Jordan Lewis <[email protected]>
3 people committed Jul 23, 2018
3 parents 0ab635e + e984ec5 + 755f111 commit a8588f7
Showing 27 changed files with 402 additions and 446 deletions.
2 changes: 2 additions & 0 deletions pkg/ccl/importccl/import_stmt.go
@@ -687,6 +687,8 @@ func importPlanHook(
fks := fkHandler{skip: skipFKs, allowed: true, resolver: make(fkResolver)}
switch format.Format {
case roachpb.IOFileFormat_Mysqldump:
evalCtx := &p.ExtendedEvalContext().EvalContext
tableDescs, err = readMysqlCreateTable(reader, evalCtx, parentID, match)
case roachpb.IOFileFormat_PgDump:
evalCtx := &p.ExtendedEvalContext().EvalContext
tableDescs, err = readPostgresCreateTable(reader, evalCtx, p.ExecCfg().Settings, match, parentID, walltime, fks, int(format.PgDump.MaxRowSize))
51 changes: 4 additions & 47 deletions pkg/ccl/importccl/read_import_mysql.go
@@ -17,8 +17,6 @@ import (
"strconv"
"strings"

"github.com/cockroachdb/cockroach/pkg/roachpb"

"github.com/pkg/errors"
mysqltypes "vitess.io/vitess/go/sqltypes"
mysql "vitess.io/vitess/go/vt/sqlparser"
@@ -41,11 +39,9 @@ import (
// tables with names that appear in the `tables` map is converted to Cockroach
// KVs using the mapped converter and sent to kvCh.
type mysqldumpReader struct {
evalCtx *tree.EvalContext
tables map[string]*rowConverter
importAll bool // import any table encountered.
kvCh chan kvBatch

evalCtx *tree.EvalContext
tables map[string]*rowConverter
kvCh chan kvBatch
debugRow func(tree.Datums)
}

@@ -54,7 +50,7 @@ var _ inputConverter = &mysqldumpReader{}
func newMysqldumpReader(
kvCh chan kvBatch, tables map[string]*sqlbase.TableDescriptor, evalCtx *tree.EvalContext,
) (*mysqldumpReader, error) {
res := &mysqldumpReader{evalCtx: evalCtx, kvCh: kvCh, importAll: len(tables) == 0}
res := &mysqldumpReader{evalCtx: evalCtx, kvCh: kvCh}

converters := make(map[string]*rowConverter, len(tables))
for name, table := range tables {
@@ -69,7 +65,6 @@ func newMysqldumpReader(
converters[name] = conv
}
res.tables = converters

return res, nil
}

@@ -83,7 +78,6 @@ func (m *mysqldumpReader) inputFinished(ctx context.Context) {
func (m *mysqldumpReader) readFile(
ctx context.Context, input io.Reader, inputIdx int32, inputName string, progressFn progressFn,
) error {
var generatedIDs sqlbase.ID
var inserts, count int64
r := bufio.NewReaderSize(input, 1024*64)
tokens := mysql.NewTokenizer(r)
@@ -100,43 +94,6 @@ func (m *mysqldumpReader) readFile(
return errors.Wrap(err, "mysql parse error")
}
switch i := stmt.(type) {
case *mysql.DDL:
if i.Action == mysql.DropStr {
continue
}
if i.Action != mysql.CreateStr {
return errors.Errorf("unsupported %q statement in mysqldump", i.Action)
}
name := i.NewName.Name.String()
conv, ok := m.tables[name]
// If we already have this schema, skip it.
if conv != nil {
continue
}
// If we're only importing the named tables and this is not one, skip it.
if !m.importAll && !ok {
continue
}

generatedIDs++
id := defaultCSVTableID + generatedIDs
tbl, err := mysqlTableToCockroach(m.evalCtx, defaultCSVParentID, id, name, i.TableSpec)
if err != nil {
return err
}
conv, err = newRowConverter(tbl, m.evalCtx, m.kvCh)
if err != nil {
return err
}
kv := roachpb.KeyValue{Key: sqlbase.MakeDescMetadataKey(id)}
if err := kv.Value.SetProto(tbl); err != nil {
return err
}
kv.Value.InitChecksum(kv.Key)
conv.kvBatch = append(conv.kvBatch, kv)

m.tables[name] = conv

case *mysql.Insert:
name := i.Table.Name.String()
conv, ok := m.tables[name]
10 changes: 5 additions & 5 deletions pkg/sql/distsql_physical_planner_test.go
@@ -432,7 +432,7 @@ func TestDistSQLDeadHosts(t *testing.T) {
// Verify the plan (should include all 5 nodes).
r.CheckQueryResults(t,
"SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]",
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html?eJy8k09LwzAYxu9-CnlOCu9h7bo5e5rHHXQy9SQ91OalFLamJCkoo99d1iDaIskgo8f8-T2_PG1yRC0FP-UH1kjfEYEQgzAHIQFhgYzQKFmw1lKdtlhgIz6RzghV3bTmNJ0RCqkY6RGmMntGitf8Y887zgUrEASbvNr3kkZVh1x9rQ0I29ak1-sYWUeQrflJ6-h8z0NZKi5zI0eal7fHm3V0e3b0b2JbSyVYsRgEZt2F5dFE38_jCakQT1TB4wmpMJ-ogscTUiGZqILHc6mH-E_0jnUja82jBznMywgsSrZvWctWFfysZNGH2-G2391PCNbGrkZ2sKnt0ulYf-HICccDOBrDsdvsUc-ddOKGk5BzL5zw0m1ehpjvnPDKbV6FmO_d_2rmuSbuSzZ2Z93VdwAAAP__XTV6BQ=="}},
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8k09LwzAYxu9-CnlOCu9h7bo5e5rHHXQy9SQ91OalFLamJCkoo99d1iDaIskgo8f8-T2_PG1yRC0FP-UH1kjfEYEQgzAHIQFhgYzQKFmw1lKdtlhgIz6RzghV3bTmNJ0RCqkY6RGmMntGitf8Y887zgUrEASbvNr3kkZVh1x9rQ0I29ak1-sYWUeQrflJ6-h8z0NZKi5zI0eal7fHm3V0e3b0b2JbSyVYsRgEZt2F5dFE38_jCakQT1TB4wmpMJ-ogscTUiGZqILHc6mH-E_0jnUja82jBznMywgsSrZvWctWFfysZNGH2-G2391PCNbGrkZ2sKnt0ulYf-HICccDOBrDsdvsUc-ddOKGk5BzL5zw0m1ehpjvnPDKbV6FmO_d_2rmuSbuSzZ2Z93VdwAAAP__XTV6BQ=="}},
)

// Stop node 5.
@@ -442,7 +442,7 @@ func TestDistSQLDeadHosts(t *testing.T) {

r.CheckQueryResults(t,
"SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]",
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html?eJy8k8FK7DAYhff3KS5npZCF6dRx7KouZ6Ejo64ki9j8lEKnKUkKytB3lzaItkg60qHL5M93vpySHlFpRQ_yQBbJKzgYIjCswBBDMNRGZ2StNt3YH96qdyRXDEVVN67bFgyZNoTkCFe4kpDgWb6VtCepyIBBkZNF2QtqUxyk-UgdGHaNS_6nEUTLoBv3lday0z13eW4ol06PNE8v9xcpvzw5-juxqbRRZEgNAkV7Zjlf6PtNeOZUiBaqMOGZU2G1UIUJz7le8S_Re7K1riyNXvMwTzCQysn_CFY3JqNHo7M-3C93_el-Q5F1fsr9Ylv5UXetnzAPwtEA5mM4CsK3YfMqCMdhOJ5z7esgvA6b13PMN0F4EzZv_mQW7b_PAAAA__-DuA-E"}},
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8k8FK7DAYhff3KS5npZCF6dRx7KouZ6Ejo64ki9j8lEKnKUkKytB3lzaItkg60qHL5M93vpySHlFpRQ_yQBbJKzgYIjCswBBDMNRGZ2StNt3YH96qdyRXDEVVN67bFgyZNoTkCFe4kpDgWb6VtCepyIBBkZNF2QtqUxyk-UgdGHaNS_6nEUTLoBv3lday0z13eW4ol06PNE8v9xcpvzw5-juxqbRRZEgNAkV7Zjlf6PtNeOZUiBaqMOGZU2G1UIUJz7le8S_Re7K1riyNXvMwTzCQysn_CFY3JqNHo7M-3C93_el-Q5F1fsr9Ylv5UXetnzAPwtEA5mM4CsK3YfMqCMdhOJ5z7esgvA6b13PMN0F4EzZv_mQW7b_PAAAA__-DuA-E"}},
)

// Stop node 2; note that no range had replicas on both 2 and 5.
@@ -452,7 +452,7 @@ func TestDistSQLDeadHosts(t *testing.T) {

r.CheckQueryResults(t,
"SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]",
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html?eJy8kkFLwzAUx-9-CvmfFHIwXZ3QUz3uoJOpJ8khNo9S6JrykoIy-t2lDaItkk02dkxe_r_fe-Ht0FhDj3pLDtkbJAQWEEihBFq2BTlneSiFhyvzgexGoGrazg_XSqCwTMh28JWvCRle9HtNG9KGGAKGvK7qEd5ytdX8mXsIrDufXeYJVC9gO_9N68XhnvuyZCq1tzPN8-vDVS6vD0b_ELvGsiEmMwGq_sRyeab_2-M5ZoTkTCPs8ZxqBf5Ab8i1tnE0W4UpTwmQKSlskbMdF_TEthjh4bgeX48XhpwPVRkOqyaUhrZ-h2U0nEzCch5OouG7uHkRDafxcHpM27fR8DJuXv7LrPqLrwAAAP__vMyldA=="}},
[][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8kkFLwzAUx-9-CvmfFHIwXZ3QUz3uoJOpJ8khNo9S6JrykoIy-t2lDaItkk02dkxe_r_fe-Ht0FhDj3pLDtkbJAQWEEihBFq2BTlneSiFhyvzgexGoGrazg_XSqCwTMh28JWvCRle9HtNG9KGGAKGvK7qEd5ytdX8mXsIrDufXeYJVC9gO_9N68XhnvuyZCq1tzPN8-vDVS6vD0b_ELvGsiEmMwGq_sRyeab_2-M5ZoTkTCPs8ZxqBf5Ab8i1tnE0W4UpTwmQKSlskbMdF_TEthjh4bgeX48XhpwPVRkOqyaUhrZ-h2U0nEzCch5OouG7uHkRDafxcHpM27fR8DJuXv7LrPqLrwAAAP__vMyldA=="}},
)
}

@@ -509,15 +509,15 @@ func TestDistSQLDrainingHosts(t *testing.T) {
}

// Verify distribution.
expectPlan([][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html?eJyskT1rwzAQhvf-inJTCoJETrpoSumUoXbJBx2KCap1GEMsmZMELcH_vdgaEodYTSGjTn7uef3qCNooTGWNFsQncGCQQM6gIVOgtYa6cfhopb5BzBhUuvGuG-cMCkMI4giucgcEAVv5dcA1SoU0nQEDhU5Wh351Q1Ut6WepfW2BQeadeEyNRshbBsa701LrZIkgeMtuF7-UJWEpnaFpMvS-Zrt0u19nH5vJ06grGXWdFF4bUkioBvvzNp5mMUyz2b3tV-l2suTjYeaDMPz2xvldG_9DfPaP87s2fsW1RtsYbfGi-eubZ92LoCoxPJ81ngp8J1P0mnDMeq4fKLQu3PJwWOlw1QU8h3kUTgYwv4STKPwcN8-j8CIOL_4VO28ffgMAAP__nC9YuA=="}})
expectPlan([][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJyskT1rwzAQhvf-inJTCoJETrpoSumUoXbJBx2KCap1GEMsmZMELcH_vdgaEodYTSGjTn7uef3qCNooTGWNFsQncGCQQM6gIVOgtYa6cfhopb5BzBhUuvGuG-cMCkMI4giucgcEAVv5dcA1SoU0nQEDhU5Wh351Q1Ut6WepfW2BQeadeEyNRshbBsa701LrZIkgeMtuF7-UJWEpnaFpMvS-Zrt0u19nH5vJ06grGXWdFF4bUkioBvvzNp5mMUyz2b3tV-l2suTjYeaDMPz2xvldG_9DfPaP87s2fsW1RtsYbfGi-eubZ92LoCoxPJ81ngp8J1P0mnDMeq4fKLQu3PJwWOlw1QU8h3kUTgYwv4STKPwcN8-j8CIOL_4VO28ffgMAAP__nC9YuA=="}})

// Drain the second node and expect the query to be planned on only the
// first node.
distServer := tc.Server(1).DistSQLServer().(*distsqlrun.ServerImpl)
distServer.ServerConfig.TestingKnobs.DrainFast = true
distServer.Drain(ctx, 0 /* flowDrainWait */)

expectPlan([][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html?eJyUkEFL9DAQhu_fr_h4TwqBbfeYk-JpL63UFQ8SJDZDKLSZMklAWfrfpc1BV1jR47yT533CnBDYUWMnitDPqGEUZuGeYmRZo_Lg4N6gK4UhzDmtsVHoWQj6hDSkkaBxtK8jdWQdya6CgqNkh3GrnWWYrLzfhDxFKLQ56f8NB4JZFDinz9KYrCfoelG_F996L-RtYtnV59679rE5vnTt08PV9UXX_i-ujuLMIdKZ51JztRgFcp7KISNn6eleuN80ZWw3bgscxVS2dRkOoazWD36F6x_h_TfYLP8-AgAA__-zG6EE"}})
expectPlan([][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJyUkEFL9DAQhu_fr_h4TwqBbfeYk-JpL63UFQ8SJDZDKLSZMklAWfrfpc1BV1jR47yT533CnBDYUWMnitDPqGEUZuGeYmRZo_Lg4N6gK4UhzDmtsVHoWQj6hDSkkaBxtK8jdWQdya6CgqNkh3GrnWWYrLzfhDxFKLQ56f8NB4JZFDinz9KYrCfoelG_F996L-RtYtnV59679rE5vnTt08PV9UXX_i-ujuLMIdKZ51JztRgFcp7KISNn6eleuN80ZWw3bgscxVS2dRkOoazWD36F6x_h_TfYLP8-AgAA__-zG6EE"}})

// Verify correctness.
var res int
129 changes: 63 additions & 66 deletions pkg/sql/distsql_plan_csv.go
@@ -248,61 +248,63 @@ func LoadCSV(

details := job.Details().(jobspb.ImportDetails)
samples := details.Samples
var parsedTables map[sqlbase.ID]*sqlbase.TableDescriptor
if samples == nil {
var err error
samples, parsedTables, err = dsp.loadCSVSamplingPlan(ctx, job, db, evalCtx, thisNode, nodes, from, splitSize, oversample, &planCtx, inputSpecs, sstSpecs)
samples, err = dsp.loadCSVSamplingPlan(ctx, job, db, evalCtx, thisNode, nodes, from, splitSize, oversample, &planCtx, inputSpecs, sstSpecs)
if err != nil {
return err
}
}

// If sampling returns parsed table definitions, we need to potentially assign
// them real IDs and re-key the samples with those IDs, then update the job
// details to record the tables and their matching samples.
if len(parsedTables) > 0 {
importing := to == "" // are we actually ingesting, or just transforming?

rekeys := make(map[sqlbase.ID]*sqlbase.TableDescriptor, len(parsedTables))

// Update the tables map with the parsed tables and allocate them real IDs.
for _, parsed := range parsedTables {
name := parsed.Name
if existing, ok := tables[name]; ok && existing != nil {
return errors.Errorf("unexpected parsed table definition for %q", name)
}
tables[name] = parsed

// If we're actually importing, we'll need a real ID for this table.
if importing {
rekeys[parsed.ID] = parsed
parsed.ID, err = GenerateUniqueDescID(ctx, phs.ExecCfg().DB)
if err != nil {
return err
/*
TODO(dt): when we enable reading schemas during sampling, might do this:
// If sampling returns parsed table definitions, we need to potentially assign
// them real IDs and re-key the samples with those IDs, then update the job
// details to record the tables and their matching samples.
if len(parsedTables) > 0 {
importing := to == "" // are we actually ingesting, or just transforming?
rekeys := make(map[sqlbase.ID]*sqlbase.TableDescriptor, len(parsedTables))
// Update the tables map with the parsed tables and allocate them real IDs.
for _, parsed := range parsedTables {
name := parsed.Name
if existing, ok := tables[name]; ok && existing != nil {
return errors.Errorf("unexpected parsed table definition for %q", name)
}
tables[name] = parsed
// If we're actually importing, we'll need a real ID for this table.
if importing {
rekeys[parsed.ID] = parsed
parsed.ID, err = GenerateUniqueDescID(ctx, phs.ExecCfg().DB)
if err != nil {
return err
}
}
}
}
}
// The samples were created using the dummy IDs, but the IMPORT run will use
// the actual IDs, so we need to re-key the samples so that they actually
// act as splits in the IMPORTed key-space.
if importing {
kr, err := makeRewriter(rekeys)
if err != nil {
return err
}
for i := range samples {
var ok bool
samples[i], ok, err = kr.RewriteKey(samples[i])
if err != nil {
return err
}
if !ok {
return errors.Errorf("expected rewriter to rewrite key %v", samples[i])
// The samples were created using the dummy IDs, but the IMPORT run will use
// the actual IDs, so we need to re-key the samples so that they actually
// act as splits in the IMPORTed key-space.
if importing {
kr, err := makeRewriter(rekeys)
if err != nil {
return err
}
for i := range samples {
var ok bool
samples[i], ok, err = kr.RewriteKey(samples[i])
if err != nil {
return err
}
if !ok {
return errors.Errorf("expected rewriter to rewrite key %v", samples[i])
}
}
}
}
}
}
*/

if len(tables) == 0 {
return errors.Errorf("must specify table(s) to import")
@@ -438,12 +440,6 @@ func LoadCSV(

d := details.(*jobspb.Payload_Import).Import
d.Samples = samples
if len(parsedTables) > 0 {
d.Tables = make([]jobspb.ImportDetails_Table, 0, len(tables))
for _, tbl := range tables {
d.Tables = append(d.Tables, jobspb.ImportDetails_Table{Desc: tbl})
}
}
return prog.Completed()
},
); err != nil {
@@ -483,7 +479,7 @@ func (dsp *DistSQLPlanner) loadCSVSamplingPlan(
planCtx *planningCtx,
csvSpecs []*distsqlrun.ReadImportDataSpec,
sstSpecs []distsqlrun.SSTWriterSpec,
) ([][]byte, map[sqlbase.ID]*sqlbase.TableDescriptor, error) {
) ([][]byte, error) {
// splitSize is the target number of bytes at which to create SST files. We
// attempt to do this by sampling, which is what the first DistSQL plan of this
// function does. CSV rows are converted into KVs. The total size of the KV is
Expand All @@ -501,7 +497,7 @@ func (dsp *DistSQLPlanner) loadCSVSamplingPlan(
}
sampleSize := splitSize / oversample
if sampleSize > math.MaxInt32 {
return nil, nil, errors.Errorf("SST size must fit in an int32: %d", splitSize)
return nil, errors.Errorf("SST size must fit in an int32: %d", splitSize)
}

var p physicalPlan
@@ -530,7 +526,7 @@ func (dsp *DistSQLPlanner) loadCSVSamplingPlan(
d.SamplingProgress = make([]float32, len(csvSpecs))
return d.Completed()
}); err != nil {
return nil, nil, err
return nil, err
}

// We only need the key during sorting.
@@ -558,23 +554,24 @@ func (dsp *DistSQLPlanner) loadCSVSamplingPlan(
)

var samples [][]byte
parsedTables := make(map[sqlbase.ID]*sqlbase.TableDescriptor)

sampleCount := 0
rowResultWriter := newCallbackResultWriter(func(ctx context.Context, row tree.Datums) error {
key := roachpb.Key(*row[0].(*tree.DBytes))

if keys.IsDescriptorKey(key) {
kv := roachpb.KeyValue{Key: key}
kv.Value.RawBytes = []byte(*row[1].(*tree.DBytes))
var desc sqlbase.TableDescriptor
if err := kv.Value.GetProto(&desc); err != nil {
return err
/*
TODO(dt): when we enable reading schemas during sampling, might do this:
if keys.IsDescriptorKey(key) {
kv := roachpb.KeyValue{Key: key}
kv.Value.RawBytes = []byte(*row[1].(*tree.DBytes))
var desc sqlbase.TableDescriptor
if err := kv.Value.GetProto(&desc); err != nil {
return err
}
parsedTables[desc.ID] = &desc
return nil
}
parsedTables[desc.ID] = &desc
return nil
}

*/
sampleCount++
sampleCount = sampleCount % int(oversample)
if sampleCount == 0 {
@@ -606,10 +603,10 @@ func (dsp *DistSQLPlanner) loadCSVSamplingPlan(
samples = nil
dsp.Run(planCtx, nil, &p, recv, evalCtx)
if err := rowResultWriter.Err(); err != nil {
return nil, nil, err
return nil, err
}

log.VEventf(ctx, 1, "generated %d splits; begin routing for job %s", len(samples), job.Payload().Description)

return samples, parsedTables, nil
return samples, nil
}
2 changes: 1 addition & 1 deletion pkg/sql/distsqlrun/flow_diagram.go
@@ -635,7 +635,7 @@ func encodeJSONToURL(json bytes.Buffer) (string, url.URL, error) {
Scheme: "https",
Host: "cockroachdb.github.io",
Path: "distsqlplan/decode.html",
RawQuery: compressed.String(),
Fragment: compressed.String(),
}
return jsonStr, url, nil
}
2 changes: 1 addition & 1 deletion pkg/sql/distsqlrun/flow_diagram_test.go
@@ -165,7 +165,7 @@ func TestPlanDiagramIndexJoin(t *testing.T) {

compareDiagrams(t, json, expected)

expectedURL := "https://cockroachdb.github.io/distsqlplan/decode.html?eJy0kjFr8zAQhvfvV3y8awS1pE6aNBXSoSlpt9aDah1B4EhGkiEl-L8XyyWJoSkpaUfd3XvP48N7-GDpwWwpQb2Ag0GAQaJm6GJoKKUQx9Y0uLQ7qIrB-a7PY7lmaEIkqD2yyy1B4dm8tbQmYyneVGCwlI1ry_qnsKWlt7TTZQgMqz6r_5ozLVAPDKHPx8Upmw1B8YGdwPnlcP7rcHE5XPwp_MgM0VIkO6dpsUA9fGF4H5z_FJTzSBfd1sT3g96dazPF0XChxWtfVbLR8qAtzzqLnxxsTakLPtFM5dzmavwgshuaDpBCHxt6jKEpf-T0XJVcKVhKeerK6bH0pVWOehrm14TFNWH5bfh2Fq6Gevj3EQAA__8R3DK5"
expectedURL := "https://cockroachdb.github.io/distsqlplan/decode.html#eJy0kjFr8zAQhvfvV3y8awS1pE6aNBXSoSlpt9aDah1B4EhGkiEl-L8XyyWJoSkpaUfd3XvP48N7-GDpwWwpQb2Ag0GAQaJm6GJoKKUQx9Y0uLQ7qIrB-a7PY7lmaEIkqD2yyy1B4dm8tbQmYyneVGCwlI1ry_qnsKWlt7TTZQgMqz6r_5ozLVAPDKHPx8Upmw1B8YGdwPnlcP7rcHE5XPwp_MgM0VIkO6dpsUA9fGF4H5z_FJTzSBfd1sT3g96dazPF0XChxWtfVbLR8qAtzzqLnxxsTakLPtFM5dzmavwgshuaDpBCHxt6jKEpf-T0XJVcKVhKeerK6bH0pVWOehrm14TFNWH5bfh2Fq6Gevj3EQAA__8R3DK5"
if url.String() != expectedURL {
t.Errorf("expected `%s` got `%s`", expectedURL, &url)
}