-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathobfuscate_and_normalize.go
43 lines (33 loc) · 1.37 KB
/
obfuscate_and_normalize.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
package sqllexer
import "strings"
// ObfuscateAndNormalize takes an input SQL string and returns a normalized SQL string with metadata.
// It is a convenience helper that runs the Obfuscator and the Normalizer over the token stream in a
// single pass, rather than lexing the input twice.
func ObfuscateAndNormalize(input string, obfuscator *Obfuscator, normalizer *Normalizer, lexerOpts ...lexerOption) (normalizedSQL string, statementMetadata *StatementMetadata, err error) {
	tokenizer := New(input, lexerOpts...)

	statementMetadata = &StatementMetadata{
		Tables:     []string{},
		Comments:   []string{},
		Commands:   []string{},
		Procedures: []string{},
	}

	var (
		sb          strings.Builder
		prevToken   Token // last token that was not whitespace or a comment
		placeholder groupablePlaceholder
	)
	activeCTEs := make(map[string]bool) // CTE names currently being processed

	for tok := tokenizer.Scan(); tok.Type != EOF; tok = tokenizer.Scan() {
		// Obfuscate first, then hand the (possibly rewritten) token to the normalizer.
		tok.Value = obfuscator.ObfuscateTokenValue(tok, prevToken, lexerOpts...)
		normalizer.collectMetadata(&tok, &prevToken, statementMetadata, activeCTEs)
		normalizer.normalizeSQL(&tok, &prevToken, &sb, &placeholder, lexerOpts...)
	}

	// Drop duplicate entries gathered during the scan before returning.
	dedupeStatementMetadata(statementMetadata)
	return normalizer.trimNormalizedSQL(sb.String()), statementMetadata, nil
}