From f6a43fe3baeaeb91c96626f0cea3ccb5d0f2f5ec Mon Sep 17 00:00:00 2001 From: Shahidh K Muhammed Date: Thu, 16 Jan 2020 09:23:18 +0530 Subject: [PATCH 1/3] fix various version-related issues with cli (close #3706) (#3709) - do not quit on server-cli version mismatch, show warning - fix a bug in update-cli command which prevets updates to pre-release versions - if a console template for a particular version is not found, use the latest template --- cli/assets/assets.go | 31 ++++++++++++-- cli/assets/latest/console.html | 75 ++++++++++++++++++++++++++++++++++ cli/cli.go | 2 +- cli/commands/console.go | 4 ++ cli/commands/update-cli.go | 2 + cli/update/update.go | 2 +- cli/util/template.go | 11 +++++ cli/version/compatibility.go | 6 +-- 8 files changed, 125 insertions(+), 8 deletions(-) create mode 100644 cli/assets/latest/console.html diff --git a/cli/assets/assets.go b/cli/assets/assets.go index 18b38b0bae70b..85b65b6be0775 100644 --- a/cli/assets/assets.go +++ b/cli/assets/assets.go @@ -1,5 +1,6 @@ // Code generated by go-bindata. // sources: +// assets/latest/console.html // assets/unversioned/console.html // assets/v1.0-alpha/console.html // assets/v1.0/console.html @@ -70,6 +71,26 @@ func (fi bindataFileInfo) Sys() interface{} { return nil } +var _assetsLatestConsoleHtml = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x56\xdf\x6f\xdb\xb6\x13\x7f\xcf\x5f\x71\xe0\x17\x5f\xf4\x65\x22\x9d\xae\xc0\x0a\xd9\x0a\x50\xac\x18\x36\x6c\x03\x02\xb4\xd9\x6b\xc0\x50\x27\xe9\x52\x8a\xd4\x78\xb4\x13\xd7\xf0\xff\x3e\xe8\xb7\xec\x64\x4d\x57\x14\x7e\x21\xef\xf7\xe7\x73\xc7\x93\x37\x55\xac\x2d\x58\xed\xca\x4c\xa0\x4b\xb6\x2c\xae\x2e\x00\x36\x15\xea\xbc\x3d\x00\x6c\x2c\xb9\x4f\x10\xd0\x66\x82\x8c\x77\x02\xe2\xbe\xc1\x4c\x50\xad\x4b\x54\x8d\x2b\x05\x54\x01\x8b\x4c\x54\x31\x36\x9c\x2a\x55\x06\xdd\x54\x7f\xdb\x04\x5d\x49\x0e\x13\x93\x3b\x59\x69\xde\x06\x2d\xc9\x2b\xe3\x1d\x7b\x8b\x4a\x33\x63\x64\x65\x7c\x5d\x7b\xa7\xa8\x2e\x55\xa1\x77\x6d\xf8\xdb\x32\x20\x3a\xd9\xc5\x55\x43\x01\x6c\x02\x35\xb1\xbf\x00\x3c\x90\xcb\xfd\x83\xbc\xbd\x45\xb7\x83\x0c\x0e\x83\x18\x40\x37\xf4\xab\xe7\x98\xc2\xe1\x20\x87\xf3\xf1\xf8\xc3\x52\x7d\xed\x43\x4c\x41\xf4\xfa\xf6\x72\x3c\x8a\xd9\xc0\x58\xfa\x0b\x03\x93\x77\x5d\x88\xf9\xba\x8c\x92\xeb\xa8\xdf\x35\x74\x13\x6c\x67\x34\x5f\x9f\x31\x5a\x46\x3b\x15\x2d\x8d\x0f\x87\x04\xa8\x80\x96\xa4\x77\xc6\x20\xf3\xef\xb8\x87\xe3\x71\xae\x7b\x14\xf6\xc0\xf2\x9a\xdc\x07\x34\x01\xe3\x69\x10\x40\xcb\x78\xe2\x37\x5b\xbe\xe4\xe9\x72\x48\x16\x9e\xdb\x60\xaf\x03\x16\xf4\x98\x82\x50\x4b\x82\xfa\xe6\xfd\xe9\x73\x4c\x41\x18\x4b\xa7\xe4\xdd\xdc\xfc\xf6\x7e\x64\xae\x3d\x2f\xb3\xa0\xd3\x77\x16\x3f\xa2\xc5\x1a\x63\xe8\xa1\x9c\xc9\x4e\x7a\xd5\x8d\xc7\xb5\x8e\x55\x0a\xdf\x32\x58\x8b\xc2\x18\xc3\x0e\xc3\xd4\x8a\xb6\xf9\x27\xa2\xc5\x08\x1c\xd7\xfd\xbc\xa9\x79\xe0\x36\x6a\x7c\x08\x9b\x3b\x9f\xef\xc7\x81\x8c\x7b\x8b\xfd\x59\xd6\x9a\xdc\xcf\xde\x45\x74\x71\x9a\xc5\x9c\xb8\xb1\x7a\x9f\xc2\x2b\xe7\x1d\xbe\x5a\x0f\x62\xdf\x68\x43\x71\x9f\xc2\x6a\x94\xc4\xa0\x1d\x53\xec\x2a\x1b\xb4\x20\x5f\xaf\x18\x2c\x39\xd4\xa1\x37\x3b\x3e\x49\x24\xb9\xf2\x0f\xcf\x64\xbb\xb3\xde\x7c\x7a\x9a\xee\xf2\x1b\xd2\x6d\xd4\x00\xb2\xbf\xe5\xb4\x03\xca\x33\x61\xbd\xce\xc9\x95\x62\x7c\x8c\x9d\xc2\x58\xcd\x9c\x89\x46\x97\x98\x8c\x06\xd0\xb9\x67\x62\xb0\xab\xc9\x25\x15\x52\x59\xc5\x14\x2e\x57\xab\x5d\xb5\x9e\x5e\x73\xde\x36\xf9\x72\xb5\xfa\xff\xfa\x1c\x4f\x61\xf1\x71\x14\x6a\x4b\xa5\x4b\x28\x62\xcd\x29\x18\x74\x11\xc3\xa8\x2a\xbc\x8b\x49\xa1\x6b\xb2\xfb\x14\x58\x3b\x4e\x18\x03\x15\xa3\xfa\x7e\xcb\x91\x8a\x7d\x62\x7a\xee\xce\xbd\x27\x28\x6d\x5f\x1b
\xed\x46\x34\xe7\x08\x86\x3c\x4c\x9f\x31\x85\xd7\x58\xaf\x27\x79\xad\x43\x49\x2e\x89\xbe\x49\x21\xf9\x71\xa9\x31\xde\xfa\x90\xc2\xff\xde\xbe\x69\x7f\xb3\x7c\x91\xf3\x8f\x9e\x2f\x29\xe5\xc8\xa8\x6a\xab\x98\xf8\x55\x39\xed\x86\xa9\x5b\x1c\xc7\x76\x0c\x98\xc4\x58\xf4\x62\x46\xc4\xd5\xd2\x61\xde\xe1\x1d\x2a\xae\x10\xe3\xf9\xe2\x36\xb9\xbb\x67\x69\xac\xdf\xe6\x85\xd5\x01\xa5\xf1\xb5\xd2\xf7\xfa\x51\x59\xba\x63\xd5\xc1\xd7\x0f\xc8\xbe\x46\xf5\x46\xfe\x24\x57\xca\xf0\xa9\x58\xd6\xe4\xa4\x61\x16\x6a\x98\x9b\xc3\xa1\x5b\x6d\xc6\xd2\x87\xa8\x23\x99\xf7\x14\xda\x15\xf5\x72\x4d\x8a\x3b\x7b\xd5\x02\xea\x22\x82\xa9\x74\x60\x8c\x99\xb8\xf9\xf8\x4b\xf2\x56\x9c\x7e\x1b\x80\x83\x99\x9d\x76\xe8\x72\x1f\xe4\xfd\x53\xaf\xab\xe5\xdb\xfe\x17\xef\x2e\xe5\x97\x7d\x47\x6c\xe3\xca\xfd\x7a\x8e\xff\xcb\xc7\xb1\xdd\xd9\xdd\x69\xda\x54\x13\x1d\xb2\xfc\xfc\x55\x8c\x7c\xa7\xb4\x13\xa1\xcf\xe5\xfd\x12\xa7\xdf\x13\xf6\x8b\xd9\x2f\xe6\xaf\x59\xdf\x93\x8d\xea\x77\xf6\x46\xb5\xff\x71\xae\x2e\xfe\x09\x00\x00\xff\xff\xdd\xc2\x97\xba\xeb\x08\x00\x00") + +func assetsLatestConsoleHtmlBytes() ([]byte, error) { + return bindataRead( + _assetsLatestConsoleHtml, + "assets/latest/console.html", + ) +} + +func assetsLatestConsoleHtml() (*asset, error) { + bytes, err := assetsLatestConsoleHtmlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "assets/latest/console.html", size: 2283, mode: os.FileMode(420), modTime: time.Unix(1579063085, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + var _assetsUnversionedConsoleHtml = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x56\x4d\x8f\xdb\x36\x10\xbd\xef\xaf\x18\xb0\x28\x72\xa9\x48\x27\x0d\xd0\x40\xb6\x16\x08\x1a\x14\x2d\xda\x02\x01\x92\xed\x75\xc1\xa5\x46\xd2\x6c\x28\x52\xe5\xd0\xf6\x3a\x86\xff\x7b\xa1\x6f\xd9\xbb\xcd\xa6\x45\xe0\x0b\xf9\xe6\x8b\xef\x71\x38\xf2\xa6\x8a\xb5\x05\xab\x5d\x99\x09\x74\xc9\x96\xc5\xf5\x15\xc0\xa6\x42\x9d\xb7\x0b\x80\x8d\x25\xf7\x09\x02\xda\x4c\x90\xf1\x4e\x40\x3c\x34\x98\x09\xaa\x75\x89\xaa\x71\xa5\x80\x2a\x60\x91\x89\x2a\xc6\x86\x53\xa5\xca\xa0\x9b\xea\x6f\x9b\xa0\x2b\xc9\x61\x62\x72\x27\x2b\xcd\xdb\xa0\x25\x79\x65\xbc\x63\x6f\x51\x69\x66\x8c\xac\x8c\xaf\x6b\xef\x14\xd5\xa5\x2a\xf4\xae\x4d\x7f\x5b\x06\x44\x27\xbb\xbc\x6a\x38\x00\x9b\x40\x4d\xec\x37\x00\x7b\x72\xb9\xdf\xcb\xdb\x5b\x74\x3b\xc8\xe0\x38\xc0\x00\xba\xa1\x5f\x3d\xc7\x14\x8e\x47\x39\xac\x4f\xa7\x1f\x96\xe6\xf7\x3e\xc4\x14\x44\x6f\x6f\x37\xa7\x93\x98\x1d\x8c\xa5\xbf\x30\x30\x79\xd7\xa5\x98\xb7\xcb\x2c\xb9\x8e\xfa\x6d\x43\x37\xc1\x76\x4e\xf3\xf6\x09\xa7\x65\xb6\x73\x68\xe9\x7c\x3c\x26\x40\x05\xb4\x22\xbd\x35\x06\x99\x7f\xc7\x03\x9c\x4e\xf3\xb9\x47\xb0\x27\x96\xd7\xe4\x3e\xa0\x09\x18\xcf\x93\x00\x5a\xc6\xb3\xb8\xd9\xf3\xb9\x48\x97\x43\xb2\x88\xdc\x06\xfb\x3e\x60\x41\x0f\x29\x08\xb5\x14\xa8\xbf\xbc\x3f\x7d\x8e\x29\x08\x63\xe9\x5c\xbc\x9b\x9b\xdf\xde\x8d\xca\xb5\xeb\x65\x15\x74\xfa\xce\xe2\x47\xb4\x58\x63\x0c\x3d\x95\x0b\x6c\xe9\xce\x18\x76\x18\x26\xfd\xda\x1b\x3b\x83\x16\xf7\x76\x5a\xf7\x4d\xa2\xe6\x2e\xd9\xa8\xb1\x7b\x37\x77\x3e\x3f\x8c\x5d\x14\x0f\x16\xfb\xb5\xac\x35\xb9\x9f\xbd\x8b\xe8\xe2\xd4\x40\x39\x71\x63\xf5\x21\x85\x17\xce\x3b\x7c\xb1\x1e\x60\xdf\x68\x43\xf1\x90\xc2\x6a\x44\x62\xd0\x8e\x29\x76\x27\x1b\xac\x20\x5f\xad\x18\x2c\x39\xd4\xa1\x77\x3b\x3d\x2a\x24\xb9\xf2\xfb\x27\xaa\xdd\x59\x6f\x3e\x3d\x2e\xf7\xf2\x7f\x94\xdb\xa8\x81\x64\xbf\xcb\x69\x07\x94\x67\xc2\x7a\x9d\x93\x2b\xc5\xf8\x82\x3a\x83\xb1\x9a\x39\x13\x8d\x2e\x31\x19\x1d\xa0\x0b\xcf\xc4\xe0\x57\x93\x4b\x2a\xa4\xb2\x8a\x29\xbc\x5c\xad\x76\xd5\x7a\x7a\x82\x79\xac\x3a\xec\xfb\xf5\x25\x9f\xc2\xe
2\xc3\x08\x6a\x4b\xa5\x4b\x28\x62\xcd\x29\x18\x74\x11\xc3\x68\x2a\xbc\x8b\x49\xa1\x6b\xb2\x87\x14\x58\x3b\x4e\x18\x03\x15\xa3\xf9\x7e\xcb\x91\x8a\x43\x62\x7a\xed\x2e\xa3\x27\x2a\xed\xbd\x36\xda\x8d\x6c\x2e\x19\x0c\x75\x98\x3e\x63\x0a\xaf\xb0\x5e\x4f\x78\xad\x43\x49\x2e\x89\xbe\x49\x21\xf9\x71\x69\x31\xde\xfa\x90\xc2\x77\x6f\x5e\xb7\xbf\x19\x5f\xd4\xfc\xa3\xd7\x4b\x4a\x39\x2a\xaa\xda\x53\x4c\xfa\xaa\x9c\x76\x43\xd7\x2d\x96\xe3\x75\x0c\x9c\xc4\x78\xe8\x45\x8f\x88\xeb\x65\xc0\x3c\x78\x3b\x56\x5c\x21\xc6\xcb\x69\x6b\x72\x77\xcf\xd2\x58\xbf\xcd\x0b\xab\x03\x4a\xe3\x6b\xa5\xef\xf5\x83\xb2\x74\xc7\xaa\xa3\xaf\xf7\xc8\xbe\x46\xf5\x5a\xfe\x24\x57\xca\xf0\x39\x2c\x6b\x72\xd2\x30\x0b\x35\xf4\xcd\xf1\xd8\xcd\x23\x63\xe9\x43\xd4\x91\xcc\x3b\x0a\xed\x5c\x79\xfe\x4c\x8a\x3b\x7f\xd5\x12\xea\x32\x82\xa9\x74\x60\x8c\x99\xb8\xf9\xf8\x4b\xf2\x46\x9c\x0f\x74\xe0\x60\xe6\xa0\x1d\xba\xdc\x07\x79\xff\x38\xea\x7a\xf9\xb6\xff\x25\xba\x2b\xf9\xe5\xd8\x91\xdb\x38\x27\xbf\x5e\xe3\xff\xf2\x45\x6b\x07\x6d\xb7\x9a\x26\xd5\x24\x87\x2c\x3f\x7f\x95\x22\xdf\xa8\xec\x24\xe8\x53\x75\xbf\xa4\xe9\xb7\xa4\xfd\x6c\xf5\xab\xf9\x13\xd4\xdf\xc9\x46\xf5\x33\x7b\xa3\xda\x3f\x26\xd7\x57\xff\x04\x00\x00\xff\xff\x7d\x6e\x65\x2c\xa0\x08\x00\x00") func assetsUnversionedConsoleHtmlBytes() ([]byte, error) { @@ -85,7 +106,7 @@ func assetsUnversionedConsoleHtml() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "assets/unversioned/console.html", size: 2208, mode: os.FileMode(420), modTime: time.Unix(1576470932, 0)} + info := bindataFileInfo{name: "assets/unversioned/console.html", size: 2208, mode: os.FileMode(420), modTime: time.Unix(1576481548, 0)} a := &asset{bytes: bytes, info: info} return a, nil } @@ -105,7 +126,7 @@ func assetsV10AlphaConsoleHtml() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "assets/v1.0-alpha/console.html", size: 1928, mode: os.FileMode(420), modTime: time.Unix(1576470932, 0)} + info := bindataFileInfo{name: "assets/v1.0-alpha/console.html", size: 1928, mode: os.FileMode(420), modTime: time.Unix(1576481548, 0)} a := &asset{bytes: bytes, info: info} return a, nil } @@ -125,7 +146,7 @@ func assetsV10ConsoleHtml() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "assets/v1.0/console.html", size: 2283, mode: os.FileMode(420), modTime: time.Unix(1576812491, 0)} + info := bindataFileInfo{name: "assets/v1.0/console.html", size: 2283, mode: os.FileMode(420), modTime: time.Unix(1577362631, 0)} a := &asset{bytes: bytes, info: info} return a, nil } @@ -182,6 +203,7 @@ func AssetNames() []string { // _bindata is a table, holding each asset generator, mapped to its name. var _bindata = map[string]func() (*asset, error){ + "assets/latest/console.html": assetsLatestConsoleHtml, "assets/unversioned/console.html": assetsUnversionedConsoleHtml, "assets/v1.0-alpha/console.html": assetsV10AlphaConsoleHtml, "assets/v1.0/console.html": assetsV10ConsoleHtml, @@ -228,6 +250,9 @@ type bintree struct { } var _bintree = &bintree{nil, map[string]*bintree{ "assets": &bintree{nil, map[string]*bintree{ + "latest": &bintree{nil, map[string]*bintree{ + "console.html": &bintree{assetsLatestConsoleHtml, map[string]*bintree{}}, + }}, "unversioned": &bintree{nil, map[string]*bintree{ "console.html": &bintree{assetsUnversionedConsoleHtml, map[string]*bintree{}}, }}, diff --git a/cli/assets/latest/console.html b/cli/assets/latest/console.html new file mode 100644 index 0000000000000..59610b012cc88 --- /dev/null +++ b/cli/assets/latest/console.html @@ -0,0 +1,75 @@ + + + + + + + + +
+
+ + Loading... + +
+
+
+ + + {{ if .cliStaticDir }} + + + + + + {{ else }} + + + + + + {{ end }} + + + diff --git a/cli/cli.go b/cli/cli.go index 505a5888f9794..e377921bcf33e 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -298,7 +298,7 @@ func (ec *ExecutionContext) checkServerVersion() error { ec.Logger.Debugf("versions: cli: [%s] server: [%s]", ec.Version.GetCLIVersion(), ec.Version.GetServerVersion()) ec.Logger.Debugf("compatibility check: [%v] %v", isCompatible, reason) if !isCompatible { - return errors.Errorf("[cli: %s] [server: %s] versions incompatible: %s", ec.Version.GetCLIVersion(), ec.Version.GetServerVersion(), reason) + ec.Logger.Warnf("[cli: %s] [server: %s] version mismatch: %s", ec.Version.GetCLIVersion(), ec.Version.GetServerVersion(), reason) } return nil } diff --git a/cli/commands/console.go b/cli/commands/console.go index d87dc012014e4..21b3ed4caa4c2 100644 --- a/cli/commands/console.go +++ b/cli/commands/console.go @@ -275,6 +275,10 @@ func serveConsole(assetsVersion, staticDir string, opts gin.H) (*gin.Engine, err // An Engine instance with the Logger and Recovery middleware already attached. r := gin.New() + if !util.DoAssetExist("assets/" + assetsVersion + "/console.html") { + assetsVersion = "latest" + } + // Template console.html templateRender, err := util.LoadTemplates("assets/"+assetsVersion+"/", "console.html") if err != nil { diff --git a/cli/commands/update-cli.go b/cli/commands/update-cli.go index f4f3430978f60..2c0d28f2a4e08 100644 --- a/cli/commands/update-cli.go +++ b/cli/commands/update-cli.go @@ -59,6 +59,8 @@ func (o *updateOptions) run(showPrompt bool) error { return errors.Wrap(err, "command: check update") } + ec.Logger.Debugln("hasUpdate: ", hasUpdate, "latestVersion: ", latestVersion, "currentVersion:", currentVersion) + if !hasUpdate { o.EC.Logger.WithField("version", currentVersion).Info("hasura cli is up to date") return nil diff --git a/cli/update/update.go b/cli/update/update.go index f047a955216bf..914727644d536 100644 --- a/cli/update/update.go +++ b/cli/update/update.go @@ -99,7 +99,7 @@ func HasUpdate(currentVersion *semver.Version, timeFile string) (bool, *semver.V return false, nil, errors.Wrap(err, "get latest version") } - c, err := semver.NewConstraint(fmt.Sprintf("> %s", currentVersion.String())) + c, err := semver.NewConstraint(fmt.Sprintf("> %s-0", currentVersion.String())) if err != nil { return false, nil, errors.Wrap(err, "semver constraint build") } diff --git a/cli/util/template.go b/cli/util/template.go index 830a4d89f0fb3..e8d9dd4846faf 100644 --- a/cli/util/template.go +++ b/cli/util/template.go @@ -29,6 +29,7 @@ func (b *binaryFileSystem) Exists(prefix string, filepath string) bool { return false } +// BinaryFileSystem creates a binary file system at root from the assets func BinaryFileSystem(root string) *binaryFileSystem { fs := &assetfs.AssetFS{assets.Asset, assets.AssetDir, assets.AssetInfo, root} return &binaryFileSystem{ @@ -36,6 +37,7 @@ func BinaryFileSystem(root string) *binaryFileSystem { } } +// LoadTemplates loads templates from path for the given list func LoadTemplates(path string, list ...string) (multitemplate.Render, error) { r := multitemplate.New() @@ -55,3 +57,12 @@ func LoadTemplates(path string, list ...string) (multitemplate.Render, error) { return r, nil } + +// DoAssetExist returns true if an asset exists at pathk +func DoAssetExist(path string) bool { + _, err := assets.AssetInfo(path) + if err != nil { + return false + } + return true +} diff --git a/cli/version/compatibility.go b/cli/version/compatibility.go index 
369632648727a..b2bbd75c3d3af 100644 --- a/cli/version/compatibility.go +++ b/cli/version/compatibility.go @@ -1,12 +1,12 @@ package version const ( - untaggedBuild = "for untagged builds, server and cli versions should match" - taggedBuild = "cli version (major.minor) should be equal or ahead of server version, please update cli" + untaggedBuild = "untagged build, there could be inconsistencies" + taggedBuild = "older cli version might not be compatible with latest server apis, please update cli" noServerVersion = "server with no version treated as pre-release build" noCLIVersion = "cli version is empty, indicates a broken build" untaggedCLI = "untagged cli build can work with tagged server build" - devCLI = "dev version of cli, compatible with all servers" + devCLI = "dev version of cli, there could be inconsistencies" ) // CheckCLIServerCompatibility compares server and cli for compatibility, From 9ed8f717a76c427d69efdc8fa55396356d9034be Mon Sep 17 00:00:00 2001 From: Phil Freeman Date: Wed, 15 Jan 2020 20:53:28 -0800 Subject: [PATCH 2/3] remove hdb_views for inserts (#3598) * WIP: Remove hdb_views for inserts * Show failing row in check constraint error * Revert "Show failing row in check constraint error" This reverts commit dd2cac29d0dbef350695e25e4fb27b9401ff9006. * Use the better query plan * Simplify things * fix cli test * Update downgrading.rst * remove 1.1 asset for cli --- .../graphql/manual/deployment/downgrading.rst | 6 + server/graphql-engine.cabal | 1 - .../src-lib/Hasura/GraphQL/Resolve/Insert.hs | 60 ++++---- .../src-lib/Hasura/GraphQL/Resolve/Types.hs | 4 +- server/src-lib/Hasura/GraphQL/Schema.hs | 13 +- server/src-lib/Hasura/RQL/DDL/Permission.hs | 136 ++---------------- .../Hasura/RQL/DDL/Permission/Internal.hs | 23 +-- .../Hasura/RQL/DDL/Permission/Triggers.hs | 34 ----- .../Hasura/RQL/DDL/Schema/Cache/Permission.hs | 17 +-- server/src-lib/Hasura/RQL/DML/Count.hs | 2 +- server/src-lib/Hasura/RQL/DML/Insert.hs | 114 +++++++-------- server/src-lib/Hasura/RQL/DML/Internal.hs | 2 +- .../src-lib/Hasura/RQL/DML/Select/Internal.hs | 2 +- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 1 - server/src-lib/Hasura/SQL/DML.hs | 29 ++-- server/src-lib/Hasura/SQL/Rewrite.hs | 2 +- server/src-lib/Hasura/SQL/Types.hs | 4 - .../src-lib/Hasura/Server/Migrate/Version.hs | 2 +- server/src-lib/Hasura/Server/Query.hs | 8 +- server/src-rsr/initialise.sql | 7 + server/src-rsr/insert_trigger.sql.shakespeare | 34 ----- server/src-rsr/migrations/28_to_29.sql | 2 +- server/src-rsr/migrations/29_to_30.sql | 6 + 23 files changed, 161 insertions(+), 348 deletions(-) delete mode 100644 server/src-lib/Hasura/RQL/DDL/Permission/Triggers.hs delete mode 100644 server/src-rsr/insert_trigger.sql.shakespeare create mode 100644 server/src-rsr/migrations/29_to_30.sql diff --git a/docs/graphql/manual/deployment/downgrading.rst b/docs/graphql/manual/deployment/downgrading.rst index 7bf76cf2632c5..7afd98f8ca330 100644 --- a/docs/graphql/manual/deployment/downgrading.rst +++ b/docs/graphql/manual/deployment/downgrading.rst @@ -103,6 +103,12 @@ You can downgrade the catalogue from a particular version to its previous versio :backlinks: none :depth: 1 :local: + +From 30 to 29 +""""""""""""" +.. 
code-block:: plpgsql + + DROP FUNCTION hdb_catalog.check_violation(); From 27 to 26 """"""""""""" diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index e44a44b85015e..b3e008187aad1 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -280,7 +280,6 @@ library , Hasura.RQL.DDL.Relationship , Hasura.RQL.DDL.Deps , Hasura.RQL.DDL.Permission.Internal - , Hasura.RQL.DDL.Permission.Triggers , Hasura.RQL.DDL.Permission , Hasura.RQL.DDL.Relationship.Rename , Hasura.RQL.DDL.Relationship.Types diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs index e389cb01bd833..d1024f3b68cbb 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs @@ -29,7 +29,10 @@ import Hasura.GraphQL.Resolve.Mutation import Hasura.GraphQL.Resolve.Select import Hasura.GraphQL.Validate.Field import Hasura.GraphQL.Validate.Types -import Hasura.RQL.DML.Internal (convPartialSQLExp, dmlTxErrorHandler, +import Hasura.RQL.DML.Insert (insertCheckExpr) +import Hasura.RQL.DML.Internal (convPartialSQLExp, + convAnnBoolExpPartialSQL, + dmlTxErrorHandler, sessVarFromCurrentSetting) import Hasura.RQL.DML.Mutation import Hasura.RQL.GBoolExp (toSQLBoolExp) @@ -47,7 +50,7 @@ data AnnIns a = AnnIns { _aiInsObj :: !a , _aiConflictClause :: !(Maybe RI.ConflictClauseP1) - , _aiView :: !QualifiedTable + , _aiCheckCond :: AnnBoolExpPartialSQL , _aiTableCols :: ![PGColumnInfo] , _aiDefVals :: !(Map.HashMap PGCol S.SQLExp) } deriving (Show, Eq, Functor, Foldable, Traversable) @@ -131,7 +134,7 @@ traverseInsObj rim allColMap (gName, annVal) defVal@(AnnInsObj cols objRels arrR throw500 $ "relation " <> relName <<> " not found" let rTable = riRTable relInfo - InsCtx rtView rtColMap rtDefVals rtRelInfoMap rtUpdPerm <- getInsCtx rTable + InsCtx rtColMap checkCond rtDefVals rtRelInfoMap rtUpdPerm <- getInsCtx rTable let rtCols = Map.elems rtColMap rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) rtDefVals @@ -140,7 +143,7 @@ traverseInsObj rim allColMap (gName, annVal) defVal@(AnnInsObj cols objRels arrR dataObj <- asObject dataVal annDataObj <- mkAnnInsObj rtRelInfoMap rtColMap dataObj ccM <- forM onConflictM $ parseOnConflict rTable rtUpdPerm rtColMap - let singleObjIns = AnnIns annDataObj ccM rtView rtCols rtDefValsRes + let singleObjIns = AnnIns annDataObj ccM checkCond rtCols rtDefValsRes objRelIns = RelIns singleObjIns relInfo return (AnnInsObj cols (objRelIns:objRels) arrRels) @@ -151,8 +154,7 @@ traverseInsObj rim allColMap (gName, annVal) defVal@(AnnInsObj cols objRels arrR dataObj <- asObject arrDataVal mkAnnInsObj rtRelInfoMap rtColMap dataObj ccM <- forM onConflictM $ parseOnConflict rTable rtUpdPerm rtColMap - let multiObjIns = AnnIns annDataObjs ccM rtView - rtCols rtDefValsRes + let multiObjIns = AnnIns annDataObjs ccM checkCond rtCols rtDefValsRes arrRelIns = RelIns multiObjIns relInfo return (AnnInsObj cols objRels (arrRelIns:arrRels)) -- if array relation insert input data has empty objects @@ -224,20 +226,26 @@ mkInsertQ -> [PGColWithValue] -> Map.HashMap PGCol S.SQLExp -> RoleName - -> m (CTEExp, Maybe RI.ConflictCtx) -mkInsertQ vn onConflictM insCols defVals role = do + -> AnnBoolExpSQL + -> m CTEExp +mkInsertQ tn onConflictM insCols defVals role checkCond = do (givenCols, args) <- flip runStateT Seq.Empty $ toSQLExps insCols let sqlConflict = RI.toSQLConflict <$> onConflictM sqlExps = mkSQLRow defVals givenCols valueExp = S.ValuesExp [S.TupleExp sqlExps] 
tableCols = Map.keys defVals sqlInsert = - S.SQLInsert vn tableCols valueExp sqlConflict $ Just S.returningStar - adminIns = return (CTEExp (S.CTEInsert sqlInsert) args, Nothing) + S.SQLInsert tn tableCols valueExp sqlConflict + . Just + $ S.RetExp + [ S.selectStar + , insertCheckExpr (toSQLBoolExp (S.QualTable tn) checkCond) + ] + + adminIns = return (CTEExp (S.CTEInsert sqlInsert) args) nonAdminInsert = do - ccM <- mapM RI.extractConflictCtx onConflictM - let cteIns = S.CTEInsert sqlInsert{S.siConflict=Nothing} - return (CTEExp cteIns args, ccM) + let cteIns = S.CTEInsert sqlInsert + return (CTEExp cteIns args) bool nonAdminInsert adminIns $ isAdmin role @@ -400,10 +408,11 @@ insertObj strfyNum role tn singleObjIns addCols = do finalInsCols = cols <> objRelDeterminedCols <> addCols -- prepare insert query as with expression - (CTEExp cte insPArgs, ccM) <- - mkInsertQ vn onConflictM finalInsCols defVals role + checkExpr <- convAnnBoolExpPartialSQL sessVarFromCurrentSetting checkCond + + CTEExp cte insPArgs <- + mkInsertQ tn onConflictM finalInsCols defVals role checkExpr - RI.setConflictCtx ccM MutateResp affRows colVals <- mutateAndFetchCols tn allCols (cte, insPArgs) strfyNum colValM <- asSingleObject colVals cteExp <- mkSelCTE tn allCols colValM @@ -413,7 +422,7 @@ insertObj strfyNum role tn singleObjIns addCols = do return (totAffRows, cteExp) where - AnnIns annObj onConflictM vn allCols defVals = singleObjIns + AnnIns annObj onConflictM checkCond allCols defVals = singleObjIns AnnInsObj cols objRels arrRels = annObj arrRelDepCols = flip getColInfos allCols $ @@ -445,7 +454,7 @@ insertMultipleObjects insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP = bool withoutRelsInsert withRelsInsert anyRelsToInsert where - AnnIns insObjs onConflictM vn tableColInfos defVals = multiObjIns + AnnIns insObjs onConflictM checkCond tableColInfos defVals = multiObjIns singleObjInserts = multiToSingles multiObjIns insCols = map _aioColumns insObjs allInsObjRels = concatMap _aioObjRels insObjs @@ -465,10 +474,13 @@ insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP = (sqlRows, prepArgs) <- flip runStateT Seq.Empty $ do rowsWithCol <- mapM toSQLExps withAddCols return $ map (mkSQLRow defVals) rowsWithCol - - let insQP1 = RI.InsertQueryP1 tn vn tableCols sqlRows onConflictM mutFlds tableColInfos + + checkExpr <- convAnnBoolExpPartialSQL sessVarFromCurrentSetting checkCond + + let insQP1 = RI.InsertQueryP1 tn tableCols sqlRows onConflictM + (Just checkExpr) mutFlds tableColInfos p1 = (insQP1, prepArgs) - bool (RI.nonAdminInsert strfyNum p1) (RI.insertP2 strfyNum p1) $ isAdmin role + RI.insertP2 strfyNum p1 -- insert each object with relations withRelsInsert = withErrPath $ do @@ -513,14 +525,14 @@ convertInsert role tn fld = prefixErrPath fld $ do (withEmptyObjs mutFldsRes) $ null annVals where withNonEmptyObjs annVals mutFlds = do - InsCtx vn tableColMap defValMap relInfoMap updPerm <- getInsCtx tn + InsCtx tableColMap checkCond defValMap relInfoMap updPerm <- getInsCtx tn annObjs <- mapM asObject annVals annInsObjs <- forM annObjs $ mkAnnInsObj relInfoMap tableColMap conflictClauseM <- forM onConflictM $ parseOnConflict tn updPerm tableColMap defValMapRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) defValMap - let multiObjIns = AnnIns annInsObjs conflictClauseM - vn tableCols defValMapRes + let multiObjIns = AnnIns annInsObjs conflictClauseM checkCond + tableCols defValMapRes tableCols = Map.elems tableColMap strfyNum <- stringifyNum <$> asks getter 
return $ prefixErrPath fld $ insertMultipleObjects strfyNum role tn diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs index 1d0d14ec9bc05..07ba9f4d5e26f 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs @@ -179,8 +179,8 @@ data UpdPermForIns data InsCtx = InsCtx - { icView :: !QualifiedTable - , icAllCols :: !PGColGNameMap + { icAllCols :: !PGColGNameMap + , icCheck :: !AnnBoolExpPartialSQL , icSet :: !PreSetColsPartial , icRelations :: !RelationInfoMap , icUpdPerm :: !(Maybe UpdPermForIns) diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 09f1b3e1c0eb3..099eaf28f396d 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -509,13 +509,13 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do isInsertable insPermM viewInfoM && isValidRel relName remoteTable let relInfoMap = Map.fromList $ catMaybes relTupsM - return $ InsCtx iView gNamePGColMap setCols relInfoMap updPermForIns + return $ InsCtx gNamePGColMap checkCond setCols relInfoMap updPermForIns where gNamePGColMap = mkPGColGNameMap allCols allCols = getCols fields rels = getValidRels fields - iView = ipiView insPermInfo setCols = ipiSet insPermInfo + checkCond = ipiCheck insPermInfo updPermForIns = mkUpdPermForIns <$> updPermM mkUpdPermForIns upi = UpdPermForIns (toList $ upiCols upi) (upiFilter upi) (upiSet upi) @@ -525,11 +525,10 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do mkAdminInsCtx :: MonadError QErr m - => QualifiedTable - -> TableCache + => TableCache -> FieldInfoMap FieldInfo -> m InsCtx -mkAdminInsCtx tn tc fields = do +mkAdminInsCtx tc fields = do relTupsM <- forM rels $ \relInfo -> do let remoteTable = riRTable relInfo relName = riName relInfo @@ -541,7 +540,7 @@ mkAdminInsCtx tn tc fields = do let relInfoMap = Map.fromList $ catMaybes relTupsM updPerm = UpdPermForIns updCols noFilter Map.empty - return $ InsCtx tn colGNameMap Map.empty relInfoMap (Just updPerm) + return $ InsCtx colGNameMap noFilter Map.empty relInfoMap (Just updPerm) where allCols = getCols fields colGNameMap = mkPGColGNameMap allCols @@ -667,7 +666,7 @@ mkGCtxMapTable tableCache funcCache tabInfo = do m <- flip Map.traverseWithKey rolePerms $ mkGCtxRole tableCache tn descM fields primaryKey validConstraints tabFuncs viewInfo customConfig - adminInsCtx <- mkAdminInsCtx tn tableCache fields + adminInsCtx <- mkAdminInsCtx tableCache fields adminSelFlds <- mkAdminSelFlds fields tableCache let adminCtx = mkGCtxRole' tn descM (Just (cols, icRelations adminInsCtx)) (Just (True, adminSelFlds)) (Just cols) (Just ()) diff --git a/server/src-lib/Hasura/RQL/DDL/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Permission.hs index 4b26a912320b7..b6e3e9abfaf2d 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission.hs @@ -7,37 +7,26 @@ module Hasura.RQL.DDL.Permission , InsPerm(..) , InsPermDef , CreateInsPerm - , clearInsInfra - , buildInsInfra , buildInsPermInfo - , DropInsPerm - , dropInsPermP2 , SelPerm(..) , SelPermDef , CreateSelPerm , buildSelPermInfo - , DropSelPerm - , dropSelPermP2 , UpdPerm(..) , UpdPermDef , CreateUpdPerm , buildUpdPermInfo - , DropUpdPerm - , dropUpdPermP2 , DelPerm(..) , DelPermDef , CreateDelPerm , buildDelPermInfo - , DropDelPerm - , dropDelPermP2 , IsPerm(..) 
, addPermP2 - , dropView , DropPerm , runDropPerm @@ -51,21 +40,17 @@ import Hasura.EncJSON import Hasura.Incremental (Cacheable) import Hasura.Prelude import Hasura.RQL.DDL.Permission.Internal -import Hasura.RQL.DDL.Permission.Triggers import Hasura.RQL.DML.Internal hiding (askPermInfo) -import Hasura.RQL.GBoolExp import Hasura.RQL.Types import Hasura.SQL.Types import qualified Database.PG.Query as Q -import qualified Hasura.SQL.DML as S import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH import Language.Haskell.TH.Syntax (Lift) -import qualified Crypto.Hash as CH import qualified Data.HashMap.Strict as HM import qualified Data.HashSet as HS import qualified Data.Text as T @@ -83,31 +68,6 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''InsPerm) type InsPermDef = PermDef InsPerm type CreateInsPerm = CreatePerm InsPerm -buildViewName :: QualifiedTable -> RoleName -> PermType -> QualifiedTable -buildViewName qt rn pt = QualifiedObject hdbViewsSchema tableName - where - -- Generate a unique hash for view name from role name, permission type and qualified table. - -- See Note [Postgres identifier length limitations]. - -- Black2b_224 generates 56 character hash. See Note [Blake2b faster than SHA-256]. - -- Refer https://github.com/hasura/graphql-engine/issues/3444. - tableName = TableName $ T.pack $ show hash - hash :: CH.Digest CH.Blake2b_224 = - CH.hash $ txtToBs $ roleNameToTxt rn <> "__" <> T.pack (show pt) <> "__" <> qualObjectToText qt - -buildView :: QualifiedTable -> QualifiedTable -> Q.Query -buildView tn vn = - Q.fromBuilder $ mconcat - [ "CREATE VIEW " <> toSQL vn - , " AS SELECT * FROM " <> toSQL tn - ] - -dropView :: QualifiedTable -> Q.Tx () -dropView vn = - Q.unitQ dropViewS () False - where - dropViewS = Q.fromBuilder $ - "DROP VIEW IF EXISTS " <> toSQL vn - procSetObj :: (QErrM m) => QualifiedTable @@ -136,7 +96,7 @@ buildInsPermInfo -> FieldInfoMap FieldInfo -> PermDef InsPerm -> m (WithDeps InsPermInfo) -buildInsPermInfo tn fieldInfoMap (PermDef rn (InsPerm chk set mCols) _) = +buildInsPermInfo tn fieldInfoMap (PermDef _rn (InsPerm chk set mCols) _) = withPathK "permission" $ do (be, beDeps) <- withPathK "check" $ procBoolExp tn fieldInfoMap chk (setColsSQL, setHdrs, setColDeps) <- procSetObj tn fieldInfoMap set @@ -147,56 +107,19 @@ buildInsPermInfo tn fieldInfoMap (PermDef rn (InsPerm chk set mCols) _) = insColDeps = map (mkColDep DRUntyped tn) insCols deps = mkParentDep tn : beDeps ++ setColDeps ++ insColDeps insColsWithoutPresets = insCols \\ HM.keys setColsSQL - return (InsPermInfo (HS.fromList insColsWithoutPresets) vn be setColsSQL reqHdrs, deps) + return (InsPermInfo (HS.fromList insColsWithoutPresets) be setColsSQL reqHdrs, deps) where - vn = buildViewName tn rn PTInsert allCols = map pgiColumn $ getCols fieldInfoMap insCols = fromMaybe allCols $ convColSpec fieldInfoMap <$> mCols -buildInsInfra :: QualifiedTable -> InsPermInfo -> Q.TxE QErr () -buildInsInfra tn (InsPermInfo _ vn be _ _) = do - resolvedBoolExp <- {-# SCC "buildInsInfra/convAnnBoolExpPartialSQL" #-} convAnnBoolExpPartialSQL sessVarFromCurrentSetting be - let trigFnQ = {-# SCC "buildInsInfra/buildInsTrigFn" #-} buildInsTrigFn vn tn $ toSQLBoolExp (S.QualVar "NEW") resolvedBoolExp - {-# SCC "buildInsInfra/execute" #-} Q.catchE defaultTxErrorHandler $ do - -- Create the view - dropView vn - Q.unitQ (buildView tn vn) () False - -- Inject defaults on the view - Q.discardQ (injectDefaults vn tn) () False - -- Construct a trigger function - Q.unitQ trigFnQ () False - -- Add 
trigger for check expression - Q.unitQ (buildInsTrig vn) () False - -clearInsInfra :: QualifiedTable -> Q.TxE QErr () -clearInsInfra vn = - Q.catchE defaultTxErrorHandler $ do - dropView vn - Q.unitQ (dropInsTrigFn vn) () False - -type DropInsPerm = DropPerm InsPerm - -dropInsPermP2 :: (MonadTx m) => DropInsPerm -> QualifiedTable -> m () -dropInsPermP2 = dropPermP2 - type instance PermInfo InsPerm = InsPermInfo instance IsPerm InsPerm where - type DropPermP1Res InsPerm = QualifiedTable - permAccessor = PAInsert buildPermInfo = buildInsPermInfo - addPermP2Setup qt _ = liftTx . buildInsInfra qt - - buildDropPermP1Res dp = - ipiView <$> dropPermP1 dp - - dropPermP2Setup _ vn = - liftTx $ clearInsInfra vn - -- Select constraint data SelPerm = SelPerm @@ -263,29 +186,16 @@ buildSelPermInfo tn fieldInfoMap sp = withPathK "permission" $ do type SelPermDef = PermDef SelPerm type CreateSelPerm = CreatePerm SelPerm -type DropSelPerm = DropPerm SelPerm type instance PermInfo SelPerm = SelPermInfo -dropSelPermP2 :: (MonadTx m) => DropSelPerm -> m () -dropSelPermP2 dp = dropPermP2 dp () - instance IsPerm SelPerm where - type DropPermP1Res SelPerm = () - permAccessor = PASelect buildPermInfo tn fieldInfoMap (PermDef _ a _) = buildSelPermInfo tn fieldInfoMap a - buildDropPermP1Res = - void . dropPermP1 - - addPermP2Setup _ _ _ = return () - - dropPermP2Setup _ _ = return () - -- Update constraint data UpdPerm = UpdPerm @@ -330,27 +240,13 @@ buildUpdPermInfo tn fieldInfoMap (UpdPerm colSpec set fltr) = do type instance PermInfo UpdPerm = UpdPermInfo -type DropUpdPerm = DropPerm UpdPerm - -dropUpdPermP2 :: (MonadTx m) => DropUpdPerm -> m () -dropUpdPermP2 dp = dropPermP2 dp () - instance IsPerm UpdPerm where - type DropPermP1Res UpdPerm = () - permAccessor = PAUpdate buildPermInfo tn fieldInfoMap (PermDef _ a _) = buildUpdPermInfo tn fieldInfoMap a - addPermP2Setup _ _ _ = return () - - buildDropPermP1Res = - void . dropPermP1 - - dropPermP2Setup _ _ = return () - -- Delete permission data DelPerm = DelPerm { dcFilter :: !BoolExp } @@ -374,29 +270,15 @@ buildDelPermInfo tn fieldInfoMap (DelPerm fltr) = do depHeaders = getDependentHeaders fltr return (DelPermInfo tn be depHeaders, deps) -type DropDelPerm = DropPerm DelPerm - -dropDelPermP2 :: (MonadTx m) => DropDelPerm -> m () -dropDelPermP2 dp = dropPermP2 dp () - type instance PermInfo DelPerm = DelPermInfo instance IsPerm DelPerm where - type DropPermP1Res DelPerm = () - permAccessor = PADelete buildPermInfo tn fieldInfoMap (PermDef _ a _) = buildDelPermInfo tn fieldInfoMap a - addPermP2Setup _ _ _ = return () - - buildDropPermP1Res = - void . 
dropPermP1 - - dropPermP2Setup _ _ = return () - data SetPermComment = SetPermComment { apTable :: !QualifiedTable @@ -443,13 +325,13 @@ setPermCommentTx (SetPermComment (QualifiedObject sn tn) rn pt comment) = AND perm_type = $5 |] (comment, sn, tn, rn, permTypeToCode pt) True -purgePerm :: (MonadTx m) => QualifiedTable -> RoleName -> PermType -> m () -purgePerm qt rn pt = - case pt of - PTInsert -> dropInsPermP2 dp $ buildViewName qt rn PTInsert - PTSelect -> dropSelPermP2 dp - PTUpdate -> dropUpdPermP2 dp - PTDelete -> dropDelPermP2 dp +purgePerm :: MonadTx m => QualifiedTable -> RoleName -> PermType -> m () +purgePerm qt rn pt = + case pt of + PTInsert -> dropPermP2 @InsPerm dp + PTSelect -> dropPermP2 @SelPerm dp + PTUpdate -> dropPermP2 @UpdPerm dp + PTDelete -> dropPermP2 @DelPerm dp where dp :: DropPerm a dp = DropPerm qt rn diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs index 10b4d25a875c2..1b5d195de2cdb 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs @@ -239,8 +239,6 @@ type family PermInfo a = r | r -> a class (ToJSON a) => IsPerm a where - type DropPermP1Res a - permAccessor :: PermAccessor (PermInfo a) @@ -251,16 +249,6 @@ class (ToJSON a) => IsPerm a where -> PermDef a -> m (WithDeps (PermInfo a)) - addPermP2Setup - :: (MonadTx m) => QualifiedTable -> PermDef a -> PermInfo a -> m () - - buildDropPermP1Res - :: (QErrM m, CacheRM m, UserInfoM m) - => DropPerm a - -> m (DropPermP1Res a) - - dropPermP2Setup :: (MonadTx m) => DropPerm a -> DropPermP1Res a -> m () - getPermAcc1 :: PermDef a -> PermAccessor (PermInfo a) getPermAcc1 _ = permAccessor @@ -268,7 +256,7 @@ class (ToJSON a) => IsPerm a where getPermAcc2 :: DropPerm a -> PermAccessor (PermInfo a) getPermAcc2 _ = permAccessor - + addPermP2 :: (IsPerm a, MonadTx m, HasSystemDefined m) => QualifiedTable -> PermDef a -> m () addPermP2 tn pd = do let pt = permAccToType $ getPermAcc1 pd @@ -291,9 +279,8 @@ dropPermP1 dp@(DropPerm tn rn) = do tabInfo <- askTabInfo tn askPermInfo tabInfo rn $ getPermAcc2 dp -dropPermP2 :: (MonadTx m, IsPerm a) => DropPerm a -> DropPermP1Res a -> m () -dropPermP2 dp@(DropPerm tn rn) p1Res = do - dropPermP2Setup dp p1Res +dropPermP2 :: forall a m. 
(MonadTx m, IsPerm a) => DropPerm a -> m () +dropPermP2 dp@(DropPerm tn rn) = do liftTx $ dropPermFromCatalog tn rn pt where pa = getPermAcc2 dp @@ -303,7 +290,7 @@ runDropPerm :: (IsPerm a, UserInfoM m, CacheRWM m, MonadTx m) => DropPerm a -> m EncJSON runDropPerm defn = do - permInfo <- buildDropPermP1Res defn - dropPermP2 defn permInfo + dropPermP1 defn + dropPermP2 defn withNewInconsistentObjsCheck buildSchemaCache return successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Triggers.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Triggers.hs deleted file mode 100644 index 63d2be11c3490..0000000000000 --- a/server/src-lib/Hasura/RQL/DDL/Permission/Triggers.hs +++ /dev/null @@ -1,34 +0,0 @@ -module Hasura.RQL.DDL.Permission.Triggers - ( buildInsTrig - , dropInsTrigFn - , buildInsTrigFn - ) where - -import Hasura.Prelude -import Hasura.SQL.Types - -import qualified Database.PG.Query as Q -import qualified Hasura.SQL.DML as S - -import qualified Data.Text.Lazy as TL -import qualified Text.Shakespeare.Text as ST - -buildInsTrig :: QualifiedTable -> Q.Query -buildInsTrig qt@(QualifiedObject _ tn) = - Q.fromBuilder $ mconcat - [ "CREATE TRIGGER " <> toSQL tn - , " INSTEAD OF INSERT ON " <> toSQL qt - , " FOR EACH ROW EXECUTE PROCEDURE " - , toSQL qt <> "();" - ] - -dropInsTrigFn :: QualifiedTable -> Q.Query -dropInsTrigFn fn = - Q.fromBuilder $ "DROP FUNCTION " <> toSQL fn <> "()" - -buildInsTrigFn :: QualifiedTable -> QualifiedTable -> S.BoolExp -> Q.Query -buildInsTrigFn fn tn be = Q.fromText . TL.toStrict $ - let functionName = toSQLTxt fn - tableName = toSQLTxt tn - checkExpression = toSQLTxt be - in $(ST.stextFile "src-rsr/insert_trigger.sql.shakespeare") diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs index e8b8c5c6e687a..0d546f2da0eaa 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs @@ -25,7 +25,7 @@ import Hasura.SQL.Types buildTablePermissions :: ( ArrowChoice arr, Inc.ArrowDistribute arr, Inc.ArrowCache m arr - , ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m ) + , ArrowWriter (Seq CollectedInfo) arr, MonadTx m ) => ( Inc.Dependency TableCoreCache , QualifiedTable , FieldInfoMap FieldInfo @@ -80,8 +80,9 @@ withPermission f = proc (e, (permission, s)) -> do buildPermission :: ( ArrowChoice arr, Inc.ArrowCache m arr - , ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m - , Inc.Cacheable a, IsPerm a, FromJSON a, Inc.Cacheable (PermInfo a) ) + , ArrowWriter (Seq CollectedInfo) arr + , MonadTx m, IsPerm a, FromJSON a + ) => ( Inc.Dependency TableCoreCache , QualifiedTable , FieldInfoMap FieldInfo @@ -98,16 +99,6 @@ buildPermission = Inc.cache proc (tableCache, tableName, tableFields, permission (info, dependencies) <- liftEitherA <<< Inc.bindDepend -< runExceptT $ runTableCoreCacheRT (buildPermInfo tableName tableFields permDef) tableCache tellA -< Seq.fromList dependencies - rebuildViewsIfNeeded -< (tableName, permDef, info) returnA -< info) |) permission) |) >-> (\info -> join info >- returnA) - -rebuildViewsIfNeeded - :: ( Inc.ArrowCache m arr, MonadTx m, MonadReader BuildReason m - , Inc.Cacheable a, IsPerm a, Inc.Cacheable (PermInfo a) ) - => (QualifiedTable, PermDef a, PermInfo a) `arr` () -rebuildViewsIfNeeded = Inc.cache $ arrM \(tableName, permDef, info) -> do - buildReason <- ask - when (buildReason == CatalogUpdate) $ - addPermP2Setup tableName permDef 
info diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs index 0c720e5d14d94..a9ed370573517 100644 --- a/server/src-lib/Hasura/RQL/DML/Count.hs +++ b/server/src-lib/Hasura/RQL/DML/Count.hs @@ -56,7 +56,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) = , S.selExtr = extrs } Nothing -> S.mkSelect - { S.selExtr = [S.Extractor S.SEStar Nothing] } + { S.selExtr = [S.Extractor (S.SEStar Nothing) Nothing] } -- SELECT count(*) FROM (SELECT DISTINCT c1, .. cn FROM .. WHERE ..) r; -- SELECT count(*) FROM (SELECT * FROM .. WHERE ..) r; diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index 65c729d0b66ce..e4f1446bc7397 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -3,7 +3,6 @@ module Hasura.RQL.DML.Insert where import Data.Aeson.Types import Instances.TH.Lift () -import qualified Data.Aeson.Extended as J import qualified Data.HashMap.Strict as HM import qualified Data.HashSet as HS import qualified Data.Sequence as DS @@ -33,22 +32,31 @@ data ConflictClauseP1 data InsertQueryP1 = InsertQueryP1 - { iqp1Table :: !QualifiedTable - , iqp1View :: !QualifiedTable - , iqp1Cols :: ![PGCol] - , iqp1Tuples :: ![[S.SQLExp]] - , iqp1Conflict :: !(Maybe ConflictClauseP1) - , iqp1MutFlds :: !MutFlds - , iqp1AllCols :: ![PGColumnInfo] + { iqp1Table :: !QualifiedTable + , iqp1Cols :: ![PGCol] + , iqp1Tuples :: ![[S.SQLExp]] + , iqp1Conflict :: !(Maybe ConflictClauseP1) + , iqp1CheckCond :: !(Maybe AnnBoolExpSQL) + , iqp1MutFlds :: !MutFlds + , iqp1AllCols :: ![PGColumnInfo] } deriving (Show, Eq) mkInsertCTE :: InsertQueryP1 -> S.CTE -mkInsertCTE (InsertQueryP1 _ vn cols vals c _ _) = - S.CTEInsert insert +mkInsertCTE (InsertQueryP1 tn cols vals c checkCond _ _) = + S.CTEInsert insert where tupVals = S.ValuesExp $ map S.TupleExp vals insert = - S.SQLInsert vn cols tupVals (toSQLConflict <$> c) $ Just S.returningStar + S.SQLInsert tn cols tupVals (toSQLConflict <$> c) + . Just + . 
S.RetExp + $ maybe + [S.selectStar] + (\e -> + [ S.selectStar + , insertCheckExpr (toSQLBoolExp (S.QualTable tn) e) + ]) + checkCond toSQLConflict :: ConflictClauseP1 -> S.SQLConflict toSQLConflict conflict = case conflict of @@ -199,7 +207,6 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet map pgiColumn $ getCols fieldInfoMap allCols = getCols fieldInfoMap insCols = HM.keys defInsVals - insView = ipiView insPerm resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) setInsVals @@ -208,16 +215,17 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet let sqlExps = map snd insTuples inpCols = HS.toList $ HS.fromList $ concatMap fst insTuples + checkExpr <- convAnnBoolExpPartialSQL sessVarFromCurrentSetting (ipiCheck insPerm) + conflictClause <- withPathK "on_conflict" $ forM oC $ \c -> do roleName <- askCurRole unless (isTabUpdatable roleName tableInfo) $ throw400 PermissionDenied $ "upsert is not allowed for role " <> roleName <<> " since update permissions are not defined" buildConflictClause sessVarBldr tableInfo inpCols c - - return $ InsertQueryP1 tableName insView insCols sqlExps - conflictClause mutFlds allCols - + + return $ InsertQueryP1 tableName insCols sqlExps + conflictClause (Just checkExpr) mutFlds allCols where selNecessaryMsg = "; \"returning\" can only be used if the role has " @@ -241,53 +249,38 @@ convInsQ = insertP2 :: Bool -> (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON insertP2 strfyNum (u, p) = - runMutation $ Mutation (iqp1Table u) (insertCTE, p) + runMutation + $ Mutation (iqp1Table u) (insertCTE, p) (iqp1MutFlds u) (iqp1AllCols u) strfyNum where insertCTE = mkInsertCTE u -data ConflictCtx - = CCUpdate !ConstraintName ![PGCol] !PreSetCols !S.BoolExp - | CCDoNothing !(Maybe ConstraintName) - deriving (Show, Eq) - -nonAdminInsert :: Bool -> (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON -nonAdminInsert strfyNum (insQueryP1, args) = do - conflictCtxM <- mapM extractConflictCtx conflictClauseP1 - setConflictCtx conflictCtxM - insertP2 strfyNum (withoutConflictClause, args) - where - withoutConflictClause = insQueryP1{iqp1Conflict=Nothing} - conflictClauseP1 = iqp1Conflict insQueryP1 - -extractConflictCtx :: (MonadError QErr m) => ConflictClauseP1 -> m ConflictCtx -extractConflictCtx cp = - case cp of - (CP1DoNothing mConflictTar) -> do - mConstraintName <- mapM extractConstraintName mConflictTar - return $ CCDoNothing mConstraintName - (CP1Update conflictTar inpCols preSet filtr) -> do - constraintName <- extractConstraintName conflictTar - return $ CCUpdate constraintName inpCols preSet filtr - where - extractConstraintName (CTConstraint cn) = return cn - extractConstraintName _ = throw400 NotSupported - "\"constraint_on\" not supported for non admin insert. 
use \"constraint\" instead" - -setConflictCtx :: Maybe ConflictCtx -> Q.TxE QErr () -setConflictCtx conflictCtxM = do - let t = maybe "null" conflictCtxToJSON conflictCtxM - setVal = toSQL $ S.SELit t - setVar = "SET LOCAL hasura.conflict_clause = " - q = Q.fromBuilder $ setVar <> setVal - Q.unitQE defaultTxErrorHandler q () False - where - conflictCtxToJSON (CCDoNothing constrM) = - J.encodeToStrictText $ InsertTxConflictCtx CAIgnore constrM Nothing - conflictCtxToJSON (CCUpdate constr updCols preSet filtr) = - J.encodeToStrictText $ InsertTxConflictCtx CAUpdate (Just constr) $ - Just $ toSQLTxt (S.buildUpsertSetExp updCols preSet) - <> " " <> toSQLTxt (S.WhereFrag filtr) +-- | Create an expression which will fail with a check constraint violation error +-- if the condition is not met on any of the inserted rows. +-- +-- The resulting SQL will look something like this: +-- +-- > INSERT INTO +-- > ... +-- > RETURNING +-- > *, +-- > CASE WHEN {cond} +-- > THEN NULL +-- > ELSE hdb_catalog.check_violation('insert check constraint failed') +-- > END +insertCheckExpr + :: S.BoolExp + -> S.Extractor +insertCheckExpr condExpr = + S.Extractor + (S.SECond condExpr S.SENull + (S.SEFunction + (S.FunctionExp + (QualifiedObject (SchemaName "hdb_catalog") (FunctionName "check_violation")) + (S.FunctionArgs [S.SELit "insert check constraint failed"] mempty) + Nothing) + )) + Nothing runInsert :: (QErrM m, UserInfoM m, CacheRM m, MonadTx m, HasSQLGenCtx m) @@ -295,6 +288,5 @@ runInsert -> m EncJSON runInsert q = do res <- convInsQ q - role <- userRole <$> askUserInfo strfyNum <- stringifyNum <$> askSQLGenCtx - liftTx $ bool (nonAdminInsert strfyNum res) (insertP2 strfyNum res) $ isAdmin role + liftTx $ insertP2 strfyNum res diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index a5900e5115a9d..3015d58635b6a 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -38,7 +38,7 @@ mkAdminRolePermInfo ti = getComputedFieldInfos fields tn = _tciName ti - i = InsPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty [] + i = InsPermInfo (HS.fromList pgCols) annBoolExpTrue M.empty [] s = SelPermInfo (HS.fromList pgCols) (HS.fromList scalarComputedFields) tn annBoolExpTrue Nothing True [] u = UpdPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty [] diff --git a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs index 14243f2a8a145..a5503b8ec0ea5 100644 --- a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs @@ -705,7 +705,7 @@ baseNodeToSel joinCond baseNode = = baseNode -- this is the table which is aliased as "pfx.base" baseSel = S.mkSelect - { S.selExtr = [S.Extractor S.SEStar Nothing] + { S.selExtr = [S.Extractor (S.SEStar Nothing) Nothing] , S.selFrom = Just $ S.FromExp [fromItem] , S.selWhere = Just $ injectJoinCond joinCond whr } diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 001e02455fc95..4266a058f7aaf 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -208,7 +208,6 @@ isPGColInfo _ = False data InsPermInfo = InsPermInfo { ipiCols :: !(HS.HashSet PGCol) - , ipiView :: !QualifiedTable , ipiCheck :: !AnnBoolExpPartialSQL , ipiSet :: !PreSetColsPartial , ipiRequiredHeaders :: ![T.Text] diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index 
f888ff672df15..01314b6785aba 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -283,7 +283,8 @@ data SQLExp | SELit !T.Text | SEUnsafe !T.Text | SESelect !Select - | SEStar + | SEStar !(Maybe Qual) + -- ^ all fields (@*@) or all fields from relation (@iden.*@) | SEIden !Iden -- iden and row identifier are distinguished for easier rewrite rules | SERowIden !Iden @@ -336,8 +337,10 @@ instance ToSQL SQLExp where TB.text t toSQL (SESelect se) = paren $ toSQL se - toSQL SEStar = + toSQL (SEStar Nothing) = TB.char '*' + toSQL (SEStar (Just qual)) = + mconcat [toSQL qual, TB.char '.', TB.char '*'] toSQL (SEIden iden) = toSQL iden toSQL (SERowIden iden) = @@ -725,7 +728,10 @@ newtype RetExp = RetExp [Extractor] deriving (Show, Eq) selectStar :: Extractor -selectStar = Extractor SEStar Nothing +selectStar = Extractor (SEStar Nothing) Nothing + +selectStar' :: Qual -> Extractor +selectStar' q = Extractor (SEStar (Just q)) Nothing returningStar :: RetExp returningStar = RetExp [selectStar] @@ -804,15 +810,14 @@ data SQLInsert = SQLInsert instance ToSQL SQLInsert where toSQL si = - let insConflict = maybe "" toSQL - in "INSERT INTO" - <-> toSQL (siTable si) - <-> "(" - <-> (", " <+> siCols si) - <-> ")" - <-> toSQL (siValues si) - <-> insConflict (siConflict si) - <-> toSQL (siRet si) + "INSERT INTO" + <-> toSQL (siTable si) + <-> "(" + <-> (", " <+> siCols si) + <-> ")" + <-> toSQL (siValues si) + <-> maybe "" toSQL (siConflict si) + <-> toSQL (siRet si) data CTE = CTESelect !Select diff --git a/server/src-lib/Hasura/SQL/Rewrite.hs b/server/src-lib/Hasura/SQL/Rewrite.hs index 1eb1a43b51428..eebe38d54e27d 100644 --- a/server/src-lib/Hasura/SQL/Rewrite.hs +++ b/server/src-lib/Hasura/SQL/Rewrite.hs @@ -161,7 +161,7 @@ uSqlExp = restoringIdens . \case S.SELit t -> return $ S.SELit t S.SEUnsafe t -> return $ S.SEUnsafe t S.SESelect s -> S.SESelect <$> uSelect s - S.SEStar -> return S.SEStar + S.SEStar qual -> S.SEStar <$> traverse uQual qual -- this is for row expressions -- todo: check if this is always okay S.SEIden iden -> return $ S.SEIden iden diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index 24f72d63eb1b0..763a894fb404d 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -40,7 +40,6 @@ module Hasura.SQL.Types , SchemaName(..) , publicSchema - , hdbViewsSchema , TableName(..) , FunctionName(..) 
@@ -239,9 +238,6 @@ newtype SchemaName publicSchema :: SchemaName publicSchema = SchemaName "public" -hdbViewsSchema :: SchemaName -hdbViewsSchema = SchemaName "hdb_views" - instance IsIden SchemaName where toIden (SchemaName t) = Iden t diff --git a/server/src-lib/Hasura/Server/Migrate/Version.hs b/server/src-lib/Hasura/Server/Migrate/Version.hs index 2bc527161e272..6968137765f62 100644 --- a/server/src-lib/Hasura/Server/Migrate/Version.hs +++ b/server/src-lib/Hasura/Server/Migrate/Version.hs @@ -12,7 +12,7 @@ import Hasura.Prelude import qualified Data.Text as T latestCatalogVersion :: Integer -latestCatalogVersion = 29 +latestCatalogVersion = 30 latestCatalogVersionString :: T.Text latestCatalogVersionString = T.pack $ show latestCatalogVersion diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index a640734097c15..033f54aad13a8 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -61,10 +61,10 @@ data RQLQueryV1 | RQCreateUpdatePermission !CreateUpdPerm | RQCreateDeletePermission !CreateDelPerm - | RQDropInsertPermission !DropInsPerm - | RQDropSelectPermission !DropSelPerm - | RQDropUpdatePermission !DropUpdPerm - | RQDropDeletePermission !DropDelPerm + | RQDropInsertPermission !(DropPerm InsPerm) + | RQDropSelectPermission !(DropPerm SelPerm) + | RQDropUpdatePermission !(DropPerm UpdPerm) + | RQDropDeletePermission !(DropPerm DelPerm) | RQSetPermissionComment !SetPermComment | RQGetInconsistentMetadata !GetInconsistentMetadata diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 940fea2174580..23dd2ea8a0e2c 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -656,3 +656,10 @@ CREATE VIEW hdb_catalog.hdb_computed_field_function AS END AS function_schema FROM hdb_catalog.hdb_computed_field ); + +CREATE OR REPLACE FUNCTION hdb_catalog.check_violation(msg text) RETURNS bool AS +$$ + BEGIN + RAISE check_violation USING message=msg; + END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/server/src-rsr/insert_trigger.sql.shakespeare b/server/src-rsr/insert_trigger.sql.shakespeare deleted file mode 100644 index 1e8ce7c8e5250..0000000000000 --- a/server/src-rsr/insert_trigger.sql.shakespeare +++ /dev/null @@ -1,34 +0,0 @@ -CREATE OR REPLACE FUNCTION #{functionName}() RETURNS trigger LANGUAGE plpgsql AS $$ - DECLARE r #{tableName}%ROWTYPE; - DECLARE conflict_clause jsonb; - DECLARE action text; - DECLARE constraint_name text; - DECLARE set_expression text; - BEGIN - conflict_clause = current_setting('hasura.conflict_clause')::jsonb; - IF (#{checkExpression}) THEN - CASE - WHEN conflict_clause = 'null'::jsonb THEN INSERT INTO #{tableName} VALUES (NEW.*) RETURNING * INTO r; - ELSE - action = conflict_clause ->> 'action'; - constraint_name = quote_ident(conflict_clause ->> 'constraint'); - set_expression = conflict_clause ->> 'set_expression'; - IF action is NOT NULL THEN - CASE - WHEN action = 'ignore'::text AND constraint_name IS NULL THEN - INSERT INTO #{tableName} VALUES (NEW.*) ON CONFLICT DO NOTHING RETURNING * INTO r; - WHEN action = 'ignore'::text AND constraint_name is NOT NULL THEN - EXECUTE 'INSERT INTO #{tableName} VALUES ($1.*) ON CONFLICT ON CONSTRAINT ' || constraint_name || - ' DO NOTHING RETURNING *' INTO r USING NEW; - ELSE - EXECUTE 'INSERT INTO #{tableName} VALUES ($1.*) ON CONFLICT ON CONSTRAINT ' || constraint_name || - ' DO UPDATE ' || set_expression || ' RETURNING *' INTO r USING NEW; - END CASE; - ELSE - RAISE 
internal_error using message = 'action is not found'; RETURN NULL; - END IF; - END CASE; - IF r IS NULL THEN RETURN null; ELSE RETURN r; END IF; - ELSE RAISE check_violation using message = 'insert check constraint failed'; RETURN NULL; - END IF; - END $$; diff --git a/server/src-rsr/migrations/28_to_29.sql b/server/src-rsr/migrations/28_to_29.sql index 3b7d4f449641d..3ea28e21be2c0 100644 --- a/server/src-rsr/migrations/28_to_29.sql +++ b/server/src-rsr/migrations/28_to_29.sql @@ -128,4 +128,4 @@ CREATE VIEW hdb_catalog.hdb_table_info_agg AS ) foreign_key_constraints ON true -- all these identify table-like things - WHERE "table".relkind IN ('r', 't', 'v', 'm', 'f', 'p'); + WHERE "table".relkind IN ('r', 't', 'v', 'm', 'f', 'p'); \ No newline at end of file diff --git a/server/src-rsr/migrations/29_to_30.sql b/server/src-rsr/migrations/29_to_30.sql new file mode 100644 index 0000000000000..4de238a7ef06d --- /dev/null +++ b/server/src-rsr/migrations/29_to_30.sql @@ -0,0 +1,6 @@ +CREATE OR REPLACE FUNCTION hdb_catalog.check_violation(msg text) RETURNS bool AS +$$ + BEGIN + RAISE check_violation USING message=msg; + END; +$$ LANGUAGE plpgsql; \ No newline at end of file From 9c3e9ff4bea7a46e097cbac856650af0c68b78e6 Mon Sep 17 00:00:00 2001 From: Shahidh K Muhammed Date: Thu, 16 Jan 2020 11:35:35 +0530 Subject: [PATCH 3/3] ci: push the latest assets only on stable releases (close #3714) (#3716) [skip ci] --- .circleci/deploy.sh | 45 +++++++++++++++++++++++++++++++++++---------- 1 file changed, 35 insertions(+), 10 deletions(-) diff --git a/.circleci/deploy.sh b/.circleci/deploy.sh index dc3954521c0a6..16b34e71e4b96 100755 --- a/.circleci/deploy.sh +++ b/.circleci/deploy.sh @@ -11,6 +11,17 @@ CHANGELOG_TEXT="" # reviewers for pull requests opened to update installation manifests REVIEWERS="shahidhk,coco98,arvi3411301" +IS_STABLE_RELEASE=false +STABLE_SEMVER_REGEX="^v(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)$" +if [ ! -z "${CIRCLE_TAG}" ]; then + if [[ "$CIRCLE_TAG" =~ $STABLE_SEMVER_REGEX ]]; then + echo + echo "this is a stable release" + echo + IS_STABLE_RELEASE=true + fi +fi + changelog() { CHANGELOG=$(git log ${PREVIOUS_TAG}..${LATEST_TAG} --pretty="tformat:- $1: %s" --reverse -- $ROOT/$1) if [ -n "$CHANGELOG" ] @@ -33,10 +44,10 @@ deploy_server() { } deploy_server_latest() { - echo "deloying server latest tag" - cd "$ROOT/server" - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USER" --password-stdin - make push-latest + echo "deloying server latest tag" + cd "$ROOT/server" + echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USER" --password-stdin + make push-latest } draft_github_release() { @@ -56,6 +67,7 @@ configure_git() { } send_pr_to_repo() { + configure_git git clone https://github.com/hasura/$1.git ~/$1 cd ~/$1 git checkout -b ${LATEST_TAG} @@ -86,17 +98,23 @@ deploy_console() { # build and push container for auto-migrations build_and_push_cli_migrations_image() { IMAGE_TAG="hasura/graphql-engine:${CIRCLE_TAG}.cli-migrations" - LATEST_IMAGE_TAG="hasura/graphql-engine:latest.cli-migrations" cd "$ROOT/scripts/cli-migrations" cp /build/_cli_output/binaries/cli-hasura-linux-amd64 . docker build -t "$IMAGE_TAG" . 
docker push "$IMAGE_TAG" +} + +# build and push latest container for auto-migrations +push_latest_cli_migrations_image() { + IMAGE_TAG="hasura/graphql-engine:${CIRCLE_TAG}.cli-migrations" + LATEST_IMAGE_TAG="hasura/graphql-engine:latest.cli-migrations" # push latest.cli-migrations tag docker tag "$IMAGE_TAG" "$LATEST_IMAGE_TAG" docker push "$LATEST_IMAGE_TAG" } + # copy docker-compose-https manifests to gcr for digital ocean one-click app deploy_do_manifests() { gsutil cp "$ROOT/install-manifests/docker-compose-https/docker-compose.yaml" \ @@ -148,9 +166,19 @@ fi deploy_console deploy_server if [[ ! -z "$CIRCLE_TAG" ]]; then - deploy_server_latest - push_server_binary build_and_push_cli_migrations_image + + # if this is a stable release, update all latest assets + if [ $IS_STABLE_RELEASE = true ]; then + deploy_server_latest + push_server_binary + push_latest_cli_migrations_image + send_pr_to_repo graphql-engine-heroku + deploy_do_manifests + fi + + # submit a release draft to github + # build changelog CHANGELOG_TEXT=$(changelog server) CHANGELOG_TEXT+=$(changelog cli) CHANGELOG_TEXT+=$(changelog console) @@ -159,7 +187,4 @@ $(<$ROOT/.circleci/release_notes.template.md) EOF ") draft_github_release - configure_git - send_pr_to_repo graphql-engine-heroku - deploy_do_manifests fi