From 0cc8531de7b9d8a1865fcaf3c249ec15c3635c7a Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 29 Jul 2024 14:30:10 -0400 Subject: [PATCH 01/47] Fixed 'DEFAULT is not allowed in this context' problem with createRecord when using array columns --- Test/SchemaCompilerSpec.hs | 25 +++++++++++++++++++++++++ ihp-ide/IHP/SchemaCompiler.hs | 15 ++++++++++++--- 2 files changed, 37 insertions(+), 3 deletions(-) diff --git a/Test/SchemaCompilerSpec.hs b/Test/SchemaCompilerSpec.hs index b89b2f137..02c4d7ff5 100644 --- a/Test/SchemaCompilerSpec.hs +++ b/Test/SchemaCompilerSpec.hs @@ -471,6 +471,31 @@ tests = do builder |> QueryBuilder.filterWhere (#id, id) {-# INLINE filterWhereId #-} |] + it "should not use DEFAULT for array columns" do + let statement = StatementCreateTable CreateTable + { name = "users" + , columns = + [ Column "id" PUUID Nothing True True Nothing + , Column {name = "keywords", columnType = PArray PText, defaultValue = Just (VarExpression "NULL"), notNull = False, isUnique = False, generator = Nothing} + ] + , primaryKeyConstraint = PrimaryKeyConstraint ["id"] + , constraints = [] + , unlogged = False + } + let compileOutput = compileStatementPreview [statement] statement |> Text.strip + + getInstanceDecl "CanCreate" compileOutput `shouldBe` [trimming| + instance CanCreate User where + create :: (?modelContext :: ModelContext) => User -> IO User + create model = do + sqlQuerySingleRow "INSERT INTO users (id, keywords) VALUES (?, ? :: TEXT[]) RETURNING id, keywords" ((model.id, model.keywords)) + createMany [] = pure [] + createMany models = do + sqlQuery (Query $ "INSERT INTO users (id, keywords) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?, ? :: TEXT[])") models)) <> " RETURNING id, keywords") (List.concat $ List.map (\model -> [toField (model.id), toField (model.keywords)]) models) + createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult model = do + sqlExecDiscardResult "INSERT INTO users (id, keywords) VALUES (?, ? :: TEXT[])" ((model.id, model.keywords)) + |] describe "compileStatementPreview for table with arbitrarily named primary key" do let statements = parseSqlStatements [trimming| CREATE TABLE things ( diff --git a/ihp-ide/IHP/SchemaCompiler.hs b/ihp-ide/IHP/SchemaCompiler.hs index 260a13fe9..31237b3c8 100644 --- a/ihp-ide/IHP/SchemaCompiler.hs +++ b/ihp-ide/IHP/SchemaCompiler.hs @@ -551,9 +551,18 @@ compileCreate table@(CreateTable { name, columns }) = values = commaSep (map columnPlaceholder writableColumns) toBinding column@(Column { name }) = - if hasExplicitOrImplicitDefault column - then "fieldWithDefault #" <> columnNameToFieldName name <> " model" - else "model." <> columnNameToFieldName name + if hasExplicitOrImplicitDefault column && not isArrayColumn + then "fieldWithDefault #" <> columnNameToFieldName name <> " model" + else "model." <> columnNameToFieldName name + where + -- We cannot use DEFAULT with array columns as postgres will throw an error: + -- + -- > DEFAULT is not allowed in this context + -- + -- To walk around this error, we explicitly specify an empty array. 
+ isArrayColumn = case column.columnType of + PArray _ -> True + _ -> False bindings :: [Text] From 147895e2e74e0fdae788ba446c83f50eb8a74597 Mon Sep 17 00:00:00 2001 From: mokshasoft Date: Tue, 30 Jul 2024 20:51:33 +0200 Subject: [PATCH 02/47] Document how to use the AWS terraform config (#1991) * Document how to use the AWS terraform config * Update Guide/deployment.markdown Co-authored-by: Marc Scholten * Update Guide/deployment.markdown Co-authored-by: Marc Scholten * Update Guide/deployment.markdown Co-authored-by: Marc Scholten * Update Guide/deployment.markdown Co-authored-by: Marc Scholten * Update Guide/deployment.markdown Co-authored-by: Marc Scholten --------- Co-authored-by: Marc Scholten --- Guide/deployment.markdown | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/Guide/deployment.markdown b/Guide/deployment.markdown index 21180735d..03b60b79e 100644 --- a/Guide/deployment.markdown +++ b/Guide/deployment.markdown @@ -12,6 +12,35 @@ AWS EC2 is a good choice for deploying IHP in a professional setup. ### AWS infrastructure preparation +#### Creating infrastructure with Terraform + +The EC2 instance, RDS database, VPS, subnets, security groups, etc, can be setup automatically using [Terraform](https://www.terraform.io/). + +1. Install terraform +2. Setup AWS credentials in `.aws/config` and `.aws/credentials` +3. Go to the folder IaC/aws. If the folder doesn't exist in the repo, get it from [ihp-boilerplate](https://github.com/digitallyinduced/ihp-boilerplate) Run: + ``` + terraform init + ``` +4. Create the file `terraform.tfvars` with the following content: + ``` + prefix = "Project prefix for the resource names" + region = "AWS Region to deploy to" + az_1 = "Availability Zone 1" + az_2 = "Availability Zone 2" + key_name = "The key name of the SSH key-pair" + db_password = "The password for the RDS database" + ``` + - The two AZs are needed to setup the RDS database. + - The SSH key-pair should be created in the AWS web interface. +5. Run: + ``` + terraform apply + ``` +6. Important data like the RDS endpoint and the EC2 instance URL is written to the file `db_info.txt` + +Now the NixOS instance and Postgres database is setup and an SSH conncetion can be established to it. + #### Creating a new EC2 Instance Start a new EC2 instance and use the official NixOS AMI `NixOS-23.05.426.afc48694f2a-x86_64-linux`. You can find the latest NixOS AMI at https://nixos.org/download#nixos-amazon From e974d626dc324c61440a19c9723e2bffcdacfb56 Mon Sep 17 00:00:00 2001 From: Jonas Claeson Date: Wed, 31 Jul 2024 07:38:24 +0200 Subject: [PATCH 03/47] Clarify deploying instructions with terraform --- Guide/deployment.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/deployment.markdown b/Guide/deployment.markdown index 03b60b79e..ba10732c2 100644 --- a/Guide/deployment.markdown +++ b/Guide/deployment.markdown @@ -18,7 +18,7 @@ The EC2 instance, RDS database, VPS, subnets, security groups, etc, can be setup 1. Install terraform 2. Setup AWS credentials in `.aws/config` and `.aws/credentials` -3. Go to the folder IaC/aws. If the folder doesn't exist in the repo, get it from [ihp-boilerplate](https://github.com/digitallyinduced/ihp-boilerplate) Run: +3. Copy the files from the IaC/aws folder from [the branch IaC-aws in ihp-boilerplate](https://github.com/digitallyinduced/ihp-boilerplate/tree/IaC-aws) to you IHP project repo. 
Run the init command from the IaC/aws folder: ``` terraform init ``` From 040d1b29fe50a74bb38e0ad4828d0d7b9fdbe88c Mon Sep 17 00:00:00 2001 From: Montmorency Date: Thu, 1 Aug 2024 17:07:43 +0100 Subject: [PATCH 04/47] Add appName To options for flake. (#1993) * adding appName to flake options and trying to work around hls issue. * updated appName in mkDerivation. * updated flake. * removing changes to hls from appName PR. * delted pName comments, fixed indentation and spacing around equals sign. --- NixSupport/default.nix | 3 +- flake-module.nix | 10 + flake.lock | 707 +++++++++++++++++++++++++++++++++++++---- 3 files changed, 662 insertions(+), 58 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index 1b1f9b801..30b6cb3f4 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -11,6 +11,7 @@ , optimized ? false , includeDevTools ? !optimized # Include Postgres? , rtsFlags ? "" +, appName ? "app" , optimizationLevel ? "2" }: @@ -32,7 +33,7 @@ let else "build/bin/RunJobs"; in pkgs.stdenv.mkDerivation { - name = "app"; + name = appName; buildPhase = '' runHook preBuild diff --git a/flake-module.nix b/flake-module.nix index e11b36e9d..2d6457577 100644 --- a/flake-module.nix +++ b/flake-module.nix @@ -52,6 +52,14 @@ ihpFlake: ]; }; + appName = lib.mkOption { + description = '' + The derivation name. + ''; + type = lib.types.str; + default = "app"; + }; + projectPath = lib.mkOption { description = '' Path to the IHP project. You likely want to set this to `./.`. @@ -139,6 +147,7 @@ ihpFlake: pkgs = pkgs; rtsFlags = cfg.rtsFlags; optimizationLevel = cfg.optimizationLevel; + appName = cfg.appName; }; unoptimized-prod-server = import "${ihp}/NixSupport/default.nix" { @@ -152,6 +161,7 @@ ihpFlake: pkgs = pkgs; rtsFlags = cfg.rtsFlags; optimizationLevel = "0"; + appName = cfg.appName; }; unoptimized-docker-image = pkgs.dockerTools.buildImage { diff --git a/flake.lock b/flake.lock index 0b2eb957c..71f7a85f1 100644 --- a/flake.lock +++ b/flake.lock @@ -30,6 +30,42 @@ "type": "github" } }, + "cachix_2": { + "inputs": { + "devenv": "devenv_4", + "flake-compat": [ + "ihp-boilerplate", + "ihp", + "devenv", + "flake-compat" + ], + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "devenv", + "nixpkgs" + ], + "pre-commit-hooks": [ + "ihp-boilerplate", + "ihp", + "devenv", + "pre-commit-hooks" + ] + }, + "locked": { + "lastModified": 1712055811, + "narHash": "sha256-7FcfMm5A/f02yyzuavJe06zLa9hcMHsagE28ADcmQvk=", + "owner": "cachix", + "repo": "cachix", + "rev": "02e38da89851ec7fec3356a5c04bc8349cae0e30", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "cachix", + "type": "github" + } + }, "devenv": { "inputs": { "cachix": "cachix", @@ -88,8 +124,9 @@ }, "devenv_3": { "inputs": { - "flake-compat": "flake-compat_3", - "nix": "nix_3", + "cachix": "cachix_2", + "flake-compat": "flake-compat_4", + "nix": "nix_4", "nixpkgs": [ "ihp-boilerplate", "ihp", @@ -97,6 +134,69 @@ ], "pre-commit-hooks": "pre-commit-hooks_2" }, + "locked": { + "lastModified": 1714390914, + "narHash": "sha256-W5DFIifCjGYJXJzLU3RpqBeqes4zrf0Sr/6rwzTygPU=", + "owner": "cachix", + "repo": "devenv", + "rev": "34e6461fd76b5f51ad5f8214f5cf22c4cd7a196e", + "type": "github" + }, + "original": { + "owner": "cachix", + "ref": "refs/tags/v1.0.5", + "repo": "devenv", + "type": "github" + } + }, + "devenv_4": { + "inputs": { + "flake-compat": [ + "ihp-boilerplate", + "ihp", + "devenv", + "cachix", + "flake-compat" + ], + "nix": "nix_3", + "nixpkgs": "nixpkgs_2", + "poetry2nix": "poetry2nix_2", + 
"pre-commit-hooks": [ + "ihp-boilerplate", + "ihp", + "devenv", + "cachix", + "pre-commit-hooks" + ] + }, + "locked": { + "lastModified": 1708704632, + "narHash": "sha256-w+dOIW60FKMaHI1q5714CSibk99JfYxm0CzTinYWr+Q=", + "owner": "cachix", + "repo": "devenv", + "rev": "2ee4450b0f4b95a1b90f2eb5ffea98b90e48c196", + "type": "github" + }, + "original": { + "owner": "cachix", + "ref": "python-rewrite", + "repo": "devenv", + "type": "github" + } + }, + "devenv_5": { + "inputs": { + "flake-compat": "flake-compat_5", + "nix": "nix_5", + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "nixpkgs" + ], + "pre-commit-hooks": "pre-commit-hooks_3" + }, "locked": { "lastModified": 1694422554, "narHash": "sha256-s5NTPzT66yIMmau+ZGP7q9z4NjgceDETL4xZ6HJ/TBg=", @@ -111,18 +211,20 @@ "type": "github" } }, - "devenv_4": { + "devenv_6": { "inputs": { - "flake-compat": "flake-compat_4", - "nix": "nix_4", + "flake-compat": "flake-compat_6", + "nix": "nix_6", "nixpkgs": [ "ihp-boilerplate", "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "nixpkgs" ], - "pre-commit-hooks": "pre-commit-hooks_3" + "pre-commit-hooks": "pre-commit-hooks_4" }, "locked": { "lastModified": 1686054274, @@ -187,6 +289,38 @@ } }, "flake-compat_4": { + "flake": false, + "locked": { + "lastModified": 1696426674, + "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-compat_5": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-compat_6": { "flake": false, "locked": { "lastModified": 1673956053, @@ -208,6 +342,28 @@ "nixpkgs" ] }, + "locked": { + "lastModified": 1719994518, + "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "flake-parts_2": { + "inputs": { + "nixpkgs-lib": [ + "ihp-boilerplate", + "ihp", + "nixpkgs" + ] + }, "locked": { "lastModified": 1714641030, "narHash": "sha256-yzcRNDoyVP7+SCNX0wmuDju1NUCt8Dz9+lyUXEI0dbI=", @@ -222,7 +378,7 @@ "type": "github" } }, - "flake-parts_2": { + "flake-parts_3": { "inputs": { "nixpkgs-lib": "nixpkgs-lib" }, @@ -240,7 +396,7 @@ "type": "github" } }, - "flake-parts_3": { + "flake-parts_4": { "inputs": { "nixpkgs-lib": "nixpkgs-lib_2" }, @@ -298,6 +454,42 @@ "inputs": { "systems": "systems_3" }, + "locked": { + "lastModified": 1689068808, + "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_4": { + "inputs": { + "systems": "systems_4" + }, + "locked": { + "lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": 
"b1d9ab70662946ef0850d488da1c9019f3a9752a", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_5": { + "inputs": { + "systems": "systems_5" + }, "locked": { "lastModified": 1685518550, "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", @@ -312,7 +504,7 @@ "type": "github" } }, - "flake-utils_4": { + "flake-utils_6": { "locked": { "lastModified": 1667395993, "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", @@ -359,6 +551,32 @@ "nixpkgs" ] }, + "locked": { + "lastModified": 1709087332, + "narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=", + "owner": "hercules-ci", + "repo": "gitignore.nix", + "rev": "637db329424fd7e46cf4185293b9cc8c88c95394", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "gitignore.nix", + "type": "github" + } + }, + "gitignore_3": { + "inputs": { + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "devenv", + "pre-commit-hooks", + "nixpkgs" + ] + }, "locked": { "lastModified": 1660459072, "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", @@ -373,13 +591,15 @@ "type": "github" } }, - "gitignore_3": { + "gitignore_4": { "inputs": { "nixpkgs": [ "ihp-boilerplate", "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "devenv", "pre-commit-hooks", "nixpkgs" @@ -404,21 +624,21 @@ "devenv": "devenv_3", "flake-parts": "flake-parts_2", "ihp-boilerplate": "ihp-boilerplate_2", - "nix-filter": "nix-filter", - "nixpkgs": "nixpkgs_3", - "systems": "systems_5" + "nix-filter": "nix-filter_2", + "nixpkgs": "nixpkgs_5", + "systems": "systems_8" }, "locked": { - "lastModified": 1700013490, - "narHash": "sha256-oQz7ZBrHe6WwYMwnxxUgnYM55CuH5Oxjz6mrLnYbB7U=", + "lastModified": 1714870134, + "narHash": "sha256-DmaIr9kF+TG24wVNPVufxC74TYMCLziLYS9hCZHBDTc=", "owner": "digitallyinduced", "repo": "ihp", - "rev": "d59a65d71943cb506eee3ad6255f017963237359", + "rev": "29436fd63f11ccad9b10168bba7d14df737ee287", "type": "github" }, "original": { "owner": "digitallyinduced", - "ref": "v1.2", + "ref": "v1.3", "repo": "ihp", "type": "github" } @@ -447,6 +667,52 @@ "systems" ] }, + "locked": { + "lastModified": 1714870368, + "narHash": "sha256-40eI5uHSTrKgHX6qJz4muP3MVjg2Zhxt3fIktqsx7GU=", + "owner": "digitallyinduced", + "repo": "ihp-boilerplate", + "rev": "68eb3debd8e353653391214a658deafa6f72d91c", + "type": "github" + }, + "original": { + "owner": "digitallyinduced", + "repo": "ihp-boilerplate", + "type": "github" + } + }, + "ihp-boilerplate_2": { + "inputs": { + "devenv": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "devenv" + ], + "flake-parts": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "flake-parts" + ], + "ihp": "ihp_2", + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "nixpkgs" + ], + "systems": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "systems" + ] + }, "locked": { "lastModified": 1710175252, "narHash": "sha256-QIFqo64U69uUGJ7pgBr37T3yAKK0n1ueqagKmnm+XWw=", @@ -461,13 +727,15 @@ "type": "github" } }, - "ihp-boilerplate_2": { + "ihp-boilerplate_3": { "inputs": { "devenv": [ "ihp-boilerplate", "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "devenv" ], "flake-parts": [ @@ -475,14 +743,18 @@ "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "flake-parts" ], - "ihp": "ihp_2", + "ihp": "ihp_3", "nixpkgs": [ "ihp-boilerplate", "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + 
"ihp", "nixpkgs" ], "systems": [ @@ -490,6 +762,8 @@ "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "systems" ] }, @@ -507,7 +781,7 @@ "type": "github" } }, - "ihp-boilerplate_3": { + "ihp-boilerplate_4": { "flake": false, "locked": { "lastModified": 1686165507, @@ -526,11 +800,35 @@ }, "ihp_2": { "inputs": { - "devenv": "devenv_4", + "devenv": "devenv_5", "flake-parts": "flake-parts_3", "ihp-boilerplate": "ihp-boilerplate_3", - "nixpkgs": "nixpkgs_2", - "systems": "systems_4" + "nix-filter": "nix-filter", + "nixpkgs": "nixpkgs_4", + "systems": "systems_7" + }, + "locked": { + "lastModified": 1700013490, + "narHash": "sha256-oQz7ZBrHe6WwYMwnxxUgnYM55CuH5Oxjz6mrLnYbB7U=", + "owner": "digitallyinduced", + "repo": "ihp", + "rev": "d59a65d71943cb506eee3ad6255f017963237359", + "type": "github" + }, + "original": { + "owner": "digitallyinduced", + "ref": "v1.2", + "repo": "ihp", + "type": "github" + } + }, + "ihp_3": { + "inputs": { + "devenv": "devenv_6", + "flake-parts": "flake-parts_4", + "ihp-boilerplate": "ihp-boilerplate_4", + "nixpkgs": "nixpkgs_3", + "systems": "systems_6" }, "locked": { "lastModified": 1689949405, @@ -622,54 +920,154 @@ }, "nix-filter_2": { "locked": { - "lastModified": 1710156097, - "narHash": "sha256-1Wvk8UP7PXdf8bCCaEoMnOT1qe5/Duqgj+rL8sRQsSM=", - "owner": "numtide", - "repo": "nix-filter", - "rev": "3342559a24e85fc164b295c3444e8a139924675b", + "lastModified": 1710156097, + "narHash": "sha256-1Wvk8UP7PXdf8bCCaEoMnOT1qe5/Duqgj+rL8sRQsSM=", + "owner": "numtide", + "repo": "nix-filter", + "rev": "3342559a24e85fc164b295c3444e8a139924675b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "nix-filter", + "type": "github" + } + }, + "nix-filter_3": { + "locked": { + "lastModified": 1710156097, + "narHash": "sha256-1Wvk8UP7PXdf8bCCaEoMnOT1qe5/Duqgj+rL8sRQsSM=", + "owner": "numtide", + "repo": "nix-filter", + "rev": "3342559a24e85fc164b295c3444e8a139924675b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "nix-filter", + "type": "github" + } + }, + "nix-github-actions": { + "inputs": { + "nixpkgs": [ + "devenv", + "cachix", + "devenv", + "poetry2nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1688870561, + "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", + "owner": "nix-community", + "repo": "nix-github-actions", + "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "nix-github-actions", + "type": "github" + } + }, + "nix-github-actions_2": { + "inputs": { + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "devenv", + "cachix", + "devenv", + "poetry2nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1688870561, + "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", + "owner": "nix-community", + "repo": "nix-github-actions", + "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "nix-github-actions", + "type": "github" + } + }, + "nix_2": { + "inputs": { + "flake-compat": [ + "devenv", + "flake-compat" + ], + "nixpkgs": [ + "devenv", + "nixpkgs" + ], + "nixpkgs-regression": "nixpkgs-regression_2" + }, + "locked": { + "lastModified": 1712911606, + "narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=", + "owner": "domenkozar", + "repo": "nix", + "rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12", "type": "github" }, "original": { - "owner": "numtide", - "repo": "nix-filter", + "owner": "domenkozar", + "ref": 
"devenv-2.21", + "repo": "nix", "type": "github" } }, - "nix-github-actions": { + "nix_3": { "inputs": { + "flake-compat": "flake-compat_3", "nixpkgs": [ + "ihp-boilerplate", + "ihp", "devenv", "cachix", "devenv", - "poetry2nix", "nixpkgs" - ] + ], + "nixpkgs-regression": "nixpkgs-regression_3" }, "locked": { - "lastModified": 1688870561, - "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", - "owner": "nix-community", - "repo": "nix-github-actions", - "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301", + "lastModified": 1712911606, + "narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=", + "owner": "domenkozar", + "repo": "nix", + "rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12", "type": "github" }, "original": { - "owner": "nix-community", - "repo": "nix-github-actions", + "owner": "domenkozar", + "ref": "devenv-2.21", + "repo": "nix", "type": "github" } }, - "nix_2": { + "nix_4": { "inputs": { "flake-compat": [ + "ihp-boilerplate", + "ihp", "devenv", "flake-compat" ], "nixpkgs": [ + "ihp-boilerplate", + "ihp", "devenv", "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression_2" + "nixpkgs-regression": "nixpkgs-regression_4" }, "locked": { "lastModified": 1712911606, @@ -686,16 +1084,18 @@ "type": "github" } }, - "nix_3": { + "nix_5": { "inputs": { "lowdown-src": "lowdown-src", "nixpkgs": [ + "ihp-boilerplate", + "ihp", "ihp-boilerplate", "ihp", "devenv", "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression_3" + "nixpkgs-regression": "nixpkgs-regression_5" }, "locked": { "lastModified": 1676545802, @@ -712,7 +1112,7 @@ "type": "github" } }, - "nix_4": { + "nix_6": { "inputs": { "lowdown-src": "lowdown-src_2", "nixpkgs": [ @@ -720,10 +1120,12 @@ "ihp", "ihp-boilerplate", "ihp", + "ihp-boilerplate", + "ihp", "devenv", "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression_4" + "nixpkgs-regression": "nixpkgs-regression_6" }, "locked": { "lastModified": 1676545802, @@ -856,6 +1258,38 @@ "type": "github" } }, + "nixpkgs-regression_5": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + } + }, + "nixpkgs-regression_6": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + } + }, "nixpkgs-stable": { "locked": { "lastModified": 1710695816, @@ -873,6 +1307,22 @@ } }, "nixpkgs-stable_2": { + "locked": { + "lastModified": 1710695816, + "narHash": "sha256-3Eh7fhEID17pv9ZxrPwCLfqXnYP006RKzSs0JptsN84=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "614b4613980a522ba49f0d194531beddbb7220d3", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-stable_3": { "locked": { "lastModified": 1685801374, "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", @@ -888,7 +1338,7 @@ "type": "github" } }, - "nixpkgs-stable_3": { + "nixpkgs-stable_4": { "locked": { "lastModified": 1678872516, "narHash": "sha256-/E1YwtMtFAu2KUQKV/1+KFuReYPANM2Rzehk84VxVoc=", @@ -905,6 
+1355,22 @@ } }, "nixpkgs_2": { + "locked": { + "lastModified": 1692808169, + "narHash": "sha256-x9Opq06rIiwdwGeK2Ykj69dNc2IvUH1fY55Wm7atwrE=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "9201b5ff357e781bf014d0330d18555695df7ba8", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_3": { "locked": { "lastModified": 1681488673, "narHash": "sha256-PmojOyePBNvbY3snYE7NAQHTLB53t7Ro+pgiJ4wPCuk=", @@ -920,7 +1386,7 @@ "type": "github" } }, - "nixpkgs_3": { + "nixpkgs_4": { "locked": { "lastModified": 1696291921, "narHash": "sha256-isKgVAoUxuxYEuO3Q4xhbfKcZrF/+UkJtOTv0eb/W5E=", @@ -936,7 +1402,22 @@ "type": "github" } }, - "nixpkgs_4": { + "nixpkgs_5": { + "locked": { + "lastModified": 1714864423, + "narHash": "sha256-Wx3Y6arRJD1pd3c8SnD7dfW7KWuCr/r248P/5XLaMdM=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "54b4bb956f9891b872904abdb632cea85a033ff2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_6": { "locked": { "lastModified": 1714864423, "narHash": "sha256-Wx3Y6arRJD1pd3c8SnD7dfW7KWuCr/r248P/5XLaMdM=", @@ -977,6 +1458,33 @@ "type": "github" } }, + "poetry2nix_2": { + "inputs": { + "flake-utils": "flake-utils_3", + "nix-github-actions": "nix-github-actions_2", + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "devenv", + "cachix", + "devenv", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1692876271, + "narHash": "sha256-IXfZEkI0Mal5y1jr6IRWMqK8GW2/f28xJenZIPQqkY0=", + "owner": "nix-community", + "repo": "poetry2nix", + "rev": "d5006be9c2c2417dafb2e2e5034d83fabd207ee3", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "poetry2nix", + "type": "github" + } + }, "pre-commit-hooks": { "inputs": { "flake-compat": [ @@ -1013,7 +1521,7 @@ "devenv", "flake-compat" ], - "flake-utils": "flake-utils_3", + "flake-utils": "flake-utils_4", "gitignore": "gitignore_2", "nixpkgs": [ "ihp-boilerplate", @@ -1024,11 +1532,11 @@ "nixpkgs-stable": "nixpkgs-stable_2" }, "locked": { - "lastModified": 1688056373, - "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", + "lastModified": 1713775815, + "narHash": "sha256-Wu9cdYTnGQQwtT20QQMg7jzkANKQjwBD9iccfGKkfls=", "owner": "cachix", "repo": "pre-commit-hooks.nix", - "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", + "rev": "2ac4dcbf55ed43f3be0bae15e181f08a57af24a4", "type": "github" }, "original": { @@ -1047,7 +1555,7 @@ "devenv", "flake-compat" ], - "flake-utils": "flake-utils_4", + "flake-utils": "flake-utils_5", "gitignore": "gitignore_3", "nixpkgs": [ "ihp-boilerplate", @@ -1059,6 +1567,46 @@ ], "nixpkgs-stable": "nixpkgs-stable_3" }, + "locked": { + "lastModified": 1688056373, + "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "type": "github" + } + }, + "pre-commit-hooks_4": { + "inputs": { + "flake-compat": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "devenv", + "flake-compat" + ], + "flake-utils": "flake-utils_6", + "gitignore": "gitignore_4", + "nixpkgs": [ + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "ihp-boilerplate", + "ihp", + "devenv", + "nixpkgs" + ], + "nixpkgs-stable": "nixpkgs-stable_4" + }, "locked": { "lastModified": 1682596858, "narHash": 
"sha256-Hf9XVpqaGqe/4oDGr30W8HlsWvJXtMsEPHDqHZA6dDg=", @@ -1078,9 +1626,9 @@ "devenv": "devenv", "flake-parts": "flake-parts", "ihp-boilerplate": "ihp-boilerplate", - "nix-filter": "nix-filter_2", - "nixpkgs": "nixpkgs_4", - "systems": "systems_6" + "nix-filter": "nix-filter_3", + "nixpkgs": "nixpkgs_6", + "systems": "systems_9" } }, "systems": { @@ -1172,6 +1720,51 @@ "repo": "default", "type": "github" } + }, + "systems_7": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "systems_8": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "systems_9": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } } }, "root": "root", From d87ce54312e1cedae069566a8507bbb374bcf34d Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Fri, 2 Aug 2024 13:27:41 -0400 Subject: [PATCH 05/47] Moved hsx testsuite to ihp-hsx package --- {Test => ihp-hsx/Test/IHP}/HSX/ParserSpec.hs | 10 ++-- {Test => ihp-hsx/Test/IHP}/HSX/QQSpec.hs | 8 +-- ihp-hsx/Test/Main.hs | 12 +++++ ihp-hsx/ihp-hsx.cabal | 55 +++++++++++++++++++- 4 files changed, 76 insertions(+), 9 deletions(-) rename {Test => ihp-hsx/Test/IHP}/HSX/ParserSpec.hs (93%) rename {Test => ihp-hsx/Test/IHP}/HSX/QQSpec.hs (98%) create mode 100644 ihp-hsx/Test/Main.hs diff --git a/Test/HSX/ParserSpec.hs b/ihp-hsx/Test/IHP/HSX/ParserSpec.hs similarity index 93% rename from Test/HSX/ParserSpec.hs rename to ihp-hsx/Test/IHP/HSX/ParserSpec.hs index eb6849115..11b6c1506 100644 --- a/Test/HSX/ParserSpec.hs +++ b/ihp-hsx/Test/IHP/HSX/ParserSpec.hs @@ -1,11 +1,11 @@ {-| -Module: Test.HSX.QQSpec +Module: IHP.HSX.QQSpec Copyright: (c) digitally induced GmbH, 2020 -} -module Test.HSX.ParserSpec where +module IHP.HSX.ParserSpec where import Test.Hspec -import IHP.Prelude +import Prelude import IHP.HSX.Parser import qualified Text.Megaparsec as Megaparsec import qualified Text.Megaparsec.Error as Megaparsec @@ -62,12 +62,12 @@ tests = do it "should parse spread values" do let p = parseHsx position extensions "
" -- We cannot easily construct the @VarE variables@ expression, therefore we use show here for comparison - tshow p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" + show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" it "should parse spread values with a space" do -- See https://github.com/digitallyinduced/ihp/issues/1588 let p = parseHsx position extensions "
" - tshow p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" + show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" it "should accept underscores in data attributes" do let p = parseHsx position extensions "
" diff --git a/Test/HSX/QQSpec.hs b/ihp-hsx/Test/IHP/HSX/QQSpec.hs similarity index 98% rename from Test/HSX/QQSpec.hs rename to ihp-hsx/Test/IHP/HSX/QQSpec.hs index 5f69648c7..9bd65c05d 100644 --- a/Test/HSX/QQSpec.hs +++ b/ihp-hsx/Test/IHP/HSX/QQSpec.hs @@ -1,15 +1,17 @@ {-| -Module: Test.HSX.QQSpec +Module: IHP.HSX.QQSpec Copyright: (c) digitally induced GmbH, 2020 -} -module Test.HSX.QQSpec where +module IHP.HSX.QQSpec where import Test.Hspec -import IHP.Prelude +import Prelude import IHP.HSX.QQ import qualified Text.Blaze.Renderer.Text as Blaze import Text.Blaze (preEscapedTextValue) +import Data.Text +tests :: SpecWith () tests = do describe "HSX" do it "should work with static html" do diff --git a/ihp-hsx/Test/Main.hs b/ihp-hsx/Test/Main.hs new file mode 100644 index 000000000..288e12321 --- /dev/null +++ b/ihp-hsx/Test/Main.hs @@ -0,0 +1,12 @@ +module Main where + +import Prelude + +import Test.Hspec +import qualified IHP.HSX.QQSpec +import qualified IHP.HSX.ParserSpec + +main :: IO () +main = hspec do + IHP.HSX.QQSpec.tests + IHP.HSX.ParserSpec.tests \ No newline at end of file diff --git a/ihp-hsx/ihp-hsx.cabal b/ihp-hsx/ihp-hsx.cabal index 0fd3ef0f0..b6973912d 100644 --- a/ihp-hsx/ihp-hsx.cabal +++ b/ihp-hsx/ihp-hsx.cabal @@ -88,4 +88,57 @@ library , IHP.HSX.ConvertibleStrings , IHP.HSX.HaskellParser , IHP.HSX.HsExpToTH - , IHP.HSX.Attribute \ No newline at end of file + , IHP.HSX.Attribute + +test-suite ihp-hsx-tests + type: exitcode-stdio-1.0 + hs-source-dirs: Test + main-is: Main.hs + other-modules: + IHP.HSX.ParserSpec + IHP.HSX.QQSpec + default-language: Haskell2010 + build-depends: + base >= 4.17.0 && < 4.20 + , ihp-hsx + , hspec + , text >= 2.0.1 && < 2.2 + , bytestring >= 0.11.3 && < 0.13 + , containers >= 0.6.6 && < 0.7 + , blaze-markup + , megaparsec + , template-haskell + default-extensions: + OverloadedStrings + , NoImplicitPrelude + , ImplicitParams + , Rank2Types + , NamedFieldPuns + , TypeSynonymInstances + , FlexibleInstances + , DisambiguateRecordFields + , DuplicateRecordFields + , OverloadedLabels + , FlexibleContexts + , DataKinds + , QuasiQuotes + , TypeFamilies + , PackageImports + , ScopedTypeVariables + , RecordWildCards + , TypeApplications + , DataKinds + , InstanceSigs + , DeriveGeneric + , MultiParamTypeClasses + , TypeOperators + , DeriveDataTypeable + , DefaultSignatures + , BangPatterns + , FunctionalDependencies + , PartialTypeSignatures + , BlockArguments + , LambdaCase + , StandaloneDeriving + , TemplateHaskell + , OverloadedRecordDot \ No newline at end of file From 56f9e435d6fdccc8767b24ea4f745317d28972ef Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Fri, 2 Aug 2024 17:58:52 -0400 Subject: [PATCH 06/47] Added hspec dep to ihp-hsx --- NixSupport/haskell-packages/ihp-hsx.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/NixSupport/haskell-packages/ihp-hsx.nix b/NixSupport/haskell-packages/ihp-hsx.nix index 3e1559116..4e3b3d7ef 100644 --- a/NixSupport/haskell-packages/ihp-hsx.nix +++ b/NixSupport/haskell-packages/ihp-hsx.nix @@ -12,6 +12,7 @@ , haskell-src-meta , containers , unordered-containers +, hspec }: mkDerivation { pname = "ihp-hsx"; @@ -32,6 +33,7 @@ mkDerivation { containers unordered-containers ]; + testHaskellDepends = [ hspec ]; license = lib.licenses.mit; enableLibraryForGhci = true; homepage = "https://ihp.digitallyinduced.com"; From 2c234cb84a912274f0704ce0445d1bf5c605048a Mon Sep 17 00:00:00 2001 From: Andrey Troeglazov Date: Tue, 6 Aug 2024 14:01:46 +0700 Subject: [PATCH 07/47] Add empty option value to 
styledSelectFormField for required html validation --- IHP/View/CSSFramework.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IHP/View/CSSFramework.hs b/IHP/View/CSSFramework.hs index 45fcbb77d..62dbe7421 100644 --- a/IHP/View/CSSFramework.hs +++ b/IHP/View/CSSFramework.hs @@ -179,7 +179,7 @@ instance Default CSSFramework where autofocus={autofocus} {...additionalAttributes} > - + {forEach (options fieldType) (getOption)} From 17c09bf91c32ccc91c351a2d0016bd476bfeba58 Mon Sep 17 00:00:00 2001 From: Aron Novak Date: Tue, 6 Aug 2024 14:11:04 +0000 Subject: [PATCH 08/47] Update testing.markdown - workaround GH Runner disk space issue (#1997) * Update testing.markdown - workaround GH Runner disk space issue * gh actions - link --- Guide/testing.markdown | 53 +----------------------------------------- 1 file changed, 1 insertion(+), 52 deletions(-) diff --git a/Guide/testing.markdown b/Guide/testing.markdown index e9b3ced68..93792a3e9 100644 --- a/Guide/testing.markdown +++ b/Guide/testing.markdown @@ -303,55 +303,4 @@ For more details on how to structure test suites see the [Hspec manual](http://h ## GitHub Actions -The following GitHub Action workflow can be used to run the tests on CI: - -```yaml -# .github/workflows/test.yml - -name: Test - -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - branches: [ main ] - pull_request: - branches: [ main ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - tests: - name: Run Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: cachix/install-nix-action@v27 - with: - nix_path: nixpkgs=https://github.com/NixOS/nixpkgs/archive/51bcdc4cdaac48535dabf0ad4642a66774c609ed.tar.gz - - # Use the cachix cache for faster builds. - - name: Cachix Init - uses: cachix/cachix-action@v15 - with: - name: digitallyinduced - skipPush: true - - # Install direnv, which also `direnv allow`s the project. - - uses: HatsuneMiku3939/direnv-action@v1.0.7 - with: - direnvVersion: 2.32.3 - - - name: Run project and tests - run: | - # Build generated files. - nix-shell --run "make build/Generated/Types.hs" - - # Start the project in the background. - nix-shell --run "devenv up &" - - # Execute the tests. - nix-shell --run "runghc $(make print-ghc-extensions) -i. -ibuild -iConfig Test/Main.hs" -``` +A GitHub Action workflow can be used to run the tests on CI and do deployments. Consult the [IHP Boilerplate example](https://github.com/digitallyinduced/ihp-boilerplate/blob/master/.github/workflows/test.yml) for more details. 
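
For context on patch 07 above ("Add empty option value to styledSelectFormField for required html validation"): the HTML spec only treats the first option of a non-multiple `<select required>` as a rejectable placeholder when that option's value is the empty string. Without an explicit `value` attribute, an option's submitted value falls back to its text content, so a pre-selected placeholder would wrongly satisfy the `required` check. A minimal sketch of the rendered markup, assuming a placeholder text of "Please select" (the exact string is not shown in the diff):

```html
<!-- Before: no value attribute, so the option's value defaults to its text
     content ("Please select"), and a required select accepts it as filled in -->
<option selected disabled>Please select</option>

<!-- After: the explicit empty value marks this as the placeholder label option,
     so the browser's required-field validation blocks submitting it -->
<option selected disabled value="">Please select</option>
```

This is why adding `value=""` alone is enough to make `required` enforceable on styled selects, without any client-side JavaScript.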
From 56300596ac8522759db953407ab99696f97b28e6 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Wed, 7 Aug 2024 11:00:05 -0400 Subject: [PATCH 09/47] Fixed tests The HSX tests have been moved to the ihp-hsx package --- Test/Main.hs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Test/Main.hs b/Test/Main.hs index c6b52477c..4a26b94cd 100644 --- a/Test/Main.hs +++ b/Test/Main.hs @@ -27,8 +27,6 @@ import qualified Test.IDE.CodeGeneration.ControllerGenerator import qualified Test.IDE.CodeGeneration.ViewGenerator import qualified Test.IDE.CodeGeneration.MailGenerator import qualified Test.IDE.CodeGeneration.JobGenerator -import qualified Test.HSX.QQSpec -import qualified Test.HSX.ParserSpec import qualified Test.NameSupportSpec import qualified Test.HaskellSupportSpec import qualified Test.View.CSSFrameworkSpec @@ -64,10 +62,8 @@ main = hspec do Test.IDE.CodeGeneration.ViewGenerator.tests Test.IDE.CodeGeneration.MailGenerator.tests Test.IDE.CodeGeneration.JobGenerator.tests - Test.HSX.QQSpec.tests Test.NameSupportSpec.tests Test.HaskellSupportSpec.tests - Test.HSX.ParserSpec.tests Test.View.CSSFrameworkSpec.tests Test.View.FormSpec.tests Test.Controller.ContextSpec.tests From 6bba47921a27b61e1098cc01106f17fe01a3a4ad Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Wed, 7 Aug 2024 14:19:30 -0400 Subject: [PATCH 10/47] fixed tests not running on master --- .github/workflows/tests.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 22d1692e3..b7c85192d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,7 +2,8 @@ name: "Tests + Compile" on: pull_request: branches: [master] - + push: + branches: 'master' jobs: tests: runs-on: ARM64 From 1ad92f01664db9e3b62fd06d7f816f0978d6baa2 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 8 Aug 2024 10:10:41 -0400 Subject: [PATCH 11/47] Updated devenv --- flake.lock | 8 ++++---- flake.nix | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.lock b/flake.lock index 71f7a85f1..05ead6bf6 100644 --- a/flake.lock +++ b/flake.lock @@ -77,17 +77,17 @@ "pre-commit-hooks": "pre-commit-hooks" }, "locked": { - "lastModified": 1716484006, - "narHash": "sha256-2gtN5jf21HS9TAZXhf9G+OSUY1TQ/95n6clcuFjYQ58=", + "lastModified": 1723102610, + "narHash": "sha256-oLJREEBBbzWEpne9IvNOVHSdVNPt1W6SNxPJQxUyShc=", "owner": "cachix", "repo": "devenv", - "rev": "800f19d1b999f89464fd8e0226abf4b3b444b0fa", + "rev": "6e318854efa95c5e67a1152547f838754e8f0306", "type": "github" }, "original": { "owner": "cachix", "repo": "devenv", - "rev": "800f19d1b999f89464fd8e0226abf4b3b444b0fa", + "rev": "6e318854efa95c5e67a1152547f838754e8f0306", "type": "github" } }, diff --git a/flake.nix b/flake.nix index 598cf821a..3301cee59 100644 --- a/flake.nix +++ b/flake.nix @@ -12,7 +12,7 @@ flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs"; # used for setting up development environments - devenv.url = "github:cachix/devenv?rev=800f19d1b999f89464fd8e0226abf4b3b444b0fa"; + devenv.url = "github:cachix/devenv?rev=6e318854efa95c5e67a1152547f838754e8f0306"; devenv.inputs.nixpkgs.follows = "nixpkgs"; # TODO use a corresponding release branch From 07873b46383a285699a2e2abf2e0c0a29307d908 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 8 Aug 2024 11:14:07 -0400 Subject: [PATCH 12/47] Added failing test for AutoRefresh triggers --- Test/IDE/CodeGeneration/MigrationGenerator.hs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git 
a/Test/IDE/CodeGeneration/MigrationGenerator.hs b/Test/IDE/CodeGeneration/MigrationGenerator.hs index abe4ad431..b4b87cfa0 100644 --- a/Test/IDE/CodeGeneration/MigrationGenerator.hs +++ b/Test/IDE/CodeGeneration/MigrationGenerator.hs @@ -1362,6 +1362,17 @@ CREATE POLICY "Users can read and edit their own record" ON public.users USING ( diffSchemas targetSchema actualSchema `shouldBe` migration + it "should ignore ar_did_update_.. triggers by IHP.AutoRefresh" do + let actualSchema = sql $ cs [plain| + CREATE TRIGGER ar_did_update_plans AFTER UPDATE ON public.plans FOR EACH ROW EXECUTE FUNCTION public.notify_did_change_plans(); + CREATE TRIGGER ar_did_insert_offices AFTER INSERT ON public.offices FOR EACH STATEMENT EXECUTE FUNCTION public.notify_did_change_offices(); + CREATE TRIGGER ar_did_delete_company_profiles AFTER DELETE ON public.company_profiles FOR EACH STATEMENT EXECUTE FUNCTION public.notify_did_change_company_profiles(); + |] + let targetSchema = [] + let migration = [] + + diffSchemas targetSchema actualSchema `shouldBe` migration + it "should deal with truncated identifiers" do let actualSchema = sql $ cs [plain| CREATE POLICY "Users can manage the prepare_context_jobs if they can see the C" ON public.prepare_context_jobs USING ((EXISTS ( SELECT 1 From 86410da14b77b85ed02f2ad09495ea2d6d7c4931 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 8 Aug 2024 11:15:21 -0400 Subject: [PATCH 13/47] Fixed migration generator dropping auto refresh triggers in migration --- ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs index c21631b52..26d06faf9 100644 --- a/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs @@ -188,7 +188,7 @@ removeNoise = filter \case StatementCreateTable { unsafeGetCreateTable = CreateTable { name = "schema_migrations" } } -> False AddConstraint { tableName = "schema_migrations" } -> False CreateFunction { functionName } | "notify_" `Text.isPrefixOf` functionName -> False - CreateTrigger { name } | any (`Text.isPrefixOf` name) ["did_update_", "did_delete_", "did_insert_"] -> False + CreateTrigger { name } | any (`Text.isPrefixOf` name) ["did_update_", "did_delete_", "did_insert_", "ar_did_update_", "ar_did_delete_", "ar_did_insert_"] -> False StatementCreateTable { unsafeGetCreateTable = CreateTable { name = "large_pg_notifications" } } -> False CreateIndex { tableName = "large_pg_notifications" } -> False _ -> True From 80143c74a5a513f38c756f566dacdb07e73228fd Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 8 Aug 2024 11:55:26 -0400 Subject: [PATCH 14/47] fixed tests --- Test/View/CSSFrameworkSpec.hs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Test/View/CSSFrameworkSpec.hs b/Test/View/CSSFrameworkSpec.hs index 945defe00..e0c2903fb 100644 --- a/Test/View/CSSFrameworkSpec.hs +++ b/Test/View/CSSFrameworkSpec.hs @@ -198,23 +198,23 @@ tests = do it "should render" do let select = baseSelect - styledFormField cssFramework cssFramework select `shouldRenderTo` "
" + styledFormField cssFramework cssFramework select `shouldRenderTo` "
" it "should render with disabled" do let select = baseSelect { disabled = True } - styledFormField cssFramework cssFramework select `shouldRenderTo` "
" + styledFormField cssFramework cssFramework select `shouldRenderTo` "
" it "should render with selected" do let select = baseSelect { fieldValue = "b" } - styledFormField cssFramework cssFramework select `shouldRenderTo` "
" + styledFormField cssFramework cssFramework select `shouldRenderTo` "
" it "should render with custom placeholder" do let select = baseSelect { placeholder = "Pick something" } - styledFormField cssFramework cssFramework select `shouldRenderTo` "
" + styledFormField cssFramework cssFramework select `shouldRenderTo` "
" it "should render with additional attributes" do let select = baseSelect { additionalAttributes = [ ("data-x", "true") ] } - styledFormField cssFramework cssFramework select `shouldRenderTo` "
" + styledFormField cssFramework cssFramework select `shouldRenderTo` "
" describe "radio" do let baseRadio = FormField From 27a696af90efcd0f84260d84805dd2dc753e818c Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Tue, 13 Aug 2024 16:49:44 -0400 Subject: [PATCH 15/47] Added mainInParentDirectory for easier IHP development --- ihp-ide/IHP/IDE/Postgres.hs | 2 +- ihp-ide/IHP/IDE/Types.hs | 7 +++++++ ihp-ide/exe/IHP/IDE/DevServer.hs | 27 ++++++++++++++++++++++----- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/ihp-ide/IHP/IDE/Postgres.hs b/ihp-ide/IHP/IDE/Postgres.hs index 72d3b39d7..6c287c66e 100644 --- a/ihp-ide/IHP/IDE/Postgres.hs +++ b/ihp-ide/IHP/IDE/Postgres.hs @@ -19,7 +19,7 @@ startPostgres = do shouldInit <- needsDatabaseInit when shouldInit initDatabase let args = ["-D", "build/db/state", "-k", currentDir <> "/build/db", "-c", "listen_addresses="] - let params = (Process.proc "postgres" args) + let params = (procDirenvAware "postgres" args) { Process.std_in = Process.CreatePipe , Process.std_out = Process.CreatePipe , Process.std_err = Process.CreatePipe diff --git a/ihp-ide/IHP/IDE/Types.hs b/ihp-ide/IHP/IDE/Types.hs index efc287240..ee248aa02 100644 --- a/ihp-ide/IHP/IDE/Types.hs +++ b/ihp-ide/IHP/IDE/Types.hs @@ -21,6 +21,12 @@ data ManagedProcess = ManagedProcess , processHandle :: !ProcessHandle } deriving (Show) +procDirenvAware :: (?context :: Context) => FilePath -> [String] -> Process.CreateProcess +procDirenvAware command args = + if ?context.wrapWithDirenv + then Process.proc "direnv" (["exec", ".", command] <> args) + else Process.proc command args + createManagedProcess :: CreateProcess -> IO ManagedProcess createManagedProcess config = do process <- Process.createProcess config @@ -128,6 +134,7 @@ data Context = Context , ghciInChan :: !(Queue.InChan OutputLine) -- ^ Output of the app ghci is written here , ghciOutChan :: !(Queue.OutChan OutputLine) -- ^ Output of the app ghci is consumed here , liveReloadClients :: !(IORef (Map UUID Websocket.Connection)) + , wrapWithDirenv :: !Bool } dispatch :: (?context :: Context) => Action -> IO () diff --git a/ihp-ide/exe/IHP/IDE/DevServer.hs b/ihp-ide/exe/IHP/IDE/DevServer.hs index a5ccb5cc2..9293650af 100644 --- a/ihp-ide/exe/IHP/IDE/DevServer.hs +++ b/ihp-ide/exe/IHP/IDE/DevServer.hs @@ -1,4 +1,4 @@ -module Main (main) where +module Main (main, mainInParentDirectory) where import ClassyPrelude import qualified System.Process as Process @@ -30,9 +30,25 @@ import qualified IHP.FrameworkConfig as FrameworkConfig import qualified Control.Concurrent.Chan.Unagi as Queue import IHP.IDE.FileWatcher import qualified System.Environment as Env +import qualified System.Directory as Directory + +mainInParentDirectory :: IO () +mainInParentDirectory = do + cwd <- Directory.getCurrentDirectory + let projectDir = cwd <> "/../" + Directory.setCurrentDirectory projectDir + + Env.setEnv "IHP_LIB" (cwd <> "/ihp-ide/lib/IHP") + Env.setEnv "TOOLSERVER_STATIC" (cwd <> "/ihp-ide/lib/IHP/static") + Env.setEnv "IHP_STATIC" (cwd <> "/lib/IHP/static") + + mainWithOptions True main :: IO () -main = withUtf8 do +main = mainWithOptions False + +mainWithOptions :: Bool -> IO () +mainWithOptions wrapWithDirenv = withUtf8 do actionVar <- newEmptyMVar appStateRef <- emptyAppState >>= newIORef portConfig <- findAvailablePortConfig @@ -45,7 +61,7 @@ main = withUtf8 do logger <- Log.newLogger def (ghciInChan, ghciOutChan) <- Queue.newChan liveReloadClients <- newIORef mempty - let ?context = Context { actionVar, portConfig, appStateRef, isDebugMode, logger, ghciInChan, ghciOutChan, liveReloadClients } + let 
?context = Context { actionVar, portConfig, appStateRef, isDebugMode, logger, ghciInChan, ghciOutChan, liveReloadClients, wrapWithDirenv } -- Print IHP Version when in debug mode when isDebugMode (Log.debug ("IHP Version: " <> Version.ihpVersion)) @@ -215,7 +231,7 @@ startOrWaitPostgres = do startPostgres pure () -startGHCI :: IO ManagedProcess +startGHCI :: (?context :: Context) => IO ManagedProcess startGHCI = do let args = [ "-threaded" @@ -227,7 +243,8 @@ startGHCI = do , "-ghci-script", ".ghci" -- Because the previous line ignored default ghci config file locations, we have to manual load our .ghci , "+RTS", "-A128m", "-n2m", "-H2m", "--nonmoving-gc", "-N" ] - createManagedProcess (Process.proc "ghci" args) + + createManagedProcess (procDirenvAware "ghci" args) { Process.std_in = Process.CreatePipe , Process.std_out = Process.CreatePipe , Process.std_err = Process.CreatePipe From 20522b70b1fd573429bf19064d29b98ddbf1abcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Kukie=C5=82a?= <53443372+kukimik@users.noreply.github.com> Date: Tue, 3 Sep 2024 23:04:56 +0200 Subject: [PATCH 16/47] Fix grepping for direnv, add profile files to messages Because if one of the files ~/.bashrc, ~/.bash_profile didn't exist, but the other file contained the string "direnv", grep returned non-zero exit status. --- ProjectGenerator/bin/ihp-new | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ProjectGenerator/bin/ihp-new b/ProjectGenerator/bin/ihp-new index f85039bb6..106d03588 100755 --- a/ProjectGenerator/bin/ihp-new +++ b/ProjectGenerator/bin/ihp-new @@ -133,8 +133,8 @@ if ! [ -x "$(command -v direnv)" ]; then if [ $DIRENV_HOOK_ADDED = 0 ]; then echo "" echo "Please add the hook manually." - echo -e "Bash: Add \e[4meval \"\$(direnv hook bash)\"\e[0m to ~/.bashrc" - echo -e "ZSH: Add \e[4meval \"\$(direnv hook zsh)\"\e[0m to ~/.zshrc" + echo -e "Bash: Add \e[4meval \"\$(direnv hook bash)\"\e[0m to ~/.bashrc or ~/.bash_profile" + echo -e "ZSH: Add \e[4meval \"\$(direnv hook zsh)\"\e[0m to ~/.zshrc or ~/.zprofile" echo "Other shells: See https://direnv.net/#README" echo -e "\e[1mHave you hooked direnv into your shell? 
(Type y to proceed) \e[0m" while true; do @@ -160,12 +160,12 @@ fi DIRENV_SETUP=0; case "${SHELL##*/}" in zsh ) - if grep direnv "$HOME/.zshrc" "$HOME/.zprofile" 2>&1 >/dev/null; then + if grep -q -s direnv "$HOME/.zshrc" "$HOME/.zprofile"; then DIRENV_SETUP=1; fi; ;; bash ) - if grep direnv "$HOME/.bashrc" "$HOME/.bash_profile" 2>&1 >/dev/null; then + if grep -q -s direnv "$HOME/.bashrc" "$HOME/.bash_profile"; then DIRENV_SETUP=1; fi; ;; From 04773f872c5f2b1a08b486dff00d657b4c1e454e Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 5 Sep 2024 13:31:22 -0700 Subject: [PATCH 17/47] Reindented file --- NixSupport/default.nix | 138 ++++++++++++++++++++--------------------- 1 file changed, 69 insertions(+), 69 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index 30b6cb3f4..805a00004 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -17,105 +17,105 @@ let allHaskellPackages = - (if withHoogle - then ghc.ghcWithHoogle - else ghc.ghcWithPackages) haskellDeps; + (if withHoogle + then ghc.ghcWithHoogle + else ghc.ghcWithPackages) haskellDeps; allNativePackages = builtins.concatLists [ - (otherDeps pkgs) + (otherDeps pkgs) ]; appBinary = if optimized - then "build/bin/RunOptimizedProdServer" - else "build/bin/RunUnoptimizedProdServer"; + then "build/bin/RunOptimizedProdServer" + else "build/bin/RunUnoptimizedProdServer"; jobsBinary = if optimized - then "build/bin/RunJobsOptimized" - else "build/bin/RunJobs"; + then "build/bin/RunJobsOptimized" + else "build/bin/RunJobs"; in pkgs.stdenv.mkDerivation { name = appName; buildPhase = '' - runHook preBuild - - mkdir -p build - - # When npm install is executed by the project's makefile it will fail with: - # - # EACCES: permission denied, mkdir '/homeless-shelter' - # - # To avoid this error we use /tmp as our home directory for the build - # - # See https://github.com/svanderburg/node2nix/issues/217#issuecomment-751311272 - export HOME=/tmp - - export IHP_LIB=${ihp}/lib/IHP - export IHP=${ihp}/lib/IHP - export APP_RTS_FLAGS="${rtsFlags}" - export OPTIMIZATION_LEVEL="${optimizationLevel}" - - make -j ${appBinary} - - # Build job runner if there are any jobs - if find -type d -iwholename \*/Job|grep .; then - make -j ${jobsBinary}; - fi; - - # Build all scripts if there are any - mkdir -p Application/Script - SCRIPT_TARGETS=`find Application/Script -type f -iwholename '*.hs' -not -name 'Prelude.hs' -exec basename {} .hs ';' | sed 's#^#build/bin/Script/#' | tr "\n" " "` - if [[ ! -z "$SCRIPT_TARGETS" ]]; then - # Need to use -j1 here to avoid race conditions of temp files created by GHC. 
+ runHook preBuild + + mkdir -p build + + # When npm install is executed by the project's makefile it will fail with: # - # These errors look like: + # EACCES: permission denied, mkdir '/homeless-shelter' # - # : error: - # build/RunUnoptimizedProdServer/Application/Script/Prelude.o.tmp: renameFile:renamePath:rename: does not exist (No such file or directory) + # To avoid this error we use /tmp as our home directory for the build # - make -j1 $SCRIPT_TARGETS; - fi; - - runHook postBuild + # See https://github.com/svanderburg/node2nix/issues/217#issuecomment-751311272 + export HOME=/tmp + + export IHP_LIB=${ihp}/lib/IHP + export IHP=${ihp}/lib/IHP + export APP_RTS_FLAGS="${rtsFlags}" + export OPTIMIZATION_LEVEL="${optimizationLevel}" + + make -j ${appBinary} + + # Build job runner if there are any jobs + if find -type d -iwholename \*/Job|grep .; then + make -j ${jobsBinary}; + fi; + + # Build all scripts if there are any + mkdir -p Application/Script + SCRIPT_TARGETS=`find Application/Script -type f -iwholename '*.hs' -not -name 'Prelude.hs' -exec basename {} .hs ';' | sed 's#^#build/bin/Script/#' | tr "\n" " "` + if [[ ! -z "$SCRIPT_TARGETS" ]]; then + # Need to use -j1 here to avoid race conditions of temp files created by GHC. + # + # These errors look like: + # + # : error: + # build/RunUnoptimizedProdServer/Application/Script/Prelude.o.tmp: renameFile:renamePath:rename: does not exist (No such file or directory) + # + make -j1 $SCRIPT_TARGETS; + fi; + + runHook postBuild ''; installPhase = '' - runHook preInstall + runHook preInstall - mkdir -p "$out" - mkdir -p $out/bin $out/lib + mkdir -p "$out" + mkdir -p $out/bin $out/lib - mv ${appBinary} $out/bin/RunProdServerWithoutOptions + mv ${appBinary} $out/bin/RunProdServerWithoutOptions - INPUT_HASH="$((basename $out) | cut -d - -f 1)" - makeWrapper $out/bin/RunProdServerWithoutOptions $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + INPUT_HASH="$((basename $out) | cut -d - -f 1)" + makeWrapper $out/bin/RunProdServerWithoutOptions $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} - # Copy job runner binary to bin/ if we built it - if [ -f ${jobsBinary} ]; then - mv ${jobsBinary} $out/bin/RunJobsWithoutOptions; - makeWrapper $out/bin/RunJobsWithoutOptions $out/bin/RunJobs --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} - fi; + # Copy job runner binary to bin/ if we built it + if [ -f ${jobsBinary} ]; then + mv ${jobsBinary} $out/bin/RunJobsWithoutOptions; + makeWrapper $out/bin/RunJobsWithoutOptions $out/bin/RunJobs --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + fi; - # Copy IHP Script binaries to bin/ - mkdir -p build/bin/Script - find build/bin/Script/ -type f -print0 | - while read -d $'\0' script; do - script_basename=$(basename "$script") - mv "build/bin/Script/$script_basename" "$out/bin/$script_basename"; - done + # Copy IHP Script binaries to bin/ + mkdir -p build/bin/Script + find build/bin/Script/ -type f -print0 | + while read -d $'\0' script; do + script_basename=$(basename "$script") + mv "build/bin/Script/$script_basename" 
"$out/bin/$script_basename"; + done - mv static "$out/lib/static" + mv static "$out/lib/static" - runHook postInstall + runHook postInstall ''; dontFixup = true; src = pkgs.nix-gitignore.gitignoreSource [] projectPath; buildInputs = builtins.concatLists [ [allHaskellPackages] allNativePackages ]; nativeBuildInputs = builtins.concatLists [ - [ pkgs.makeWrapper + [ pkgs.makeWrapper pkgs.cacert # Needed for npm install to work from within the IHP build process ghc.ihp-ide # Needed for build-generated-code - ] - (if includeDevTools then [(pkgs.postgresql_13.withPackages postgresExtensions)] else []) + ] + (if includeDevTools then [(pkgs.postgresql_13.withPackages postgresExtensions)] else []) ]; shellHook = "eval $(egrep ^export ${allHaskellPackages}/bin/ghc)"; enableParallelBuilding = true; impureEnvVars = pkgs.lib.fetchers.proxyImpureEnvVars; # Needed for npm install to work from within the IHP build process - } + } \ No newline at end of file From b5e5cdafc4d0980c333db0afcafaf038be4e2eee Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sun, 8 Sep 2024 20:19:30 -0700 Subject: [PATCH 18/47] Speed up 'nix build' by caching Generated.Types modules The Generated.Types only needs to be rebuild when the Application/Schema.sql has changed --- NixSupport/default.nix | 34 ++++++++++++++++++++++++++++++++-- flake-module.nix | 2 ++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index 805a00004..473cb6ce2 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -13,6 +13,7 @@ , rtsFlags ? "" , appName ? "app" , optimizationLevel ? "2" +, filter }: let @@ -31,13 +32,43 @@ let jobsBinary = if optimized then "build/bin/RunJobsOptimized" else "build/bin/RunJobs"; + + odir = if optimized then "RunOptimizedProdServer" else "RunUnoptimizedProdServer"; + + schemaObjectFiles = + let + self = projectPath; + in + pkgs.stdenv.mkDerivation { + name = appName + "-schema"; + buildPhase = '' + mkdir -p build/Generated + build-generated-code + + export IHP=${ihp}/lib/IHP + ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/${odir} -hidir build/${odir} + + cp -r build $out + ''; + src = filter { root = self; include = ["Application/Schema.sql" "Makefile"]; }; + nativeBuildInputs = + [ (ghc.ghcWithPackages (p: [ p.ihp-ide ])) # Needed for build-generated-code + ] + ; + dontInstall = true; + dontFixup = false; + }; in pkgs.stdenv.mkDerivation { name = appName; buildPhase = '' runHook preBuild - mkdir -p build + mkdir -p build/Generated build/${odir} + cp -r ${schemaObjectFiles}/${odir} build/ + cp -r ${schemaObjectFiles}/Generated build/ + + chmod -R +w build/${odir}/* # When npm install is executed by the project's makefile it will fail with: # @@ -111,7 +142,6 @@ in nativeBuildInputs = builtins.concatLists [ [ pkgs.makeWrapper pkgs.cacert # Needed for npm install to work from within the IHP build process - ghc.ihp-ide # Needed for build-generated-code ] (if includeDevTools then [(pkgs.postgresql_13.withPackages postgresExtensions)] else []) ]; diff --git a/flake-module.nix b/flake-module.nix index 2d6457577..9748e195d 100644 --- a/flake-module.nix +++ b/flake-module.nix @@ -148,6 +148,7 @@ ihpFlake: rtsFlags = cfg.rtsFlags; optimizationLevel = cfg.optimizationLevel; appName = cfg.appName; + filter = ihpFlake.inputs.nix-filter.lib; }; unoptimized-prod-server = import "${ihp}/NixSupport/default.nix" { @@ -162,6 +163,7 @@ ihpFlake: rtsFlags = cfg.rtsFlags; optimizationLevel = 
"0"; appName = cfg.appName; + filter = ihpFlake.inputs.nix-filter.lib; }; unoptimized-docker-image = pkgs.dockerTools.buildImage { From c50d6af471c5838a3d9fba5da89ca4156515fe56 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 9 Sep 2024 22:34:54 -0700 Subject: [PATCH 19/47] Speed up nix build by splitting built into multiple nix packages --- NixSupport/default.nix | 134 +++++++++++++++++++++++++---------------- 1 file changed, 83 insertions(+), 51 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index 473cb6ce2..0b44c4fd4 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -25,16 +25,6 @@ let (otherDeps pkgs) ]; - appBinary = if optimized - then "build/bin/RunOptimizedProdServer" - else "build/bin/RunUnoptimizedProdServer"; - - jobsBinary = if optimized - then "build/bin/RunJobsOptimized" - else "build/bin/RunJobs"; - - odir = if optimized then "RunOptimizedProdServer" else "RunUnoptimizedProdServer"; - schemaObjectFiles = let self = projectPath; @@ -46,7 +36,7 @@ let build-generated-code export IHP=${ihp}/lib/IHP - ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/${odir} -hidir build/${odir} + ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/RunProdServer -hidir build/RunProdServer cp -r build $out ''; @@ -58,18 +48,85 @@ let dontInstall = true; dontFixup = false; }; + + prodGhcOptions = "-funbox-strict-fields -fconstraint-solver-iterations=100 -fdicts-strict -with-rtsopts=\"${rtsFlags}\""; + + binaries = + pkgs.stdenv.mkDerivation { + name = appName + "-binaries"; + buildPhase = '' + mkdir -p build/Generated build/RunProdServer + cp -r ${schemaObjectFiles}/RunProdServer build/ + cp -r ${schemaObjectFiles}/Generated build/ + + chmod -R +w build/RunProdServer/* + + export IHP_LIB=${ihp}/lib/IHP + export IHP=${ihp}/lib/IHP + + mkdir -p build/bin build/RunUnoptimizedProdServer + + echo ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer + ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer + + # Build job runner if there are any jobs + if find -type d -iwholename \*/Job|grep .; then + echo "module RunJobs (main) where" > build/RunJobs.hs + echo "import Application.Script.Prelude" >> build/RunJobs.hs + echo "import IHP.ScriptSupport" >> build/RunJobs.hs + echo "import IHP.Job.Runner" >> build/RunJobs.hs + echo "import qualified Config" >> build/RunJobs.hs + echo "import Main ()" >> build/RunJobs.hs + echo "main :: IO ()" >> build/RunJobs.hs + echo "main = runScript Config.config (runJobWorkers (workers RootApplication))" >> build/RunJobs.hs + ghc -O${if optimized then optimizationLevel else "0"} -main-is 'RunJobs.main' $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} build/RunJobs.hs -o build/bin/RunJobs -odir build/RunProdServer -hidir build/RunProdServer + fi; + + # Build all scripts if there are any + mkdir -p Application/Script + SCRIPT_TARGETS=`find Application/Script -type f -iwholename '*.hs' -not -name 'Prelude.hs' -exec basename {} .hs ';' | sed 's#^#build/bin/Script/#' | tr "\n" " "` + if [[ ! 
-z "$SCRIPT_TARGETS" ]]; then + # Need to use -j1 here to avoid race conditions of temp files created by GHC. + # + # These errors look like: + # + # : error: + # build/RunUnoptimizedProdServer/Application/Script/Prelude.o.tmp: renameFile:renamePath:rename: does not exist (No such file or directory) + # + make -j1 $SCRIPT_TARGETS; + fi; + ''; + installPhase = '' + mkdir -p "$out" + mkdir -p $out/bin $out/lib + + mv build/bin/RunProdServer $out/bin/RunProdServer + + # Copy job runner binary to bin/ if we built it + if [ -f build/bin/RunJobs ]; then + mv build/bin/RunJobs $out/bin/RunJobs; + fi; + + # Copy IHP Script binaries to bin/ + mkdir -p build/bin/Script + find build/bin/Script/ -type f -print0 | + while read -d $'\0' script; do + script_basename=$(basename "$script") + mv "build/bin/Script/$script_basename" "$out/bin/$script_basename"; + done + ''; + dontFixup = true; + src = filter { root = pkgs.nix-gitignore.gitignoreSource [] projectPath; include = [filter.isDirectory "Makefile" (filter.matchExt "hs")]; exclude = ["static" "Frontend"]; }; + buildInputs = [allHaskellPackages]; + nativeBuildInputs = [ pkgs.makeWrapper ]; + enableParallelBuilding = true; + }; in pkgs.stdenv.mkDerivation { name = appName; buildPhase = '' runHook preBuild - mkdir -p build/Generated build/${odir} - cp -r ${schemaObjectFiles}/${odir} build/ - cp -r ${schemaObjectFiles}/Generated build/ - - chmod -R +w build/${odir}/* - # When npm install is executed by the project's makefile it will fail with: # # EACCES: permission denied, mkdir '/homeless-shelter' @@ -81,29 +138,8 @@ in export IHP_LIB=${ihp}/lib/IHP export IHP=${ihp}/lib/IHP - export APP_RTS_FLAGS="${rtsFlags}" - export OPTIMIZATION_LEVEL="${optimizationLevel}" - make -j ${appBinary} - - # Build job runner if there are any jobs - if find -type d -iwholename \*/Job|grep .; then - make -j ${jobsBinary}; - fi; - - # Build all scripts if there are any - mkdir -p Application/Script - SCRIPT_TARGETS=`find Application/Script -type f -iwholename '*.hs' -not -name 'Prelude.hs' -exec basename {} .hs ';' | sed 's#^#build/bin/Script/#' | tr "\n" " "` - if [[ ! -z "$SCRIPT_TARGETS" ]]; then - # Need to use -j1 here to avoid race conditions of temp files created by GHC. 
- # - # These errors look like: - # - # : error: - # build/RunUnoptimizedProdServer/Application/Script/Prelude.o.tmp: renameFile:renamePath:rename: does not exist (No such file or directory) - # - make -j1 $SCRIPT_TARGETS; - fi; + make -j static/app.css static/app.js runHook postBuild ''; @@ -113,23 +149,19 @@ in mkdir -p "$out" mkdir -p $out/bin $out/lib - mv ${appBinary} $out/bin/RunProdServerWithoutOptions - INPUT_HASH="$((basename $out) | cut -d - -f 1)" - makeWrapper $out/bin/RunProdServerWithoutOptions $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + makeWrapper ${binaries}/bin/RunProdServer $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} # Copy job runner binary to bin/ if we built it - if [ -f ${jobsBinary} ]; then - mv ${jobsBinary} $out/bin/RunJobsWithoutOptions; - makeWrapper $out/bin/RunJobsWithoutOptions $out/bin/RunJobs --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + if [ -f ${binaries}/bin/RunJobs ]; then + makeWrapper ${binaries}/bin/RunJobs $out/bin/RunJobs --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} fi; - # Copy IHP Script binaries to bin/ - mkdir -p build/bin/Script - find build/bin/Script/ -type f -print0 | - while read -d $'\0' script; do - script_basename=$(basename "$script") - mv "build/bin/Script/$script_basename" "$out/bin/$script_basename"; + # Copy other binaries, excluding RunProdServer and RunJobs + find ${binaries}/bin/ -type f -not -name 'RunProdServer' -not -name 'RunJobs' -print0 | + while read -d $'\0' binary; do + binary_basename=$(basename "$binary") + cp "$binary" "$out/bin/$binary_basename"; done mv static "$out/lib/static" From 6b2d51d6e9f43dd0df95b9dff188166844ce65b3 Mon Sep 17 00:00:00 2001 From: Leif Metcalf Date: Thu, 12 Sep 2024 10:11:38 +1000 Subject: [PATCH 20/47] Update your-first-project.markdown --- Guide/your-first-project.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/your-first-project.markdown b/Guide/your-first-project.markdown index e33b8a126..d2fbed2ce 100644 --- a/Guide/your-first-project.markdown +++ b/Guide/your-first-project.markdown @@ -20,7 +20,7 @@ The first time you set up IHP, this command might take 10 - 15 minutes to instal If you don't already use cachix, you will be prompted to install it. You don't need it, but it is highly recommended, as it reduces build time dramatically. Learn more about cachix [here](https://cachix.org/). -While the build is running, take a look at ["What Is Nix"](https://engineering.shopify.com/blogs/engineering/what-is-nix) by Shopify to get a general understanding of how Nix works. +While the build is running, take a look at ["What Is Nix"](https://shopify.engineering/what-is-nix) by Shopify to get a general understanding of how Nix works. 
In case some errors appear now or in later steps: From 66518dee183d71bc5a1e1df59c1e8019298a0c0c Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sun, 6 Oct 2024 18:06:50 +0200 Subject: [PATCH 21/47] Workaround GHC failure https://gitlab.haskell.org/ghc/ghc/-/issues/25042 We can not trigger the GHC bug by using INLINABLE instead of INLINE --- IHP/QueryBuilder.hs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/IHP/QueryBuilder.hs b/IHP/QueryBuilder.hs index fd3c8d093..f07a2e8f0 100644 --- a/IHP/QueryBuilder.hs +++ b/IHP/QueryBuilder.hs @@ -165,7 +165,7 @@ class HasQueryBuilder queryBuilderProvider joinRegister | queryBuilderProvider - injectQueryBuilder :: QueryBuilder table -> queryBuilderProvider table getQueryIndex :: queryBuilderProvider table -> Maybe ByteString getQueryIndex _ = Nothing - {-# INLINE getQueryIndex #-} + {-# INLINABLE getQueryIndex #-} -- Wrapper for QueryBuilders resulting from joins. Associates a joinRegister type. newtype JoinQueryBuilderWrapper joinRegister table = JoinQueryBuilderWrapper (QueryBuilder table) @@ -186,24 +186,24 @@ instance HasQueryBuilder QueryBuilder EmptyModelList where -- JoinQueryBuilderWrappers have query builders instance HasQueryBuilder (JoinQueryBuilderWrapper joinRegister) joinRegister where getQueryBuilder (JoinQueryBuilderWrapper queryBuilder) = queryBuilder - {-# INLINE getQueryBuilder #-} + {-# INLINABLE getQueryBuilder #-} injectQueryBuilder = JoinQueryBuilderWrapper - {-# INLINE injectQueryBuilder #-} + {-# INLINABLE injectQueryBuilder #-} -- NoJoinQueryBuilderWrapper have query builders and the join register does not allow any joins instance HasQueryBuilder NoJoinQueryBuilderWrapper NoJoins where getQueryBuilder (NoJoinQueryBuilderWrapper queryBuilder) = queryBuilder - {-# INLINE getQueryBuilder #-} + {-# INLINABLE getQueryBuilder #-} injectQueryBuilder = NoJoinQueryBuilderWrapper - {-# INLINE injectQueryBuilder #-} + {-# INLINABLE injectQueryBuilder #-} instance (KnownSymbol foreignTable, foreignModel ~ GetModelByTableName foreignTable , KnownSymbol indexColumn, HasField indexColumn foreignModel indexValue) => HasQueryBuilder (LabeledQueryBuilderWrapper foreignTable indexColumn indexValue) NoJoins where getQueryBuilder (LabeledQueryBuilderWrapper queryBuilder) = queryBuilder - {-# INLINE getQueryBuilder #-} + {-# INLINABLE getQueryBuilder #-} injectQueryBuilder = LabeledQueryBuilderWrapper - {-# INLINE injectQueryBuilder #-} + {-# INLINABLE injectQueryBuilder #-} getQueryIndex _ = Just $ symbolToByteString @foreignTable <> "." <> (Text.encodeUtf8 . 
fieldNameToColumnName) (symbolToText @indexColumn) - {-# INLINE getQueryIndex #-} + {-# INLINABLE getQueryIndex #-} data QueryBuilder (table :: Symbol) = From ff45e48aae73823f524e4c01b02b74f38b55a2a6 Mon Sep 17 00:00:00 2001 From: Amitai Burstein Date: Mon, 7 Oct 2024 10:18:07 +0300 Subject: [PATCH 22/47] Save only the relative path of the filename for `StaticDirStorage` (#1990) * Save only the relative path of the filename for `StaticDirStorage` fixes https://github.com/digitallyinduced/ihp/issues/1989 * Adapt createTemporaryDownloadUrlFromPathWithExpiredAt * Change slash logic * Start adding tests * Use newControllerContext * Start fix tests * More fixes * Copy from another test * Add withFrameworkConfig * Fix compile * Fix tests * Fix logic * Ignore test folders * Fix tests * Re-enable all tests * Remove line break * Import cleanups * More line breaks * Line breaks * Fix typo * Prefix objectPath with slash --- .gitignore | 3 + IHP/FileStorage/ControllerFunctions.hs | 13 +++- Test/FileStorage/ControllerFunctionsSpec.hs | 70 +++++++++++++++++++++ Test/Main.hs | 2 + 4 files changed, 85 insertions(+), 3 deletions(-) create mode 100644 Test/FileStorage/ControllerFunctionsSpec.hs diff --git a/.gitignore b/.gitignore index e8d960f41..548b8c738 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,6 @@ devenv.local.nix result* .idea + +# Test folders +static/Test.FileStorage.ControllerFunctionsSpec \ No newline at end of file diff --git a/IHP/FileStorage/ControllerFunctions.hs b/IHP/FileStorage/ControllerFunctions.hs index 864b9a74e..cd9901ea8 100644 --- a/IHP/FileStorage/ControllerFunctions.hs +++ b/IHP/FileStorage/ControllerFunctions.hs @@ -112,7 +112,8 @@ storeFileWithOptions fileInfo options = do |> LBS.writeFile (cs destPath) let frameworkConfig = ?context.frameworkConfig - pure $ frameworkConfig.baseUrl <> "/" <> objectPath + -- Prefix with a slash so it can be used in URLs, even if the baseUrl is empty. + pure $ "/" <> objectPath S3Storage { connectInfo, bucket, baseUrl } -> do let payload = fileInfo |> (.fileContent) @@ -226,7 +227,13 @@ createTemporaryDownloadUrlFromPathWithExpiredAt validInSeconds objectPath = do case storage of StaticDirStorage -> do let frameworkConfig = ?context.frameworkConfig - let url = frameworkConfig.baseUrl <> "/" <> objectPath + let urlSchemes = ["http://", "https://"] + + let url = if any (`isPrefixOf` objectPath) urlSchemes + -- BC, before we saved only the relative path of a file, we saved the full URL. So use it as is. + then objectPath + -- We have the relative path (prefixed with slash), so add the baseUrl. 
+ else frameworkConfig.baseUrl <> objectPath pure TemporaryDownloadUrl { url = cs url, expiredAt = publicUrlExpiredAt } S3Storage { connectInfo, bucket} -> do @@ -409,4 +416,4 @@ storage = ?context.frameworkConfig.appConfig storagePrefix :: (?context :: ControllerContext) => Text storagePrefix = case storage of StaticDirStorage -> "static/" - _ -> "" \ No newline at end of file + _ -> "" diff --git a/Test/FileStorage/ControllerFunctionsSpec.hs b/Test/FileStorage/ControllerFunctionsSpec.hs new file mode 100644 index 000000000..02f6892a0 --- /dev/null +++ b/Test/FileStorage/ControllerFunctionsSpec.hs @@ -0,0 +1,70 @@ +module Test.FileStorage.ControllerFunctionsSpec where + +import Test.Hspec +import IHP.Prelude +import IHP.FileStorage.ControllerFunctions +import IHP.Controller.Context +import IHP.FrameworkConfig +import Network.Wai as Wai (defaultRequest) +import Network.Wai.Parse (FileInfo(..)) +import IHP.Controller.RequestContext +import IHP.FileStorage.Types +import IHP.FileStorage.Config + +tests :: Spec +tests = describe "IHP.FileStorage.ControllerFunctions" $ do + + let config :: ConfigBuilder + config = do + initStaticDirStorage + + let withFrameworkConfig = IHP.FrameworkConfig.withFrameworkConfig config + + describe "storeFileWithOptions" $ do + it "returns the objectPath without the baseUrl" $ do + withFrameworkConfig \frameworkConfig -> do + context <- createControllerContext frameworkConfig + let ?context = context + + let fileInfo = FileInfo + { fileName = "test.txt" + , fileContentType = "text/plain" + , fileContent = "Hello, world!" + } + + -- We pass the UUID that will be used as the filename, so we can easily assert the objectPath. + let options :: StoreFileOptions = def + { fileName = Just "4c55dac2-e411-45ac-aa10-b957b01221df" + , directory = "Test.FileStorage.ControllerFunctionsSpec" + } + + result <- storeFileWithOptions fileInfo options + + result.url `shouldBe` ("/Test.FileStorage.ControllerFunctionsSpec/4c55dac2-e411-45ac-aa10-b957b01221df") + + describe "createTemporaryDownloadUrlFromPath" $ do + it "returns baseUrl concatenated with objectPath when objectPath does not start with http:// or https://" $ do + withFrameworkConfig \frameworkConfig -> do + context <- createControllerContext frameworkConfig + let ?context = context + let objectPath = "/static/test.txt" + temporaryDownloadUrl <- createTemporaryDownloadUrlFromPath objectPath + + temporaryDownloadUrl.url `shouldBe` "http://localhost:8000/static/test.txt" + + it "returns the objectPath when objectPath starts with 'http://' or 'https://'" $ do + withFrameworkConfig \frameworkConfig -> do + context <- createControllerContext frameworkConfig + let ?context = context + let objectPath = "https://example.com/static/test.txt" + temporaryDownloadUrl <- createTemporaryDownloadUrlFromPath objectPath + + temporaryDownloadUrl.url `shouldBe` "https://example.com/static/test.txt" + +createControllerContext frameworkConfig = do + let + requestBody = FormBody { params = [], files = [] } + request = Wai.defaultRequest + requestContext = RequestContext { request, respond = error "respond", requestBody, frameworkConfig = frameworkConfig } + let ?requestContext = requestContext + newControllerContext diff --git a/Test/Main.hs b/Test/Main.hs index 4a26b94cd..7a03cfb2c 100644 --- a/Test/Main.hs +++ b/Test/Main.hs @@ -44,6 +44,7 @@ import qualified Test.RouterSupportSpec import qualified Test.ViewSupportSpec import qualified Test.ServerSideComponent.HtmlParserSpec import qualified Test.ServerSideComponent.HtmlDiffSpec +import 
qualified Test.FileStorage.ControllerFunctionsSpec import qualified Test.FileStorage.MimeTypesSpec import qualified Test.DataSync.DynamicQueryCompiler import qualified Test.IDE.CodeGeneration.MigrationGenerator @@ -78,6 +79,7 @@ main = hspec do Test.ViewSupportSpec.tests Test.ServerSideComponent.HtmlParserSpec.tests Test.ServerSideComponent.HtmlDiffSpec.tests + Test.FileStorage.ControllerFunctionsSpec.tests Test.FileStorage.MimeTypesSpec.tests Test.DataSync.DynamicQueryCompiler.tests Test.IDE.SchemaDesigner.SchemaOperationsSpec.tests From bbd3107dea50323d73ee6c79f1ff3cdaf4a35b65 Mon Sep 17 00:00:00 2001 From: Liam McDermott Date: Wed, 9 Oct 2024 15:07:38 -0400 Subject: [PATCH 23/47] Fix incorrectly referenced variable in View generator. --- ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs index 7ffe6d24c..72c06b81e 100644 --- a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs @@ -80,7 +80,7 @@ buildPlan' schema config = genericView = [trimming| ${viewHeader} - data ${nameWithSuffix} = {$nameWithSuffix} + data ${nameWithSuffix} = ${nameWithSuffix} instance View ${nameWithSuffix} where html ${nameWithSuffix} { .. } = [hsx| From e02a4e38e2475329ec40413d80becb464811b8a1 Mon Sep 17 00:00:00 2001 From: Liam McDermott Date: Wed, 9 Oct 2024 15:22:41 -0400 Subject: [PATCH 24/47] Fix missing brackets in View generator. --- ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs index 72c06b81e..ae3e19765 100644 --- a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs @@ -80,7 +80,7 @@ buildPlan' schema config = genericView = [trimming| ${viewHeader} - data ${nameWithSuffix} = ${nameWithSuffix} + data ${nameWithSuffix} = {${nameWithSuffix}} instance View ${nameWithSuffix} where html ${nameWithSuffix} { .. } = [hsx| From db98a1f2279a9e56bc249504f3abb86fe0e0bf82 Mon Sep 17 00:00:00 2001 From: Liam McDermott Date: Wed, 9 Oct 2024 16:29:14 -0400 Subject: [PATCH 25/47] Add `tableNameToViewName` and make view generator use it. 
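
For illustration, a rough sketch of what the new helper is expected to return
(the outputs below mirror the specs added in this patch; `tableNameToViewName`
simply reuses `tableNameToControllerName`):

```haskell
-- Doctest-style summary; outputs taken from Test/NameSupportSpec.hs below
-- >>> tableNameToViewName "users"
-- "Users"
-- >>> tableNameToViewName "user_projects"
-- "UserProjects"
-- >>> tableNameToViewName "people"
-- "People"
```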
--- IHP/NameSupport.hs | 12 ++++++++++++ Test/NameSupportSpec.hs | 11 +++++++++++ ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs | 5 +++-- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/IHP/NameSupport.hs b/IHP/NameSupport.hs index 2d9c0ef73..e4e9e9c22 100644 --- a/IHP/NameSupport.hs +++ b/IHP/NameSupport.hs @@ -13,6 +13,7 @@ module IHP.NameSupport , fieldNameToColumnName , escapeHaskellKeyword , tableNameToControllerName +, tableNameToViewName , enumValueToControllerName , toSlug , module IHP.NameSupport.Inflections @@ -67,6 +68,17 @@ tableNameToControllerName tableName = do else ucfirst tableName {-# INLINABLE tableNameToControllerName #-} +-- | Transforms an underscore table name to a name for a view +-- +-- >>> tableNameToViewName "users" +-- +-- >>> tableNameToViewName "projects" +-- +-- >>> tableNameToViewName "user_projects" +tableNameToViewName :: Text -> Text +tableNameToViewName = tableNameToControllerName +{-# INLINABLE tableNameToViewName #-} + -- | Transforms a enum value to a name for a model -- -- >>> enumValueToControllerName "happy" diff --git a/Test/NameSupportSpec.hs b/Test/NameSupportSpec.hs index d7036c3ee..8216c5a79 100644 --- a/Test/NameSupportSpec.hs +++ b/Test/NameSupportSpec.hs @@ -33,6 +33,17 @@ tests = do tableNameToControllerName "users_projects" `shouldBe` "UsersProjects" tableNameToControllerName "people" `shouldBe` "People" + describe "tableNameToViewName" do + it "should deal with empty input" do + tableNameToViewName "" `shouldBe` "" + + it "should transform table names to controller names" do + tableNameToViewName "users" `shouldBe` "Users" + tableNameToViewName "projects" `shouldBe` "Projects" + tableNameToViewName "user_projects" `shouldBe` "UserProjects" + tableNameToViewName "users_projects" `shouldBe` "UsersProjects" + tableNameToViewName "people" `shouldBe` "People" + describe "enumValueToControllerName" do it "should handle spaces in table names" do enumValueToControllerName "very happy" `shouldBe` "VeryHappy" diff --git a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs index ae3e19765..dd32eb8e1 100644 --- a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs @@ -15,8 +15,8 @@ data ViewConfig = ViewConfig } deriving (Eq, Show) buildPlan :: Text -> Text -> Text -> IO (Either Text [GeneratorAction]) -buildPlan viewName applicationName controllerName' = - if (null viewName || null controllerName') +buildPlan viewName' applicationName controllerName' = + if (null viewName' || null controllerName') then pure $ Left "Neither view name nor controller name can be empty" else do schema <- SchemaDesigner.parseSchemaSql >>= \case @@ -24,6 +24,7 @@ buildPlan viewName applicationName controllerName' = Right statements -> pure statements let modelName = tableNameToModelName controllerName' let controllerName = tableNameToControllerName controllerName' + let viewName = tableNameToViewName viewName' let paginationEnabled = False let viewConfig = ViewConfig { .. } pure $ Right $ buildPlan' schema viewConfig From 84f2cbdda9fb23a5c15c0d108f318cfa66733413 Mon Sep 17 00:00:00 2001 From: Liam McDermott Date: Wed, 9 Oct 2024 17:03:57 -0400 Subject: [PATCH 26/47] Fix view generator tests. - Tests were enforcing incorrect Haskell code generation for `genericView` - Update tests to use `tableNameToViewName` like controller tests. - Add tests view names with underscores and camel case. 
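
To make the first point concrete: the old expectations pinned the generic view
to invalid record syntax, while the generator has to emit a plain constructor
(a sketch; the exact expected strings are in the updated test below):

```haskell
-- Before: the old test expected this, which is not valid Haskell
data TestView = {TestView}

-- After: what the generator is supposed to produce
data TestView = TestView
```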
--- Test/IDE/CodeGeneration/ViewGenerator.hs | 43 +++++++++++++++++++++--- ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs | 2 +- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/Test/IDE/CodeGeneration/ViewGenerator.hs b/Test/IDE/CodeGeneration/ViewGenerator.hs index c229172f9..4c96bbdfd 100644 --- a/Test/IDE/CodeGeneration/ViewGenerator.hs +++ b/Test/IDE/CodeGeneration/ViewGenerator.hs @@ -35,7 +35,8 @@ tests = do } ] it "should build a view with name \"EditView\"" do - let viewName = "EditView" + let rawViewName = "EditView" + let viewName = tableNameToViewName rawViewName let rawControllerName = "Pages" let controllerName = tableNameToControllerName rawControllerName let modelName = tableNameToModelName rawControllerName @@ -50,8 +51,41 @@ tests = do + it "should build a view with name \"edit_view\"" do + let rawViewName = "edit_view" + let viewName = tableNameToViewName rawViewName + let rawControllerName = "Pages" + let controllerName = tableNameToControllerName rawControllerName + let modelName = tableNameToModelName rawControllerName + let applicationName = "Web" + let paginationEnabled = False + let config = ViewGenerator.ViewConfig { .. } + let builtPlan = ViewGenerator.buildPlan' schema config + + builtPlan `shouldBe` + [ EnsureDirectory {directory = "Web/View/Pages"},CreateFile {filePath = "Web/View/Pages/Edit.hs", fileContent = "module Web.View.Pages.Edit where\nimport Web.View.Prelude\n\ndata EditView = EditView { page :: Page }\n\ninstance View EditView where\n html EditView { .. } = [hsx|\n {breadcrumb}\n
<h1>Edit Page</h1>
\n {renderForm page}\n |]\n where\n breadcrumb = renderBreadcrumb\n [ breadcrumbLink \"Pages\" PagesAction\n , breadcrumbText \"Edit Page\"\n ]\n\nrenderForm :: Page -> Html\nrenderForm page = formFor page [hsx|\n \n {submitButton}\n\n|]"},AddImport {filePath = "Web/Controller/Pages.hs", fileContent = "import Web.View.Pages.Edit"} + ] + + + it "should build a view with name \"editView\"" do + let rawViewName = "editView" + let viewName = tableNameToViewName rawViewName + let rawControllerName = "Pages" + let controllerName = tableNameToControllerName rawControllerName + let modelName = tableNameToModelName rawControllerName + let applicationName = "Web" + let paginationEnabled = False + let config = ViewGenerator.ViewConfig { .. } + let builtPlan = ViewGenerator.buildPlan' schema config + + builtPlan `shouldBe` + [ EnsureDirectory {directory = "Web/View/Pages"},CreateFile {filePath = "Web/View/Pages/Edit.hs", fileContent = "module Web.View.Pages.Edit where\nimport Web.View.Prelude\n\ndata EditView = EditView { page :: Page }\n\ninstance View EditView where\n html EditView { .. } = [hsx|\n {breadcrumb}\n
<h1>Edit Page</h1>
\n {renderForm page}\n |]\n where\n breadcrumb = renderBreadcrumb\n [ breadcrumbLink \"Pages\" PagesAction\n , breadcrumbText \"Edit Page\"\n ]\n\nrenderForm :: Page -> Html\nrenderForm page = formFor page [hsx|\n \n {submitButton}\n\n|]"},AddImport {filePath = "Web/Controller/Pages.hs", fileContent = "import Web.View.Pages.Edit"} + ] + + it "should build a view with name \"Edit\"" do - let viewName = "Edit" + let rawViewName = "Edit" + let viewName = tableNameToViewName rawViewName let rawControllerName = "Pages" let controllerName = tableNameToControllerName rawControllerName let modelName = tableNameToModelName rawControllerName @@ -66,7 +100,8 @@ tests = do it "should build a view with name \"Test\"" do - let viewName = "Test" + let rawViewName = "Test" + let viewName = tableNameToViewName rawViewName let rawControllerName = "Pages" let controllerName = tableNameToControllerName rawControllerName let modelName = tableNameToModelName rawControllerName @@ -76,5 +111,5 @@ tests = do let builtPlan = ViewGenerator.buildPlan' schema config builtPlan `shouldBe` - [ EnsureDirectory {directory = "Web/View/Pages"},CreateFile {filePath = "Web/View/Pages/Test.hs", fileContent = "module Web.View.Pages.Test where\nimport Web.View.Prelude\ndata TestView = {TestView}\n\ninstance View TestView where\n html TestView { .. } = [hsx|\n {breadcrumb}\n
<h1>TestView</h1>
\n |]\n where\n breadcrumb = renderBreadcrumb\n [ breadcrumbLink \"Tests\" PagesAction\n , breadcrumbText \"TestView\"\n ]"},AddImport {filePath = "Web/Controller/Pages.hs", fileContent = "import Web.View.Pages.Test"} + [ EnsureDirectory {directory = "Web/View/Pages"},CreateFile {filePath = "Web/View/Pages/Test.hs", fileContent = "module Web.View.Pages.Test where\nimport Web.View.Prelude\ndata TestView = TestView\n\ninstance View TestView where\n html TestView { .. } = [hsx|\n {breadcrumb}\n
<h1>TestView</h1>
\n |]\n where\n breadcrumb = renderBreadcrumb\n [ breadcrumbLink \"Tests\" PagesAction\n , breadcrumbText \"TestView\"\n ]"},AddImport {filePath = "Web/Controller/Pages.hs", fileContent = "import Web.View.Pages.Test"} ] \ No newline at end of file diff --git a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs index dd32eb8e1..3cecdae59 100644 --- a/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/ViewGenerator.hs @@ -81,7 +81,7 @@ buildPlan' schema config = genericView = [trimming| ${viewHeader} - data ${nameWithSuffix} = {${nameWithSuffix}} + data ${nameWithSuffix} = ${nameWithSuffix} instance View ${nameWithSuffix} where html ${nameWithSuffix} { .. } = [hsx| From a5bcb8364971af693c6f3dd1bb98471113d69bd4 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Fri, 11 Oct 2024 19:36:01 +0200 Subject: [PATCH 27/47] Reallow custom postgres extensions like postgis in dev mode --- Guide/package-management.markdown | 60 +++++++++++++++++-------------- NixSupport/default.nix | 3 -- flake-module.nix | 18 +++++++--- 3 files changed, 47 insertions(+), 34 deletions(-) diff --git a/Guide/package-management.markdown b/Guide/package-management.markdown index 3d781b355..0c29b701e 100644 --- a/Guide/package-management.markdown +++ b/Guide/package-management.markdown @@ -371,47 +371,53 @@ After that try to run `devenv up`. ### Building Postgres With Extensions -**TODO: Fix this for IHP v1.1.0** - For some applications you may want to install custom postgres extension libraries and have them available in the nix store. For example to enable the [postgis](https://postgis.net/) spatial and geographic objects in PostgreSQL add -`postgresExtensions = (p: [ p.postgis ]);` to your project's `default.nix` file as -an attribute of the `"{ihp}/NixSupport/default.nix"` expression. +`services.postgres.extensions = extensions: [ extensions.postgis ];` to your project's `flake.nix`: ```nix -let - ihp = builtins.fetchGit { - url = "https://github.com/digitallyinduced/ihp.git"; - rev = "c6d40612697bb7905802f23b7753702d33b9e2c1"; - }; - haskellEnv = import "${ihp}/NixSupport/default.nix" { - ihp = ihp; - postgresExtensions = (p: [ p.postgis ]); - haskellDeps = p: with p; [ - cabal-install - base - wai - text - hlint - p.ihp - google-oauth2 - ]; - otherDeps = p: with p; [ - ]; - projectPath = ./.; +{ + inputs = { + ihp.url = "path:///Users/marc/digitallyinduced/ihp"; + ihp.inputs.nixpkgs.url = "github:mpscholten/nixpkgs/fix-th-desugar"; + nixpkgs.follows = "ihp/nixpkgs"; + flake-parts.follows = "ihp/flake-parts"; + devenv.follows = "ihp/devenv"; + systems.follows = "ihp/systems"; }; -in - haskellEnv + + outputs = inputs@{ ihp, flake-parts, systems, nixpkgs, ... }: + flake-parts.lib.mkFlake { inherit inputs; } { + systems = import systems; + imports = [ ihp.flakeModules.default ]; + + perSystem = { pkgs, ... }: { + ihp = { + inherit appName; + enable = true; + projectPath = ./.; + packages = with pkgs; []; + haskellPackages = p: with p; [ + # ... + ]; + }; + devenv.shells.default = { + services.postgres.extensions = extensions: [ extensions.postgis ]; + }; + }; + }; +} + ``` Behind the scenes this will pass a function to the postgres nix expressions `postgresql.withPackages` function making the extension in your app's nix store postgres package. -After the install you can run `create extension postgis;` to enable all the features of the +After the install you can run `CREATE EXTENSION postgis;` to enable all the features of the installed extension. 
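
For a quick sanity check that the extension is usable from your app, a minimal
sketch along these lines should work (the `places` table and its `geography`
column `location` are hypothetical, not part of IHP):

```haskell
-- Assumes IHP's usual prelude and an implicit ?modelContext, as in any
-- IHP action or script. ST_MakePoint/ST_Distance are provided by PostGIS.
nearestPlaces :: (?modelContext :: ModelContext) => IO [(Text, Double)]
nearestPlaces =
    sqlQuery
        "SELECT name, ST_Distance(location, ST_MakePoint(?, ?)::geography) FROM places ORDER BY 2 LIMIT 10"
        (13.4050 :: Double, 52.5200 :: Double)
```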
### Stopping Nix From Running Tests for a Haskell Dependency diff --git a/NixSupport/default.nix b/NixSupport/default.nix index 0b44c4fd4..b754aeaea 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -7,9 +7,7 @@ , otherDeps ? (p: []) , projectPath ? ./. , withHoogle ? false -, postgresExtensions ? (p: []) , optimized ? false -, includeDevTools ? !optimized # Include Postgres? , rtsFlags ? "" , appName ? "app" , optimizationLevel ? "2" @@ -175,7 +173,6 @@ in [ pkgs.makeWrapper pkgs.cacert # Needed for npm install to work from within the IHP build process ] - (if includeDevTools then [(pkgs.postgresql_13.withPackages postgresExtensions)] else []) ]; shellHook = "eval $(egrep ^export ${allHaskellPackages}/bin/ghc)"; enableParallelBuilding = true; diff --git a/flake-module.nix b/flake-module.nix index 9748e195d..a9e54de7d 100644 --- a/flake-module.nix +++ b/flake-module.nix @@ -139,8 +139,6 @@ ihpFlake: haskellDeps = cfg.haskellPackages; otherDeps = p: cfg.packages; projectPath = cfg.projectPath; - # Dev tools are not needed in the release build - includeDevTools = false; # Set optimized = true to get more optimized binaries, but slower build times optimized = true; ghc = ghcCompiler; @@ -156,7 +154,6 @@ ihpFlake: haskellDeps = cfg.haskellPackages; otherDeps = p: cfg.packages; projectPath = cfg.projectPath; - includeDevTools = false; optimized = false; ghc = ghcCompiler; pkgs = pkgs; @@ -204,9 +201,21 @@ ihpFlake: }; devenv.shells.default = lib.mkIf cfg.enable { - packages = [ ghcCompiler.ihp ghcCompiler.ihp-ide pkgs.postgresql_13 pkgs.gnumake ] + packages = [ ghcCompiler.ihp ghcCompiler.ihp-ide pkgs.gnumake ] ++ cfg.packages ++ [pkgs.mktemp] # Without this 'make build/bin/RunUnoptimizedProdServer' fails on macOS + ++ [(let cfg = config.devenv.shells.default.services.postgres; in + if cfg.extensions != null + then + if builtins.hasAttr "withPackages" cfg.package + then cfg.package.withPackages cfg.extensions + else + builtins.throw '' + Cannot add extensions to the PostgreSQL package. + `services.postgres.package` is missing the `withPackages` attribute. Did you already add extensions to the package? 
+ '' + else cfg.package + )] ; /* @@ -239,6 +248,7 @@ ihpFlake: # As the devenv postgres uses a different location for the socket # this would break lots of known commands such as `make db` services.postgres.enable = false; + services.postgres.package = pkgs.postgresql_13; services.postgres.initialDatabases = [ { name = "app"; From c37be20011ded07120f4ee6efd75175ee52f7438 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 24 Oct 2024 12:11:48 +0200 Subject: [PATCH 28/47] DataSync: fixed crash when the table name has a space --- IHP/DataSync/ChangeNotifications.hs | 14 +++++++------- IHP/DataSync/RowLevelSecurity.hs | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/IHP/DataSync/ChangeNotifications.hs b/IHP/DataSync/ChangeNotifications.hs index f492248a8..a8ad25d38 100644 --- a/IHP/DataSync/ChangeNotifications.hs +++ b/IHP/DataSync/ChangeNotifications.hs @@ -41,7 +41,7 @@ createNotificationFunction :: RLS.TableWithRLS -> PG.Query createNotificationFunction table = [i| DO $$ BEGIN - CREATE FUNCTION #{functionName}() RETURNS TRIGGER AS $BODY$ + CREATE FUNCTION "#{functionName}"() RETURNS TRIGGER AS $BODY$ DECLARE payload TEXT; large_pg_notification_id UUID; @@ -86,14 +86,14 @@ createNotificationFunction table = [i| RETURN new; END; $BODY$ language plpgsql; - DROP TRIGGER IF EXISTS #{insertTriggerName} ON #{tableName}; - DROP TRIGGER IF EXISTS #{updateTriggerName} ON #{tableName}; - DROP TRIGGER IF EXISTS #{deleteTriggerName} ON #{tableName}; + DROP TRIGGER IF EXISTS "#{insertTriggerName}" ON "#{tableName}"; + DROP TRIGGER IF EXISTS "#{updateTriggerName}" ON "#{tableName}"; + DROP TRIGGER IF EXISTS "#{deleteTriggerName}" ON "#{tableName}"; - CREATE TRIGGER #{insertTriggerName} AFTER INSERT ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE #{functionName}(); - CREATE TRIGGER #{updateTriggerName} AFTER UPDATE ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE #{functionName}(); - CREATE TRIGGER #{deleteTriggerName} AFTER DELETE ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE #{functionName}(); + CREATE TRIGGER "#{insertTriggerName}" AFTER INSERT ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE "#{functionName}"(); + CREATE TRIGGER "#{updateTriggerName}" AFTER UPDATE ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE "#{functionName}"(); + CREATE TRIGGER "#{deleteTriggerName}" AFTER DELETE ON "#{tableName}" FOR EACH ROW EXECUTE PROCEDURE "#{functionName}"(); EXCEPTION WHEN duplicate_function THEN null; diff --git a/IHP/DataSync/RowLevelSecurity.hs b/IHP/DataSync/RowLevelSecurity.hs index d26d63612..42c8a2835 100644 --- a/IHP/DataSync/RowLevelSecurity.hs +++ b/IHP/DataSync/RowLevelSecurity.hs @@ -128,7 +128,7 @@ makeCachedEnsureRLSEnabled = do -- >>> hasRLSEnabled "my_table" -- True hasRLSEnabled :: (?modelContext :: ModelContext) => Text -> IO Bool -hasRLSEnabled table = sqlQueryScalar "SELECT relrowsecurity FROM pg_class WHERE oid = ?::regclass" [table] +hasRLSEnabled table = sqlQueryScalar "SELECT relrowsecurity FROM pg_class WHERE oid = quote_ident(?)::regclass" [table] -- | Can be constructed using 'ensureRLSEnabled' -- From 9bbe416854e6490133ea3af33b69342c840d23bd Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Fri, 25 Oct 2024 15:39:06 +0200 Subject: [PATCH 29/47] DataSync: Extracted column mapping Will be used soon for allowing custom column mapping functions --- IHP/DataSync/DynamicQueryCompiler.hs | 36 ++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/IHP/DataSync/DynamicQueryCompiler.hs b/IHP/DataSync/DynamicQueryCompiler.hs 
index db50078ea..68e40f681 100644 --- a/IHP/DataSync/DynamicQueryCompiler.hs +++ b/IHP/DataSync/DynamicQueryCompiler.hs @@ -14,7 +14,10 @@ import qualified Database.PostgreSQL.Simple.Types as PG import qualified Data.List as List compileQuery :: DynamicSQLQuery -> (PG.Query, [PG.Action]) -compileQuery DynamicSQLQuery { .. } = (sql, args) +compileQuery query = compileQueryMapped (mapColumnNames fieldNameToColumnName query) + +compileQueryMapped :: DynamicSQLQuery -> (PG.Query, [PG.Action]) +compileQueryMapped DynamicSQLQuery { .. } = (sql, args) where sql = "SELECT" <> distinctOnSql <> "? FROM ?" <> whereSql <> orderBySql <> limitSql <> offsetSql args = distinctOnArgs @@ -28,7 +31,7 @@ compileQuery DynamicSQLQuery { .. } = (sql, args) <> offsetArgs (distinctOnSql, distinctOnArgs) = case distinctOnColumn of - Just column -> (" DISTINCT ON (?) ", [PG.toField $ PG.Identifier (fieldNameToColumnName $ cs column)]) + Just column -> (" DISTINCT ON (?) ", [PG.toField $ PG.Identifier (cs column)]) Nothing -> (" ", []) (orderBySql, orderByArgs) = case orderByClause of @@ -38,13 +41,13 @@ compileQuery DynamicSQLQuery { .. } = (sql, args) , orderByClauses |> map (\case OrderByClause { orderByColumn, orderByDirection } -> - [ PG.toField $ PG.Identifier (fieldNameToColumnName $ cs orderByColumn) + [ PG.toField $ PG.Identifier (cs orderByColumn) , PG.toField $ if orderByDirection == QueryBuilder.Desc then PG.Plain "DESC" else PG.Plain "" ] OrderByTSRank { tsvector, tsquery } -> - [ PG.toField $ PG.Identifier (fieldNameToColumnName tsvector) + [ PG.toField $ PG.Identifier tsvector , PG.toField tsquery ] ) @@ -63,6 +66,29 @@ compileQuery DynamicSQLQuery { .. } = (sql, args) Just offset -> (" OFFSET ?", [PG.toField offset]) Nothing -> ("", []) +-- | Used to transform column names from @camelCase@ to @snake_case@ +mapColumnNames :: (Text -> Text) -> DynamicSQLQuery -> DynamicSQLQuery +mapColumnNames rename query = + query + { selectedColumns = mapSelectedColumns query.selectedColumns + , whereCondition = mapConditionExpression <$> query.whereCondition + , orderByClause = map mapOrderByClause query.orderByClause + , distinctOnColumn = (cs . rename . cs) <$> query.distinctOnColumn + } + where + mapSelectedColumns :: SelectedColumns -> SelectedColumns + mapSelectedColumns SelectAll = SelectAll + mapSelectedColumns (SelectSpecific columns) = SelectSpecific (map rename columns) + + mapConditionExpression :: ConditionExpression -> ConditionExpression + mapConditionExpression ColumnExpression { field } = ColumnExpression { field = rename field } + mapConditionExpression InfixOperatorExpression { left, op, right } = InfixOperatorExpression { left = mapConditionExpression left, op, right = mapConditionExpression right } + mapConditionExpression otherwise = otherwise + + mapOrderByClause :: OrderByClause -> OrderByClause + mapOrderByClause OrderByClause { orderByColumn, orderByDirection } = OrderByClause { orderByColumn = cs (rename (cs orderByColumn)), orderByDirection } + mapOrderByClause otherwise = otherwise + compileOrderByClause :: OrderByClause -> Text compileOrderByClause OrderByClause {} = "? ?" 
compileOrderByClause OrderByTSRank { tsvector, tsquery } = "ts_rank(?, to_tsquery('english', ?))" @@ -79,7 +105,7 @@ compileSelectedColumns (SelectSpecific fields) = PG.Many args -- TODO: validate query against schema compileCondition :: ConditionExpression -> (PG.Query, [PG.Action]) -compileCondition (ColumnExpression column) = ("?", [PG.toField $ PG.Identifier (fieldNameToColumnName column)]) +compileCondition (ColumnExpression column) = ("?", [PG.toField $ PG.Identifier column]) compileCondition (InfixOperatorExpression a OpEqual (LiteralExpression Null)) = compileCondition (InfixOperatorExpression a OpIs (LiteralExpression Null)) -- Turn 'a = NULL' into 'a IS NULL' compileCondition (InfixOperatorExpression a OpNotEqual (LiteralExpression Null)) = compileCondition (InfixOperatorExpression a OpIsNot (LiteralExpression Null)) -- Turn 'a <> NULL' into 'a IS NOT NULL' compileCondition (InfixOperatorExpression a OpIn (ListExpression { values })) | (Null `List.elem` values) = From 96d41a5d9f68aef9514bd8950e7c91ff4787db03 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sat, 26 Oct 2024 11:12:34 +0200 Subject: [PATCH 30/47] DataSync: support column names that are not in snake_case --- IHP/DataSync/Controller.hs | 3 +- IHP/DataSync/ControllerImpl.hs | 55 +++++++++++++++++----------- IHP/DataSync/DynamicQuery.hs | 2 +- IHP/DataSync/DynamicQueryCompiler.hs | 34 ++++++++++++++++- 4 files changed, 70 insertions(+), 24 deletions(-) diff --git a/IHP/DataSync/Controller.hs b/IHP/DataSync/Controller.hs index 6cd6daeff..85790b5fb 100644 --- a/IHP/DataSync/Controller.hs +++ b/IHP/DataSync/Controller.hs @@ -8,6 +8,7 @@ import IHP.DataSync.RowLevelSecurity import qualified Database.PostgreSQL.Simple.ToField as PG import qualified IHP.DataSync.ChangeNotifications as ChangeNotifications import IHP.DataSync.ControllerImpl (runDataSyncController, cleanupAllSubscriptions) +import IHP.DataSync.DynamicQueryCompiler (camelCaseRenamer) instance ( PG.ToField (PrimaryKey (GetTableName CurrentUserRecord)) @@ -21,5 +22,5 @@ instance ( run = do ensureRLSEnabled <- makeCachedEnsureRLSEnabled installTableChangeTriggers <- ChangeNotifications.makeCachedInstallTableChangeTriggers - runDataSyncController ensureRLSEnabled installTableChangeTriggers (receiveData @ByteString) sendJSON (\_ _ -> pure ()) + runDataSyncController ensureRLSEnabled installTableChangeTriggers (receiveData @ByteString) sendJSON (\_ _ -> pure ()) (\_ -> camelCaseRenamer) onClose = cleanupAllSubscriptions diff --git a/IHP/DataSync/ControllerImpl.hs b/IHP/DataSync/ControllerImpl.hs index 431953432..f4a7480e5 100644 --- a/IHP/DataSync/ControllerImpl.hs +++ b/IHP/DataSync/ControllerImpl.hs @@ -45,11 +45,11 @@ runDataSyncController :: , Typeable CurrentUserRecord , HasNewSessionUrl CurrentUserRecord , Show (PrimaryKey (GetTableName CurrentUserRecord)) - ) => EnsureRLSEnabledFn -> InstallTableChangeTriggerFn -> IO ByteString -> SendJSONFn -> HandleCustomMessageFn -> IO () -runDataSyncController ensureRLSEnabled installTableChangeTriggers receiveData sendJSON handleCustomMessage = do + ) => EnsureRLSEnabledFn -> InstallTableChangeTriggerFn -> IO ByteString -> SendJSONFn -> HandleCustomMessageFn -> (Text -> Renamer) -> IO () +runDataSyncController ensureRLSEnabled installTableChangeTriggers receiveData sendJSON handleCustomMessage renamer = do setState DataSyncReady { subscriptions = HashMap.empty, transactions = HashMap.empty, asyncs = [] } - let handleMessage = buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleCustomMessage + 
let handleMessage :: DataSyncMessage -> IO () = buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleCustomMessage renamer forever do message <- Aeson.eitherDecodeStrict' <$> receiveData @@ -91,17 +91,18 @@ buildMessageHandler :: , HasNewSessionUrl CurrentUserRecord , Show (PrimaryKey (GetTableName CurrentUserRecord)) ) - => EnsureRLSEnabledFn -> InstallTableChangeTriggerFn -> SendJSONFn -> HandleCustomMessageFn -> (DataSyncMessage -> IO ()) -buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleCustomMessage = handleMessage + => EnsureRLSEnabledFn -> InstallTableChangeTriggerFn -> SendJSONFn -> HandleCustomMessageFn -> (Text -> Renamer) -> (DataSyncMessage -> IO ()) +buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleCustomMessage renamer = handleMessage where pgListener = ?applicationContext.pgListener handleMessage :: DataSyncMessage -> IO () handleMessage DataSyncQuery { query, requestId, transactionId } = do ensureRLSEnabled (query.table) - let (theQuery, theParams) = compileQuery query + let (theQuery, theParams) = compileQueryWithRenamer (renamer query.table) query - result :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId theQuery theParams + rawResult :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId theQuery theParams + let result = map (map (renameField (renamer query.table))) rawResult sendJSON DataSyncResult { result, requestId } @@ -118,16 +119,17 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC close <- MVar.newEmptyMVar atomicModifyIORef'' ?state (\state -> state |> modify #subscriptions (HashMap.insert subscriptionId close)) - let (theQuery, theParams) = compileQuery query + let (theQuery, theParams) = compileQueryWithRenamer (renamer query.table) query - result :: [[Field]] <- sqlQueryWithRLS theQuery theParams + rawResult :: [[Field]] <- sqlQueryWithRLS theQuery theParams + let result = map (map (renameField (renamer query.table))) rawResult let tableName = query.table -- We need to keep track of all the ids of entities we're watching to make -- sure that we only send update notifications to clients that can actually -- access the record (e.g. if a RLS policy denies access) - let watchedRecordIds = recordIds result + let watchedRecordIds = recordIds rawResult -- Store it in IORef as an INSERT requires us to add an id watchedRecordIdsRef <- newIORef (Set.fromList watchedRecordIds) @@ -149,11 +151,12 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC newRecord :: [[Field]] <- sqlQueryWithRLS ("SELECT * FROM (" <> theQuery <> ") AS records WHERE records.id = ? 
LIMIT 1") (theParams <> [PG.toField id]) case headMay newRecord of - Just record -> do + Just rawRecord -> do -- Add the new record to 'watchedRecordIdsRef' -- Otherwise the updates and deletes will not be dispatched to the client modifyIORef' watchedRecordIdsRef (Set.insert id) + let record = map (renameField (renamer tableName)) rawRecord sendJSON DidInsert { subscriptionId, record } Nothing -> pure () ChangeNotifications.DidUpdate { id, changeSet } -> do @@ -167,7 +170,7 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC changes <- ChangeNotifications.retrieveChanges changeSet if isRecordInResultSet - then sendJSON DidUpdate { subscriptionId, id, changeSet = changesToValue changes } + then sendJSON DidUpdate { subscriptionId, id, changeSet = changesToValue (renamer tableName) changes } else sendJSON DidDelete { subscriptionId, id } ChangeNotifications.DidDelete { id } -> do -- Only send the notifcation if the deleted record was part of the initial @@ -202,7 +205,7 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC let query = "INSERT INTO ? ? VALUES ? RETURNING *" let columns = record |> HashMap.keys - |> map fieldNameToColumnName + |> map (renamer table).fieldToColumn let values = record |> HashMap.elems @@ -213,7 +216,11 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC result :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId query params case result of - [record] -> sendJSON DidCreateRecord { requestId, record } + [rawRecord] -> + let + record = map (renameField (renamer table)) rawRecord + in + sendJSON DidCreateRecord { requestId, record } otherwise -> error "Unexpected result in CreateRecordMessage handler" pure () @@ -228,7 +235,7 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC Just value -> value Nothing -> error "Atleast one record is required" |> HashMap.keys - |> map fieldNameToColumnName + |> map (renamer table).fieldToColumn let values = records |> map (\object -> @@ -240,7 +247,8 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC let params = (PG.Identifier table, PG.In (map PG.Identifier columns), PG.Values [] values) - records :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId query params + rawRecords :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId query params + let records = map (map (renameField (renamer table))) rawRecords sendJSON DidCreateRecords { requestId, records } @@ -272,7 +280,11 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC result :: [[Field]] <- sqlQueryWithRLSAndTransactionId transactionId (PG.Query query) params case result of - [record] -> sendJSON DidUpdateRecord { requestId, record } + [rawRecord] -> + let + record = map (renameField (renamer table)) rawRecord + in + sendJSON DidUpdateRecord { requestId, record } otherwise -> error "Could not apply the update to the given record. Are you sure the record ID you passed is correct? If the record ID is correct, likely the row level security policy is not making the record visible to the UPDATE operation." 
pure () @@ -300,7 +312,8 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC <> (join (map (\(key, value) -> [PG.toField key, value]) keyValues)) <> [PG.toField (PG.In ids)] - records <- sqlQueryWithRLSAndTransactionId transactionId (PG.Query query) params + rawRecords <- sqlQueryWithRLSAndTransactionId transactionId (PG.Query query) params + let records = map (map (renameField (renamer table))) rawRecords sendJSON DidUpdateRecords { requestId, records } @@ -380,10 +393,10 @@ cleanupAllSubscriptions = do DataSyncReady { asyncs } -> forEach asyncs uninterruptibleCancel _ -> pure () -changesToValue :: [ChangeNotifications.Change] -> Value -changesToValue changes = object (map changeToPair changes) +changesToValue :: Renamer -> [ChangeNotifications.Change] -> Value +changesToValue renamer changes = object (map changeToPair changes) where - changeToPair ChangeNotifications.Change { col, new } = (Aeson.fromText $ columnNameToFieldName col) .= new + changeToPair ChangeNotifications.Change { col, new } = (Aeson.fromText $ renamer.columnToField col) .= new runInModelContextWithTransaction :: (?state :: IORef DataSyncController, ?modelContext :: ModelContext) => ((?modelContext :: ModelContext) => IO result) -> Maybe UUID -> IO result runInModelContextWithTransaction function (Just transactionId) = do diff --git a/IHP/DataSync/DynamicQuery.hs b/IHP/DataSync/DynamicQuery.hs index 9f5c938ec..2ec915e5c 100644 --- a/IHP/DataSync/DynamicQuery.hs +++ b/IHP/DataSync/DynamicQuery.hs @@ -127,7 +127,7 @@ instance PG.FromField Field where pure Field { .. } where fieldName = (PG.name field) - |> fmap (columnNameToFieldName . cs) + |> fmap cs |> fromMaybe "" instance PG.FromField DynamicValue where diff --git a/IHP/DataSync/DynamicQueryCompiler.hs b/IHP/DataSync/DynamicQueryCompiler.hs index 68e40f681..f1831d406 100644 --- a/IHP/DataSync/DynamicQueryCompiler.hs +++ b/IHP/DataSync/DynamicQueryCompiler.hs @@ -13,8 +13,40 @@ import qualified Database.PostgreSQL.Simple.ToField as PG import qualified Database.PostgreSQL.Simple.Types as PG import qualified Data.List as List +data Renamer = Renamer + { fieldToColumn :: Text -> Text + , columnToField :: Text -> Text + } + compileQuery :: DynamicSQLQuery -> (PG.Query, [PG.Action]) -compileQuery query = compileQueryMapped (mapColumnNames fieldNameToColumnName query) +compileQuery = compileQueryWithRenamer camelCaseRenamer + +compileQueryWithRenamer :: Renamer -> DynamicSQLQuery -> (PG.Query, [PG.Action]) +compileQueryWithRenamer renamer query = compileQueryMapped (mapColumnNames renamer.fieldToColumn query) + +-- | Default renamer used by DataSync. +-- +-- Transforms JS inputs in @camelCase@ to snake_case for the database +-- and DB outputs in @snake_case@ back to @camelCase@ +camelCaseRenamer :: Renamer +camelCaseRenamer = + Renamer + { fieldToColumn = fieldNameToColumnName + , columnToField = columnNameToFieldName + } + +-- | Renamer that does not modify the column names +unmodifiedRenamer :: Renamer +unmodifiedRenamer = + Renamer + { fieldToColumn = id + , columnToField = id + } + +-- | When a Field is retrieved from the database, it's all in @snake_case@. This turns it into @camelCase@ +renameField :: Renamer -> Field -> Field +renameField renamer field = + field { fieldName = renamer.columnToField field.fieldName } compileQueryMapped :: DynamicSQLQuery -> (PG.Query, [PG.Action]) compileQueryMapped DynamicSQLQuery { .. 
} = (sql, args) From c39d5f0ec2af03f8a2ac69ede6c377de514ee80f Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sun, 27 Oct 2024 19:46:31 +0100 Subject: [PATCH 31/47] fixed outdated reference to './start' --- Guide/your-first-project.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/your-first-project.markdown b/Guide/your-first-project.markdown index d2fbed2ce..24b211a60 100644 --- a/Guide/your-first-project.markdown +++ b/Guide/your-first-project.markdown @@ -67,7 +67,7 @@ cd blog Start the development server by running the following in the `blog` directory: ```bash -./start +devenv up ``` Your application is starting now. The development server will automatically launch the built-in IDE. From b68c4ddb9fd22fb06503c8d966eebfb45e45ccec Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sat, 2 Nov 2024 10:08:16 -0700 Subject: [PATCH 32/47] DataSync: Fixed renamer not being used in updateRecord operations --- IHP/DataSync/ControllerImpl.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/IHP/DataSync/ControllerImpl.hs b/IHP/DataSync/ControllerImpl.hs index f4a7480e5..17fd66217 100644 --- a/IHP/DataSync/ControllerImpl.hs +++ b/IHP/DataSync/ControllerImpl.hs @@ -259,7 +259,7 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC let columns = patch |> HashMap.keys - |> map fieldNameToColumnName + |> map (renamer table).fieldToColumn |> map PG.Identifier let values = patch @@ -294,7 +294,7 @@ buildMessageHandler ensureRLSEnabled installTableChangeTriggers sendJSON handleC let columns = patch |> HashMap.keys - |> map fieldNameToColumnName + |> map (renamer table).fieldToColumn |> map PG.Identifier let values = patch From 61bc6a7365f4d545a5a7bd120d5186dfbb65ab36 Mon Sep 17 00:00:00 2001 From: Lars Lillo Ulvestad Date: Sun, 3 Nov 2024 00:26:22 +0700 Subject: [PATCH 33/47] uncheckedHsx + customHsx (#2010) * add new module * Fix uncheckedHSX quasiquoter * Make sure closing tags are checked * Fix it so it works * remove UncheckedHSX and attept to restore QQ and Parser modules * reset some unecessary whitespace stuff * remove unecessary whitespace * unchecked HSX working, with tests on the parser * Add customHsx + tests * fix comment * fix comment * Update ihp-hsx/IHP/HSX/Parser.hs Co-authored-by: Marc Scholten * Update ihp-hsx/IHP/HSX/QQ.hs Co-authored-by: Marc Scholten * remove newtypes and use 'HsxSettings' directly * Fix Github resolve bug * Aesthethic nitpick * use customHsx to call hsx and uncheckedHsx * Move CustomHsxCases to Test.HSX namespace * Fix import * Fix module comment * For now, move CustomHsxCases back so the tests are working again * Add documentation * Minor doc fix * Formulate solution to QuasiQuoter shortcomings * typo fix * Add use-case example * Simplify langauge * Improve examples a bit * Add spread example --------- Co-authored-by: Marc Scholten --- Guide/hsx.markdown | 96 +++++++++++++++++++ ihp-hsx/IHP/HSX/Parser.hs | 30 ++++-- ihp-hsx/IHP/HSX/QQ.hs | 42 ++++++--- ihp-hsx/README.md | 96 +++++++++++++++++++ ihp-hsx/Test/IHP/HSX/CustomHsxCases.hs | 38 ++++++++ ihp-hsx/Test/IHP/HSX/ParserSpec.hs | 124 ++++++++++++++++++++++--- ihp-hsx/Test/IHP/HSX/QQSpec.hs | 16 ++++ ihp-hsx/ihp-hsx.cabal | 2 +- 8 files changed, 409 insertions(+), 35 deletions(-) create mode 100644 ihp-hsx/Test/IHP/HSX/CustomHsxCases.hs diff --git a/Guide/hsx.markdown b/Guide/hsx.markdown index f58a933d1..4fe57a38c 100644 --- a/Guide/hsx.markdown +++ b/Guide/hsx.markdown @@ -278,6 +278,102 @@ The underlying HTML library blaze 
currently does not support an empty HTML attribute.

If you use HTML entities, such as `&nbsp;` for a non-breaking space, you will notice they appear exactly like that. To output directly (i.e. unescaped) use the method `preEscapedToMarkup` from `Text.Blaze.Html5`.

### Custom HSX and Unchecked HSX

HSX provides two additional QuasiQuoters beyond the standard `[hsx|...|]` for increased flexibility: `uncheckedHsx` and `customHsx`.

#### Using `uncheckedHsx`

`uncheckedHsx` provides a quick way to bypass HSX's strict tag and attribute name checking.

It will still check for a valid HTML structure, but it will accept any tag and attribute names.

```haskell
[uncheckedHsx|
    <my-custom-element some-attribute="value">
        Content
    </my-custom-element>
|]
```

While convenient for rapid development, use it with caution, as you lose the benefits of compile-time guarantees for your markup.

#### Using `customHsx`

`customHsx` allows you to extend the default HSX with additional whitelisted tag names and attribute names while maintaining the same strict compile-time checking as the default `hsx`.

This makes it easier to use custom elements that often come with special attributes, and JavaScript libraries such as `_hyperscript` that use `_` as an attribute name.

To use `customHsx`, you need to create it in a separate module due to Template Haskell restrictions. Here's how to set it up:

1. First, create a new module for your custom HSX (e.g., `Application.Helper.CustomHsx`):

```haskell
module Application.Helper.CustomHsx where

import IHP.Prelude
import IHP.HSX.QQ (customHsx)
import IHP.HSX.Parser
import Language.Haskell.TH.Quote
import qualified Data.Set as Set

myHsx :: QuasiQuoter
myHsx = customHsx
    (HsxSettings
        { checkMarkup = True
        , additionalTagNames = Set.fromList ["book", "heading", "name"]
        , additionalAttributeNames = Set.fromList ["_", "custom-attribute"]
        }
    )
```

Configuration options for `HsxSettings`:
- `checkMarkup`: Boolean to enable/disable markup checking
- `additionalTagNames`: Set of additional allowed tag names
- `additionalAttributeNames`: Set of additional allowed attribute names

2. Make it available in your views by adding it to your view helpers module:

```haskell
module Application.Helper.View (
    module Application.Helper.View,
    module Application.Helper.CustomHsx -- Add this line
) where

import IHP.ViewPrelude
import Application.Helper.CustomHsx (myHsx) -- Add this line
```

3. Use it in your views:

```haskell
[myHsx|
    <book>
        <heading>My Book</heading>
        <name>Author Name</name>
    </book>
|]
```

The custom HSX will validate that tags and attributes are either in the default HSX whitelist or in your additional sets. This gives you the flexibility to use custom elements and attributes.

This approach is particularly useful for:
- Web Components with custom attribute names
- UI libraries with non-standard attributes
- Domain-specific XML markup languages like [Hyperview](https://hyperview.org/docs/example_navigation)
- Integration with third-party frameworks that extend HTML syntax

`customHsx` whitelisting and even `uncheckedHsx` do not entirely help with libraries that use very unusual symbols in their attributes, like Alpine.js, because HSX doesn't recognize HTML attributes starting with `@` or containing `:`. In these cases, the spread syntax `{...attributeList}` is likely your best bet.
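Since the spread takes an ordinary Haskell list of key/value pairs, names like `@click` never pass through the compile-time whitelist at all. A minimal self-contained sketch of the pattern (the module name and the Alpine-style attribute values are illustrative, not from the original guide):

```haskell
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
module AlpineAttributes where

import IHP.HSX.QQ (hsx)
import Data.Text (Text)
import Text.Blaze.Html (Html)

-- The attribute list is plain data: names like "@click" are spliced in at
-- runtime via the spread syntax and never go through the HSX attribute parser.
alpineAttributes :: [(Text, Text)]
alpineAttributes =
    [ ("@click", "open = !open")
    , (":class", "{ active: open }")
    ]

toggleButton :: Html
toggleButton = [hsx|<button {...alpineAttributes}>Toggle</button>|]
```

Because the list is first-class data, it can also be built conditionally or generated at runtime.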
+ +```haskell +-- This will not work +[uncheckedHsx||] + +-- Using spread syntax will work +[hsx||] +``` ## Common HSX Patterns diff --git a/ihp-hsx/IHP/HSX/Parser.hs b/ihp-hsx/IHP/HSX/Parser.hs index 1f50d1d5c..2c1eac21e 100644 --- a/ihp-hsx/IHP/HSX/Parser.hs +++ b/ihp-hsx/IHP/HSX/Parser.hs @@ -15,6 +15,7 @@ module IHP.HSX.Parser , Attribute (..) , AttributeValue (..) , collapseSpace +, HsxSettings (..) ) where import Prelude @@ -34,6 +35,12 @@ import qualified Data.Set as Set import qualified Data.Containers.ListUtils as List import qualified IHP.HSX.HaskellParser as HaskellParser +data HsxSettings = HsxSettings + { checkMarkup :: Bool + , additionalTagNames :: Set Text + , additionalAttributeNames :: Set Text + } + data AttributeValue = TextValue !Text | ExpressionValue !Haskell.Exp deriving (Eq, Show) data Attribute = StaticAttribute !Text !AttributeValue | SpreadAttributes !Haskell.Exp deriving (Eq, Show) @@ -57,15 +64,16 @@ data Node = Node !Text ![Attribute] ![Node] !Bool -- > let position = Megaparsec.SourcePos filePath (Megaparsec.mkPos line) (Megaparsec.mkPos col) -- > let hsxText = "Hello" -- > --- > let (Right node) = parseHsx position [] hsxText -parseHsx :: SourcePos -> [TH.Extension] -> Text -> Either (ParseErrorBundle Text Void) Node -parseHsx position extensions code = +-- > let (Right node) = parseHsx settings position [] hsxText +parseHsx :: HsxSettings -> SourcePos -> [TH.Extension] -> Text -> Either (ParseErrorBundle Text Void) Node +parseHsx settings position extensions code = let ?extensions = extensions + ?settings = settings in runParser (setPosition position *> parser) "" code -type Parser a = (?extensions :: [TH.Extension]) => Parsec Void Text a +type Parser a = (?extensions :: [TH.Extension], ?settings :: HsxSettings) => Parsec Void Text a setPosition pstateSourcePos = updateParserState (\state -> state { statePosState = (statePosState state) { pstateSourcePos } @@ -211,15 +219,16 @@ hsxNodeAttribute = do hsxAttributeName :: Parser Text hsxAttributeName = do name <- rawAttribute - unless (isValidAttributeName name) (fail $ "Invalid attribute name: " <> cs name) + let checkingMarkup = ?settings.checkMarkup + unless (isValidAttributeName name || not checkingMarkup) (fail $ "Invalid attribute name: " <> cs name) pure name where isValidAttributeName name = "data-" `Text.isPrefixOf` name || "aria-" `Text.isPrefixOf` name || "hx-" `Text.isPrefixOf` name - || "hx-" `Text.isPrefixOf` name || name `Set.member` attributes + || name `Set.member` ?settings.additionalAttributeNames rawAttribute = takeWhile1P Nothing (\c -> Char.isAlphaNum c || c == '-' || c == '_') @@ -285,13 +294,18 @@ hsxSplicedNode = do treeToString acc (TokenNode (x:xs)) = ((treeToString (acc <> "{") x) <> (Text.concat $ fmap (treeToString "") xs)) <> "}" + hsxElementName :: Parser Text hsxElementName = do name <- takeWhile1P (Just "identifier") (\c -> Char.isAlphaNum c || c == '_' || c == '-' || c == '!') let isValidParent = name `Set.member` parents let isValidLeaf = name `Set.member` leafs - let isValidCustomWebComponent = "-" `Text.isInfixOf` name - unless (isValidParent || isValidLeaf || isValidCustomWebComponent) (fail $ "Invalid tag name: " <> cs name) + let isValidCustomWebComponent = "-" `Text.isInfixOf` name + && not (Text.isPrefixOf "-" name) + && not (Char.isNumber (Text.head name)) + let isValidAdditionalTag = name `Set.member` ?settings.additionalTagNames + let checkingMarkup = ?settings.checkMarkup + unless (isValidParent || isValidLeaf || isValidCustomWebComponent || isValidAdditionalTag 
|| not checkingMarkup) (fail $ "Invalid tag name: " <> cs name) space pure name diff --git a/ihp-hsx/IHP/HSX/QQ.hs b/ihp-hsx/IHP/HSX/QQ.hs index a064f2fb4..4599c7ff2 100644 --- a/ihp-hsx/IHP/HSX/QQ.hs +++ b/ihp-hsx/IHP/HSX/QQ.hs @@ -5,7 +5,7 @@ Module: IHP.HSX.QQ Description: Defines the @[hsx||]@ syntax Copyright: (c) digitally induced GmbH, 2022 -} -module IHP.HSX.QQ (hsx) where +module IHP.HSX.QQ (hsx, uncheckedHsx, customHsx) where import Prelude import Data.Text (Text) @@ -27,20 +27,40 @@ import Data.List (foldl') import IHP.HSX.Attribute import qualified Text.Blaze.Html5.Attributes as Attributes import qualified Data.HashMap.Strict as HashMap +import qualified Data.Set as Set hsx :: QuasiQuoter -hsx = QuasiQuoter { - quoteExp = quoteHsxExpression, - quotePat = error "quotePat: not defined", - quoteDec = error "quoteDec: not defined", - quoteType = error "quoteType: not defined" - } +hsx = customHsx + (HsxSettings + { checkMarkup = True + , additionalTagNames = Set.empty + , additionalAttributeNames = Set.empty + } + ) -quoteHsxExpression :: String -> TH.ExpQ -quoteHsxExpression code = do +uncheckedHsx :: QuasiQuoter +uncheckedHsx = customHsx + (HsxSettings + { checkMarkup = False + , additionalTagNames = Set.empty + , additionalAttributeNames = Set.empty + } + ) + +customHsx :: HsxSettings -> QuasiQuoter +customHsx settings = + QuasiQuoter + { quoteExp = quoteHsxExpression settings + , quotePat = error "quotePat: not defined" + , quoteDec = error "quoteDec: not defined" + , quoteType = error "quoteType: not defined" + } + +quoteHsxExpression :: HsxSettings -> String -> TH.ExpQ +quoteHsxExpression settings code = do hsxPosition <- findHSXPosition extensions <- TH.extsEnabled - expression <- case parseHsx hsxPosition extensions (cs code) of + expression <- case parseHsx settings hsxPosition extensions (cs code) of Left error -> fail (Megaparsec.errorBundlePretty error) Right result -> pure result compileToHaskell expression @@ -458,4 +478,4 @@ textToStaticString text = StaticString (Text.unpack text ++) (Text.encodeUtf8 te {-# INLINE textToStaticString #-} instance Show (MarkupM ()) where - show html = BlazeString.renderHtml html + show html = BlazeString.renderHtml html \ No newline at end of file diff --git a/ihp-hsx/README.md b/ihp-hsx/README.md index 20fcc3a4f..d576bbbc2 100644 --- a/ihp-hsx/README.md +++ b/ihp-hsx/README.md @@ -274,6 +274,102 @@ The underlying HTML library blaze currently does not support an empty HTML attri If you use HTML entities, such as ` ` for a non-breaking space, you will notice they appear exactly like that. To output directly (i.e. unescaped) use the method `preEscapedToMarkup` from `Text.Blaze.Html5`. +### Custom HSX and Unchecked HSX + +HSX provides two additional QuasiQuoters beyond the standard `[hsx|...|]` for increased flexibility: `uncheckedHsx` and `customHsx`. + +#### Using `uncheckedHsx` + +`uncheckedHsx` provides a quick way to bypass HSX's strict tag and attribute name checking. + +It will still check for a valid HTML structure, but it will accept any tag and attribute names. + + +```haskell +[uncheckedHsx| + + Content + +|] +``` + +While convenient for rapid development, use it with caution as you lose the benefits of compile-time guarantees for your markup. + +#### Using `customHsx` + +`customHsx` allows you to extend the default HSX with additional whitelisted tag names and attribute names while maintaining the same strict compile-time checking of the default `hsx`. 
This makes it easier to use custom elements that often come with special attributes, and JavaScript libraries such as `_hyperscript` that use `_` as an attribute name.

To use `customHsx`, you need to create it in a separate module due to Template Haskell restrictions. Here's how to set it up:

1. First, create a new module for your custom HSX (e.g., `Application.Helper.CustomHsx`):

```haskell
module Application.Helper.CustomHsx where

import IHP.Prelude
import IHP.HSX.QQ (customHsx)
import IHP.HSX.Parser
import Language.Haskell.TH.Quote
import qualified Data.Set as Set

myHsx :: QuasiQuoter
myHsx = customHsx
    (HsxSettings
        { checkMarkup = True
        , additionalTagNames = Set.fromList ["book", "heading", "name"]
        , additionalAttributeNames = Set.fromList ["_", "custom-attribute"]
        }
    )
```

Configuration options for `HsxSettings`:
- `checkMarkup`: Boolean to enable/disable markup checking
- `additionalTagNames`: Set of additional allowed tag names
- `additionalAttributeNames`: Set of additional allowed attribute names

2. Make it available in your views by adding it to your view helpers module:

```haskell
module Application.Helper.View (
    module Application.Helper.View,
    module Application.Helper.CustomHsx -- Add this line
) where

import IHP.ViewPrelude
import Application.Helper.CustomHsx (myHsx) -- Add this line
```

3. Use it in your views:

```haskell
[myHsx|
    <book>
        <heading>My Book</heading>
        <name>Author Name</name>
    </book>
|]
```

The custom HSX will validate that tags and attributes are either in the default HSX whitelist or in your additional sets. This gives you the flexibility to use custom elements and attributes.

This approach is particularly useful for:
- Web Components with custom attribute names
- UI libraries with non-standard attributes
- Domain-specific XML markup languages
- Integration with third-party frameworks that extend HTML syntax

`customHsx` whitelisting and even `uncheckedHsx` do not entirely help with libraries that use very unusual symbols in their attributes, like Alpine.js, because HSX doesn't recognize HTML attributes starting with `@` or containing `:`. In these cases, the spread syntax `{...attributeList}` is likely your best bet.
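For merely unusual, but still parseable, names like `_hyperscript`'s `_`, the `myHsx` quoter defined above is already enough. A small sketch (the module name and the `_hyperscript` expression are illustrative, not from the original README):

```haskell
{-# LANGUAGE QuasiQuotes #-}
module RevealButton where

import Application.Helper.CustomHsx (myHsx)
import Text.Blaze.Html (Html)

-- "_" was whitelisted via additionalAttributeNames in the myHsx settings
-- above, so the surrounding markup still gets strict compile-time checking.
revealButton :: Html
revealButton = [myHsx|<button _="on click toggle .visible on #details">Details</button>|]
```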
+ +```haskell +-- This will not work +[uncheckedHsx||] + +-- Using spread syntax will work +[hsx||] +``` ## Common HSX Patterns diff --git a/ihp-hsx/Test/IHP/HSX/CustomHsxCases.hs b/ihp-hsx/Test/IHP/HSX/CustomHsxCases.hs new file mode 100644 index 000000000..af39fcaf2 --- /dev/null +++ b/ihp-hsx/Test/IHP/HSX/CustomHsxCases.hs @@ -0,0 +1,38 @@ +{-| +Module: Test.HSX.CustomHsxCases +Description: Test helpers for HSX tests with custom tags and attributes +-} +module IHP.HSX.CustomHsxCases where + +import Test.Hspec +import Prelude +import IHP.HSX.QQ +import qualified Text.Blaze.Renderer.Text as Blaze +import Data.Text +import Language.Haskell.TH.Quote +import IHP.HSX.Parser +import qualified Data.Set as Set + +myCustomHsx :: QuasiQuoter +myCustomHsx = customHsx + (HsxSettings { checkMarkup = True + , additionalTagNames = Set.fromList ["mycustomtag", "anothercustomtag"] + , additionalAttributeNames = Set.fromList ["my-custom-attr", "anothercustomattr"] + } + ) + +myTagsOnlyHsx :: QuasiQuoter +myTagsOnlyHsx = customHsx + (HsxSettings { checkMarkup = True + , additionalTagNames = Set.fromList ["mycustomtag", "anothercustomtag"] + , additionalAttributeNames = Set.fromList [] + } + ) + +myAttrsOnlyHsx :: QuasiQuoter +myAttrsOnlyHsx = customHsx + (HsxSettings { checkMarkup = True + , additionalTagNames = Set.fromList [] + , additionalAttributeNames = Set.fromList ["my-custom-attr", "anothercustomattr"] + } + ) diff --git a/ihp-hsx/Test/IHP/HSX/ParserSpec.hs b/ihp-hsx/Test/IHP/HSX/ParserSpec.hs index 11b6c1506..8087828a5 100644 --- a/ihp-hsx/Test/IHP/HSX/ParserSpec.hs +++ b/ihp-hsx/Test/IHP/HSX/ParserSpec.hs @@ -11,68 +11,162 @@ import qualified Text.Megaparsec as Megaparsec import qualified Text.Megaparsec.Error as Megaparsec import qualified "template-haskell" Language.Haskell.TH as TH import qualified "template-haskell" Language.Haskell.TH.Syntax as TH +import qualified Data.Set as Set + tests = do let position = Megaparsec.SourcePos "" (Megaparsec.mkPos 1) (Megaparsec.mkPos 1) let extensions = [] + describe "HSX Parser" do + let settings = HsxSettings True Set.empty Set.empty it "should fail on invalid html tags" do let errorText = "1:13:\n |\n1 | \n | ^\nInvalid tag name: myinvalidel\n" - let (Left error) = parseHsx position extensions "" + let (Left error) = parseHsx settings position extensions "" (Megaparsec.errorBundlePretty error) `shouldBe` errorText it "should fail on invalid attribute names" do let errorText = "1:23:\n |\n1 |
\n | ^\nInvalid attribute name: invalid-attribute\n" - let (Left error) = parseHsx position extensions "
" + let (Left error) = parseHsx settings position extensions "
" (Megaparsec.errorBundlePretty error) `shouldBe` errorText it "should fail on unmatched tags" do let errorText = "1:7:\n |\n1 |
\n | ^\nunexpected '/'\nexpecting \"
\", identifier, or white space\n" - let (Left error) = parseHsx position extensions "
" + let (Left error) = parseHsx settings position extensions "
" (Megaparsec.errorBundlePretty error) `shouldBe` errorText it "should parse a closing tag with spaces" do - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" p `shouldBe` (Right (Children [Node "div" [] [] False])) it "should strip spaces around nodes" do - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" p `shouldBe` (Right (Children [Node "div" [] [Node "span" [] [] False] False])) it "should strip spaces after self closing tags" do - let p = parseHsx position extensions "{\"meta\"}\n\n " + let p = parseHsx settings position extensions "{\"meta\"}\n\n " p `shouldBe` (Right (Children [Node "head" [] [SplicedNode (TH.LitE (TH.StringL "meta")),Node "link" [StaticAttribute "rel" (TextValue "stylesheet"),StaticAttribute "href" (TextValue "/vendor/bootstrap.min.css")] [] True] False])) it "should not strip spaces in a text node" do - let p = parseHsx position extensions " Hello World " + let p = parseHsx settings position extensions " Hello World " p `shouldBe` (Right (Children [TextNode "Hello World"])) it "should deal with variables in text nodes" do - let p = parseHsx position extensions "
\n Hello {\"name\"}! \n
" + let p = parseHsx settings position extensions "
\n Hello {\"name\"}! \n
" p `shouldBe` (Right (Children [Node "div" [] [TextNode "Hello ",SplicedNode (TH.LitE (TH.StringL "name")),TextNode "!"] False])) it "should parse self closing tags with spaces around it" do - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" p `shouldBe` (Right (Children [Node "div" [] [] False])) it "should collapse spaces" do - let p = parseHsx position extensions "\n Hello\n World\n ! " + let p = parseHsx settings position extensions "\n Hello\n World\n ! " p `shouldBe` (Right (Children [TextNode "Hello World !"])) it "should parse spread values" do - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" -- We cannot easily construct the @VarE variables@ expression, therefore we use show here for comparison show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" it "should parse spread values with a space" do -- See https://github.com/digitallyinduced/ihp/issues/1588 - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" it "should accept underscores in data attributes" do - let p = parseHsx position extensions "
" + let p = parseHsx settings position extensions "
" p `shouldBe` (Right (Children [Node "div" [StaticAttribute "data-client_id" (TextValue "test")] [] False])) it "should accept doctype" do - let p = parseHsx position extensions "hello" - p `shouldBe` (Right (Children [Node "!DOCTYPE" [StaticAttribute "html" (TextValue "html")] [] True, Node "html" [StaticAttribute "lang" (TextValue "en")] [Node "body" [] [TextNode "hello"] False] False])) \ No newline at end of file + let p = parseHsx settings position extensions "hello" + p `shouldBe` (Right (Children [Node "!DOCTYPE" [StaticAttribute "html" (TextValue "html")] [] True, Node "html" [StaticAttribute "lang" (TextValue "en")] [Node "body" [] [TextNode "hello"] False] False])) + + describe "uncheckedHsx" do + let settings = HsxSettings False Set.empty Set.empty + it "should not check markup" do + let p = parseHsx settings position extensions "" + p `shouldBe` (Right (Children [Node "invalid-tag" [StaticAttribute "invalid-attribute" (TextValue "invalid")] [] False])) + + it "should not check attribute names" do + let p = parseHsx settings position extensions "
" + p `shouldBe` (Right (Children [Node "div" [StaticAttribute "invalid-attribute" (TextValue "invalid")] [] False])) + + it "should fail on unmatched tags" do + let errorText = "1:7:\n |\n1 |
\n | ^\nunexpected '/'\nexpecting \"
\", identifier, or white space\n" + let (Left error) = parseHsx settings position extensions "
" + (Megaparsec.errorBundlePretty error) `shouldBe` errorText + + it "should parse a closing tag with spaces" do + let p = parseHsx settings position extensions "
" + p `shouldBe` (Right (Children [Node "div" [] [] False])) + + it "should strip spaces around nodes" do + let p = parseHsx settings position extensions "
" + p `shouldBe` (Right (Children [Node "div" [] [Node "span" [] [] False] False])) + + it "should strip spaces after self closing tags" do + let p = parseHsx settings position extensions "{\"meta\"}\n\n " + p `shouldBe` (Right (Children [Node "head" [] [SplicedNode (TH.LitE (TH.StringL "meta")),Node "link" [StaticAttribute "rel" (TextValue "stylesheet"),StaticAttribute "href" (TextValue "/vendor/bootstrap.min.css")] [] True] False])) + + it "should not strip spaces in a text node" do + let p = parseHsx settings position extensions " Hello World " + p `shouldBe` (Right (Children [TextNode "Hello World"])) + + it "should deal with variables in text nodes" do + let p = parseHsx settings position extensions "
\n Hello {\"name\"}! \n
" + p `shouldBe` (Right (Children [Node "div" [] [TextNode "Hello ",SplicedNode (TH.LitE (TH.StringL "name")),TextNode "!"] False])) + + it "should parse self closing tags with spaces around it" do + let p = parseHsx settings position extensions "
" + p `shouldBe` (Right (Children [Node "div" [] [] False])) + + it "should collapse spaces" do + let p = parseHsx settings position extensions "\n Hello\n World\n ! " + p `shouldBe` (Right (Children [TextNode "Hello World !"])) + + it "should parse spread values" do + let p = parseHsx settings position extensions "
" + -- We cannot easily construct the @VarE variables@ expression, therefore we use show here for comparison + show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" + + it "should parse spread values with a space" do + -- See https://github.com/digitallyinduced/ihp/issues/1588 + let p = parseHsx settings position extensions "
" + show p `shouldBe` "Right (Children [Node \"div\" [SpreadAttributes (VarE variables)] [] False])" + + it "should accept underscores in data attributes" do + let p = parseHsx settings position extensions "
" + p `shouldBe` (Right (Children [Node "div" [StaticAttribute "data-client_id" (TextValue "test")] [] False])) + + it "should accept doctype" do + let p = parseHsx settings position extensions "hello" + p `shouldBe` (Right (Children [Node "!DOCTYPE" [StaticAttribute "html" (TextValue "html")] [] True, Node "html" [StaticAttribute "lang" (TextValue "en")] [Node "body" [] [TextNode "hello"] False] False])) + + describe "customHsx" do + let customSettings = HsxSettings True + (Set.fromList ["mycustomtag"]) + (Set.fromList ["my-custom-attr"]) + + it "should allow specified custom tags" do + let p = parseHsx customSettings position extensions "hello" + p `shouldBe` (Right (Children [Node "mycustomtag" [] [TextNode "hello"] False])) + + it "should reject non-specified custom tags" do + let errorText = "1:15:\n |\n1 | hello\n | ^\nInvalid tag name: notallowedtag\n" + case parseHsx customSettings position extensions "hello" of + Left error -> (Megaparsec.errorBundlePretty error) `shouldBe` errorText + Right _ -> fail "Expected parser to fail with invalid tag name" + + it "should allow specified custom attributes" do + let p = parseHsx customSettings position extensions "
test
" + p `shouldBe` (Right (Children [Node "div" [StaticAttribute "my-custom-attr" (TextValue "hello")] [TextNode "test"] False])) + + it "should reject non-specified custom attributes" do + let errorText = "1:22:\n |\n1 |
\n | ^\nInvalid attribute name: not-allowed-attr\n" + case parseHsx customSettings position extensions "
" of + Left error -> (Megaparsec.errorBundlePretty error) `shouldBe` errorText + Right _ -> fail "Expected parser to fail with invalid attribute name" + + it "should allow mixing custom and standard elements" do + let p = parseHsx customSettings position extensions "test" + p `shouldBe` (Right (Children [Node "mycustomtag" [StaticAttribute "class" (TextValue "hello"), StaticAttribute "my-custom-attr" (TextValue "world")] [TextNode "test"] False])) \ No newline at end of file diff --git a/ihp-hsx/Test/IHP/HSX/QQSpec.hs b/ihp-hsx/Test/IHP/HSX/QQSpec.hs index 9bd65c05d..916a71ed3 100644 --- a/ihp-hsx/Test/IHP/HSX/QQSpec.hs +++ b/ihp-hsx/Test/IHP/HSX/QQSpec.hs @@ -10,6 +10,7 @@ import IHP.HSX.QQ import qualified Text.Blaze.Renderer.Text as Blaze import Text.Blaze (preEscapedTextValue) import Data.Text +import IHP.HSX.CustomHsxCases tests :: SpecWith () tests = do @@ -191,6 +192,21 @@ tests = do [hsx|hello|] `shouldBeHtml` "\nhello" + describe "customHsx" do + it "should allow specified custom tags" do + [myTagsOnlyHsx|hello|] `shouldBeHtml` "hello" + [myTagsOnlyHsx|world|] `shouldBeHtml` "world" + + it "should allow specified custom attributes" do + [myAttrsOnlyHsx|
<div my-custom-attr="hello">test</div>|] `shouldBeHtml` "<div my-custom-attr=\"hello\">test</div>"
            [myAttrsOnlyHsx|<div anothercustomattr="world">test</div>|] `shouldBeHtml` "<div anothercustomattr=\"world\">test</div>"

        it "should allow combining custom tags and attributes" do
            [myCustomHsx|<mycustomtag my-custom-attr="hello">test</mycustomtag>|] `shouldBeHtml` "<mycustomtag my-custom-attr=\"hello\">test</mycustomtag>"

        it "should work with regular HTML tags and attributes too" do
            [myCustomHsx|<div class="hello">world</div>|] `shouldBeHtml` "<div class=\"hello\">world</div>
" + data Project = Project { name :: Text } data PlaceId = PlaceId Text diff --git a/ihp-hsx/ihp-hsx.cabal b/ihp-hsx/ihp-hsx.cabal index b6973912d..8ee565b4b 100644 --- a/ihp-hsx/ihp-hsx.cabal +++ b/ihp-hsx/ihp-hsx.cabal @@ -1,6 +1,6 @@ cabal-version: 2.2 name: ihp-hsx -version: 1.3.0 +version: 1.4.0 synopsis: JSX-like but for Haskell description: JSX-like templating syntax for Haskell license: MIT From 1ffdf28bcf956fda43561d1a4670994c2334f570 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 11 Nov 2024 21:13:39 -0800 Subject: [PATCH 34/47] DataSync: use react18+ useSyncExternalStore --- lib/IHP/DataSync/ihp-datasync.js | 8 ++- lib/IHP/DataSync/react.js | 84 ++++++++++++++++++-------------- 2 files changed, 53 insertions(+), 39 deletions(-) diff --git a/lib/IHP/DataSync/ihp-datasync.js b/lib/IHP/DataSync/ihp-datasync.js index 4800c31f7..af4f77978 100644 --- a/lib/IHP/DataSync/ihp-datasync.js +++ b/lib/IHP/DataSync/ihp-datasync.js @@ -235,7 +235,7 @@ const APPEND_NEW_RECORD = 0; const PREPEND_NEW_RECORD = 1; class DataSubscription { - constructor(query) { + constructor(query, cache = null) { if (typeof query !== "object" || !('table' in query)) { throw new Error("Query passed to `new DataSubscription(..)` doesn't look like a query object. If you're using the `query()` functions to costruct the object, make sure you pass the `.query` property, like this: `new DataSubscription(query('my_table').orderBy('createdAt').query)`"); } @@ -250,7 +250,8 @@ class DataSubscription { this.connectError = null; this.subscriptionId = null; this.subscribers = []; - this.records = []; + this.records = cache ? cache.get(JSON.stringify(query)) || [] : []; + this.cache = cache; this.getRecords = this.getRecords.bind(this); this.subscribe = this.subscribe.bind(this); @@ -418,6 +419,9 @@ class DataSubscription { } updateSubscribers() { + if (this.cache) { + this.cache.set(JSON.stringify(this.query), this.records); + } for (const subscriber of this.subscribers) { subscriber(this.records); } diff --git a/lib/IHP/DataSync/react.js b/lib/IHP/DataSync/react.js index 31cdfe7ca..d8c7c051b 100644 --- a/lib/IHP/DataSync/react.js +++ b/lib/IHP/DataSync/react.js @@ -1,15 +1,10 @@ -import React, { useState, useEffect, useContext } from 'react'; +import React, { useState, useEffect, useContext, useSyncExternalStore } from 'react'; import { DataSubscription, DataSyncController } from './ihp-datasync.js'; // Most IHP apps never use this context because they use session cookies for auth. // Therefore the default value is true. export const AuthCompletedContext = React.createContext(true); -// To avoid too many loading spinners when going backwards and forwards -// between pages, we cache the result of queries so we can already showing -// some data directly after a page transition. The data might be a bit -// outdated, but it will directly be overriden with the latest server state -// once it has arrived. const recordsCache = new Map(); /** @@ -18,40 +13,17 @@ const recordsCache = new Map(); * const messages = useQuery(query('messages').orderBy('createdAt')); */ export function useQuery(queryBuilder) { - const [records, setRecords] = useState(() => { - const strinigifiedQuery = JSON.stringify(queryBuilder.query); - const cachedRecords = recordsCache.get(strinigifiedQuery); - return cachedRecords === undefined ? 
null : cachedRecords;
-    });
+    const dataSubscription = DataSubscriptionStore.get(queryBuilder.query);
     const isAuthCompleted = useContext(AuthCompletedContext);
+    const records = useSyncExternalStore(dataSubscription.subscribe, dataSubscription.getRecords);
 
-    useEffect(() => {
-        if (!isAuthCompleted) {
-            return;
-        }
-
-        const strinigifiedQuery = JSON.stringify(queryBuilder.query);
-        const cachedRecords = recordsCache.get(strinigifiedQuery);
-
-        // Invalidate existing records, as the query might have been changed
-        setRecords(cachedRecords === undefined ? null : cachedRecords);
-
-        const dataSubscription = new DataSubscription(queryBuilder.query);
-        dataSubscription.createOnServer();
+    if (dataSubscription.connectError) {
+        throw dataSubscription.connectError;
+    }
 
-        // The dataSubscription is automatically closed when the last subscriber on
-        // the DataSubscription object has been unsubscribed
-
-        return dataSubscription.subscribe(records => {
-            setRecords(records);
-
-            // Update the cache whenever the records change
-            recordsCache.set(strinigifiedQuery, records);
-        });
-    }, [
-        JSON.stringify(queryBuilder.query) /* <-- It's terrible - but it works, we should find a better for this */,
-        isAuthCompleted
-    ])
+    if (!isAuthCompleted) {
+        return null;
+    }
 
     return records;
 }
@@ -89,4 +61,42 @@ export function useIsConnected() {
     }, [ setConnected ]);
 
     return isConnected;
+}
+
+export class DataSubscriptionStore {
+    static queryMap = new Map();
+
+    // To avoid too many loading spinners when going backwards and forwards
+    // between pages, we cache the result of queries so we can already show
+    // some data directly after a page transition. The data might be a bit
+    // outdated, but it will directly be overridden with the latest server state
+    // once it has arrived.
+    static cache = new Map();
+
+    static get(query) {
+        const stringifiedQuery = JSON.stringify(query);
+        const existingSubscription = DataSubscriptionStore.queryMap.get(stringifiedQuery);
+
+        if (existingSubscription) {
+            return existingSubscription;
+        } else {
+            const subscription = new DataSubscription(query, DataSubscriptionStore.cache);
+            subscription.createOnServer();
+            subscription.onClose = () => { DataSubscriptionStore.queryMap.delete(stringifiedQuery); };
+
+            DataSubscriptionStore.queryMap.set(stringifiedQuery, subscription);
+
+            // If the query changes very rapidly in `useQuery`, it can happen that `dataSubscription.subscribe`
+            // is never called at all. In that case we have an unused DataSubscription lying around. We avoid
+            // too many open connections lying around by trying to close them a second after opening them.
+            // A second is enough time for react to call the subscribe function. If it's not called by then,
+            // we most likely deal with a dead subscription, so we close it.
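+            // Note: closeIfNotUsed() is expected to leave the subscription open when a
+            // subscriber has attached in the meantime, so an active `useQuery` is unaffected.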
+ setTimeout(() => { + subscription.closeIfNotUsed(); + }, 1000); + + return subscription; + } + } } \ No newline at end of file From ed7c032e8a84bff0a65566edea63ae14fbedef6f Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Tue, 12 Nov 2024 09:33:59 -0800 Subject: [PATCH 35/47] allow tables named "tables" in Schema.sql --- Test/SchemaCompilerSpec.hs | 140 +++++++++++++++++----------------- ihp-ide/IHP/SchemaCompiler.hs | 22 ++++-- 2 files changed, 84 insertions(+), 78 deletions(-) diff --git a/Test/SchemaCompilerSpec.hs b/Test/SchemaCompilerSpec.hs index 02c4d7ff5..4d94bf068 100644 --- a/Test/SchemaCompilerSpec.hs +++ b/Test/SchemaCompilerSpec.hs @@ -130,20 +130,20 @@ tests = do it "should compile CanCreate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanCreate" compileOutput `shouldBe` [trimming| - instance CanCreate User where - create :: (?modelContext :: ModelContext) => User -> IO User + instance CanCreate Generated.ActualTypes.User where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO Generated.ActualTypes.User create model = do sqlQuerySingleRow "INSERT INTO users (id) VALUES (?) RETURNING id" (Only (model.id)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO users (id) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?)") models)) <> " RETURNING id") (List.concat $ List.map (\model -> [toField (model.id)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO users (id) VALUES (?)" (Only (model.id)) |] it "should compile CanUpdate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanUpdate" compileOutput `shouldBe` [trimming| - instance CanUpdate User where + instance CanUpdate Generated.ActualTypes.User where updateRecord model = do sqlQuerySingleRow "UPDATE users SET id = ? WHERE id = ? RETURNING id" ((fieldWithUpdate #id model, model.id)) updateRecordDiscardResult model = do @@ -161,7 +161,7 @@ tests = do let compileOutput = compileStatementPreview [statement] statement |> Text.strip getInstanceDecl "CanUpdate" compileOutput `shouldBe` [trimming| - instance CanUpdate User where + instance CanUpdate Generated.ActualTypes.User where updateRecord model = do sqlQuerySingleRow "UPDATE users SET id = ?, ids = ? :: UUID[] WHERE id = ? 
RETURNING id, ids" ((fieldWithUpdate #id model, fieldWithUpdate #ids model, model.id)) updateRecordDiscardResult model = do @@ -188,11 +188,11 @@ tests = do type User = User' type instance GetTableName (User' ) = "users" - type instance GetModelByTableName "users" = User + type instance GetModelByTableName "users" = Generated.ActualTypes.User instance Default (Id' "users") where def = Id def - instance () => Table (User' ) where + instance () => IHP.ModelSupport.Table (User' ) where tableName = "users" tableNameByteString = Data.Text.Encoding.encodeUtf8 "users" columnNames = ["id","ids","electricity_unit_price"] @@ -201,40 +201,40 @@ tests = do {-# INLINABLE primaryKeyConditionForId #-} - instance InputValue User where inputValue = IHP.ModelSupport.recordToInputValue + instance InputValue Generated.ActualTypes.User where inputValue = IHP.ModelSupport.recordToInputValue - instance FromRow User where + instance FromRow Generated.ActualTypes.User where fromRow = do id <- field ids <- field electricityUnitPrice <- field - let theRecord = User id ids electricityUnitPrice def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.User id ids electricityUnitPrice def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord type instance GetModelName (User' ) = "User" - instance CanCreate User where - create :: (?modelContext :: ModelContext) => User -> IO User + instance CanCreate Generated.ActualTypes.User where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO Generated.ActualTypes.User create model = do sqlQuerySingleRow "INSERT INTO users (id, ids, electricity_unit_price) VALUES (?, ? :: UUID[], ?) RETURNING id, ids, electricity_unit_price" ((model.id, model.ids, fieldWithDefault #electricityUnitPrice model)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO users (id, ids, electricity_unit_price) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?, ? :: UUID[], ?)") models)) <> " RETURNING id, ids, electricity_unit_price") (List.concat $ List.map (\model -> [toField (model.id), toField (model.ids), toField (fieldWithDefault #electricityUnitPrice model)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO users (id, ids, electricity_unit_price) VALUES (?, ? :: UUID[], ?)" ((model.id, model.ids, fieldWithDefault #electricityUnitPrice model)) - instance CanUpdate User where + instance CanUpdate Generated.ActualTypes.User where updateRecord model = do sqlQuerySingleRow "UPDATE users SET id = ?, ids = ? :: UUID[], electricity_unit_price = ? WHERE id = ? RETURNING id, ids, electricity_unit_price" ((fieldWithUpdate #id model, fieldWithUpdate #ids model, fieldWithUpdate #electricityUnitPrice model, model.id)) updateRecordDiscardResult model = do sqlExecDiscardResult "UPDATE users SET id = ?, ids = ? :: UUID[], electricity_unit_price = ? WHERE id = ?" 
((fieldWithUpdate #id model, fieldWithUpdate #ids model, fieldWithUpdate #electricityUnitPrice model, model.id)) - instance Record User where + instance Record Generated.ActualTypes.User where {-# INLINE newRecord #-} - newRecord = User def def 0.17 def + newRecord = Generated.ActualTypes.User def def 0.17 def instance QueryBuilder.FilterPrimaryKey "users" where @@ -263,11 +263,11 @@ tests = do type User = User' type instance GetTableName (User' ) = "users" - type instance GetModelByTableName "users" = User + type instance GetModelByTableName "users" = Generated.ActualTypes.User instance Default (Id' "users") where def = Id def - instance () => Table (User' ) where + instance () => IHP.ModelSupport.Table (User' ) where tableName = "users" tableNameByteString = Data.Text.Encoding.encodeUtf8 "users" columnNames = ["id","ids","electricity_unit_price"] @@ -276,40 +276,40 @@ tests = do {-# INLINABLE primaryKeyConditionForId #-} - instance InputValue User where inputValue = IHP.ModelSupport.recordToInputValue + instance InputValue Generated.ActualTypes.User where inputValue = IHP.ModelSupport.recordToInputValue - instance FromRow User where + instance FromRow Generated.ActualTypes.User where fromRow = do id <- field ids <- field electricityUnitPrice <- field - let theRecord = User id ids electricityUnitPrice def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.User id ids electricityUnitPrice def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord type instance GetModelName (User' ) = "User" - instance CanCreate User where - create :: (?modelContext :: ModelContext) => User -> IO User + instance CanCreate Generated.ActualTypes.User where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO Generated.ActualTypes.User create model = do sqlQuerySingleRow "INSERT INTO users (id, ids, electricity_unit_price) VALUES (?, ? :: UUID[], ?) RETURNING id, ids, electricity_unit_price" ((model.id, model.ids, fieldWithDefault #electricityUnitPrice model)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO users (id, ids, electricity_unit_price) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?, ? :: UUID[], ?)") models)) <> " RETURNING id, ids, electricity_unit_price") (List.concat $ List.map (\model -> [toField (model.id), toField (model.ids), toField (fieldWithDefault #electricityUnitPrice model)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO users (id, ids, electricity_unit_price) VALUES (?, ? :: UUID[], ?)" ((model.id, model.ids, fieldWithDefault #electricityUnitPrice model)) - instance CanUpdate User where + instance CanUpdate Generated.ActualTypes.User where updateRecord model = do sqlQuerySingleRow "UPDATE users SET id = ?, ids = ? :: UUID[], electricity_unit_price = ? WHERE id = ? RETURNING id, ids, electricity_unit_price" ((fieldWithUpdate #id model, fieldWithUpdate #ids model, fieldWithUpdate #electricityUnitPrice model, model.id)) updateRecordDiscardResult model = do sqlExecDiscardResult "UPDATE users SET id = ?, ids = ? :: UUID[], electricity_unit_price = ? WHERE id = ?" 
((fieldWithUpdate #id model, fieldWithUpdate #ids model, fieldWithUpdate #electricityUnitPrice model, model.id)) - instance Record User where + instance Record Generated.ActualTypes.User where {-# INLINE newRecord #-} - newRecord = User def def 0 def + newRecord = Generated.ActualTypes.User def def 0 def instance QueryBuilder.FilterPrimaryKey "users" where @@ -338,11 +338,11 @@ tests = do type User = User' type instance GetTableName (User' ) = "users" - type instance GetModelByTableName "users" = User + type instance GetModelByTableName "users" = Generated.ActualTypes.User instance Default (Id' "users") where def = Id def - instance () => Table (User' ) where + instance () => IHP.ModelSupport.Table (User' ) where tableName = "users" tableNameByteString = Data.Text.Encoding.encodeUtf8 "users" columnNames = ["id","ts"] @@ -351,39 +351,39 @@ tests = do {-# INLINABLE primaryKeyConditionForId #-} - instance InputValue User where inputValue = IHP.ModelSupport.recordToInputValue + instance InputValue Generated.ActualTypes.User where inputValue = IHP.ModelSupport.recordToInputValue - instance FromRow User where + instance FromRow Generated.ActualTypes.User where fromRow = do id <- field ts <- field - let theRecord = User id ts def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.User id ts def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord type instance GetModelName (User' ) = "User" - instance CanCreate User where - create :: (?modelContext :: ModelContext) => User -> IO User + instance CanCreate Generated.ActualTypes.User where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO Generated.ActualTypes.User create model = do sqlQuerySingleRow "INSERT INTO users (id) VALUES (?) RETURNING id" (Only (model.id)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO users (id) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?)") models)) <> " RETURNING id") (List.concat $ List.map (\model -> [toField (model.id)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO users (id) VALUES (?)" (Only (model.id)) - instance CanUpdate User where + instance CanUpdate Generated.ActualTypes.User where updateRecord model = do sqlQuerySingleRow "UPDATE users SET id = ? WHERE id = ? RETURNING id" ((fieldWithUpdate #id model, model.id)) updateRecordDiscardResult model = do sqlExecDiscardResult "UPDATE users SET id = ? WHERE id = ?" 
((fieldWithUpdate #id model, model.id)) - instance Record User where + instance Record Generated.ActualTypes.User where {-# INLINE newRecord #-} - newRecord = User def def def + newRecord = Generated.ActualTypes.User def def def instance QueryBuilder.FilterPrimaryKey "users" where @@ -419,11 +419,11 @@ tests = do type LandingPage = LandingPage' (QueryBuilder.QueryBuilder "paragraph_ctas") (QueryBuilder.QueryBuilder "paragraph_ctas") type instance GetTableName (LandingPage' _ _) = "landing_pages" - type instance GetModelByTableName "landing_pages" = LandingPage + type instance GetModelByTableName "landing_pages" = Generated.ActualTypes.LandingPage instance Default (Id' "landing_pages") where def = Id def - instance () => Table (LandingPage' paragraphCtasLandingPages paragraphCtasToLandingPages) where + instance () => IHP.ModelSupport.Table (LandingPage' paragraphCtasLandingPages paragraphCtasToLandingPages) where tableName = "landing_pages" tableNameByteString = Data.Text.Encoding.encodeUtf8 "landing_pages" columnNames = ["id"] @@ -432,38 +432,38 @@ tests = do {-# INLINABLE primaryKeyConditionForId #-} - instance InputValue LandingPage where inputValue = IHP.ModelSupport.recordToInputValue + instance InputValue Generated.ActualTypes.LandingPage where inputValue = IHP.ModelSupport.recordToInputValue - instance FromRow LandingPage where + instance FromRow Generated.ActualTypes.LandingPage where fromRow = do id <- field - let theRecord = LandingPage id def def def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.LandingPage id def def def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord type instance GetModelName (LandingPage' _ _) = "LandingPage" - instance CanCreate LandingPage where - create :: (?modelContext :: ModelContext) => LandingPage -> IO LandingPage + instance CanCreate Generated.ActualTypes.LandingPage where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.LandingPage -> IO Generated.ActualTypes.LandingPage create model = do sqlQuerySingleRow "INSERT INTO landing_pages (id) VALUES (?) RETURNING id" (Only (fieldWithDefault #id model)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO landing_pages (id) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?)") models)) <> " RETURNING id") (List.concat $ List.map (\model -> [toField (fieldWithDefault #id model)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => LandingPage -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.LandingPage -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO landing_pages (id) VALUES (?)" (Only (fieldWithDefault #id model)) - instance CanUpdate LandingPage where + instance CanUpdate Generated.ActualTypes.LandingPage where updateRecord model = do sqlQuerySingleRow "UPDATE landing_pages SET id = ? WHERE id = ? RETURNING id" ((fieldWithUpdate #id model, model.id)) updateRecordDiscardResult model = do sqlExecDiscardResult "UPDATE landing_pages SET id = ? WHERE id = ?" 
((fieldWithUpdate #id model, model.id)) - instance Record LandingPage where + instance Record Generated.ActualTypes.LandingPage where {-# INLINE newRecord #-} - newRecord = LandingPage def def def def + newRecord = Generated.ActualTypes.LandingPage def def def def instance QueryBuilder.FilterPrimaryKey "landing_pages" where @@ -485,14 +485,14 @@ tests = do let compileOutput = compileStatementPreview [statement] statement |> Text.strip getInstanceDecl "CanCreate" compileOutput `shouldBe` [trimming| - instance CanCreate User where - create :: (?modelContext :: ModelContext) => User -> IO User + instance CanCreate Generated.ActualTypes.User where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO Generated.ActualTypes.User create model = do sqlQuerySingleRow "INSERT INTO users (id, keywords) VALUES (?, ? :: TEXT[]) RETURNING id, keywords" ((model.id, model.keywords)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO users (id, keywords) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?, ? :: TEXT[])") models)) <> " RETURNING id, keywords") (List.concat $ List.map (\model -> [toField (model.id), toField (model.keywords)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => User -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.User -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO users (id, keywords) VALUES (?, ? :: TEXT[])" ((model.id, model.keywords)) |] @@ -516,20 +516,20 @@ tests = do it "should compile CanCreate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanCreate" compileOutput `shouldBe` [trimming| - instance CanCreate Thing where - create :: (?modelContext :: ModelContext) => Thing -> IO Thing + instance CanCreate Generated.ActualTypes.Thing where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.Thing -> IO Generated.ActualTypes.Thing create model = do sqlQuerySingleRow "INSERT INTO things (thing_arbitrary_ident) VALUES (?) RETURNING thing_arbitrary_ident" (Only (fieldWithDefault #thingArbitraryIdent model)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO things (thing_arbitrary_ident) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?)") models)) <> " RETURNING thing_arbitrary_ident") (List.concat $ List.map (\model -> [toField (fieldWithDefault #thingArbitraryIdent model)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => Thing -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.Thing -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO things (thing_arbitrary_ident) VALUES (?)" (Only (fieldWithDefault #thingArbitraryIdent model)) |] it "should compile CanUpdate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanUpdate" compileOutput `shouldBe` [trimming| - instance CanUpdate Thing where + instance CanUpdate Generated.ActualTypes.Thing where updateRecord model = do sqlQuerySingleRow "UPDATE things SET thing_arbitrary_ident = ? WHERE thing_arbitrary_ident = ? 
RETURNING thing_arbitrary_ident" ((fieldWithUpdate #thingArbitraryIdent model, model.thingArbitraryIdent)) updateRecordDiscardResult model = do @@ -537,15 +537,15 @@ tests = do |] it "should compile FromRow instance" $ \statement -> do getInstanceDecl "FromRow" compileOutput `shouldBe` [trimming| - instance FromRow Thing where + instance FromRow Generated.ActualTypes.Thing where fromRow = do thingArbitraryIdent <- field - let theRecord = Thing thingArbitraryIdent (QueryBuilder.filterWhere (#thingRef, thingArbitraryIdent) (QueryBuilder.query @Other)) def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.Thing thingArbitraryIdent (QueryBuilder.filterWhere (#thingRef, thingArbitraryIdent) (QueryBuilder.query @Other)) def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord |] it "should compile Table instance" $ \statement -> do - getInstanceDecl "() => Table" compileOutput `shouldBe` [trimming| - instance () => Table (Thing' others) where + getInstanceDecl "() => IHP.ModelSupport.Table" compileOutput `shouldBe` [trimming| + instance () => IHP.ModelSupport.Table (Thing' others) where tableName = "things" tableNameByteString = Data.Text.Encoding.encodeUtf8 "things" columnNames = ["thing_arbitrary_ident"] @@ -585,20 +585,20 @@ tests = do it "should compile CanCreate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanCreate" compileOutput `shouldBe` [trimming| - instance CanCreate BitPartRef where - create :: (?modelContext :: ModelContext) => BitPartRef -> IO BitPartRef + instance CanCreate Generated.ActualTypes.BitPartRef where + create :: (?modelContext :: ModelContext) => Generated.ActualTypes.BitPartRef -> IO Generated.ActualTypes.BitPartRef create model = do sqlQuerySingleRow "INSERT INTO bit_part_refs (bit_ref, part_ref) VALUES (?, ?) RETURNING bit_ref, part_ref" ((model.bitRef, model.partRef)) createMany [] = pure [] createMany models = do sqlQuery (Query $ "INSERT INTO bit_part_refs (bit_ref, part_ref) VALUES " <> (ByteString.intercalate ", " (List.map (\_ -> "(?, ?)") models)) <> " RETURNING bit_ref, part_ref") (List.concat $ List.map (\model -> [toField (model.bitRef), toField (model.partRef)]) models) - createRecordDiscardResult :: (?modelContext :: ModelContext) => BitPartRef -> IO () + createRecordDiscardResult :: (?modelContext :: ModelContext) => Generated.ActualTypes.BitPartRef -> IO () createRecordDiscardResult model = do sqlExecDiscardResult "INSERT INTO bit_part_refs (bit_ref, part_ref) VALUES (?, ?)" ((model.bitRef, model.partRef)) |] it "should compile CanUpdate instance with sqlQuery" $ \statement -> do getInstanceDecl "CanUpdate" compileOutput `shouldBe` [trimming| - instance CanUpdate BitPartRef where + instance CanUpdate Generated.ActualTypes.BitPartRef where updateRecord model = do sqlQuerySingleRow "UPDATE bit_part_refs SET bit_ref = ?, part_ref = ? WHERE (bit_ref, part_ref) = (?, ?) 
RETURNING bit_ref, part_ref" ((fieldWithUpdate #bitRef model, fieldWithUpdate #partRef model, model.bitRef, model.partRef)) updateRecordDiscardResult model = do @@ -606,16 +606,16 @@ tests = do |] it "should compile FromRow instance" $ \statement -> do getInstanceDecl "FromRow" compileOutput `shouldBe` [trimming| - instance FromRow BitPartRef where + instance FromRow Generated.ActualTypes.BitPartRef where fromRow = do bitRef <- field partRef <- field - let theRecord = BitPartRef bitRef partRef def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.BitPartRef bitRef partRef def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord |] it "should compile Table instance" $ \statement -> do - getInstanceDecl "(ToField bitRef, ToField partRef) => Table" compileOutput `shouldBe` [trimming| - instance (ToField bitRef, ToField partRef) => Table (BitPartRef' bitRef partRef) where + getInstanceDecl "(ToField bitRef, ToField partRef) => IHP.ModelSupport.Table" compileOutput `shouldBe` [trimming| + instance (ToField bitRef, ToField partRef) => IHP.ModelSupport.Table (BitPartRef' bitRef partRef) where tableName = "bit_part_refs" tableNameByteString = Data.Text.Encoding.encodeUtf8 "bit_part_refs" columnNames = ["bit_ref","part_ref"] @@ -627,10 +627,10 @@ tests = do let (Just statement) = find (isNamedTable "parts") statements let compileOutput = compileStatementPreview statements statement |> Text.strip getInstanceDecl "FromRow" compileOutput `shouldBe` [trimming| - instance FromRow Part where + instance FromRow Generated.ActualTypes.Part where fromRow = do partArbitraryIdent <- field - let theRecord = Part partArbitraryIdent (QueryBuilder.filterWhere (#partRef, partArbitraryIdent) (QueryBuilder.query @BitPartRef)) def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } + let theRecord = Generated.ActualTypes.Part partArbitraryIdent (QueryBuilder.filterWhere (#partRef, partArbitraryIdent) (QueryBuilder.query @BitPartRef)) def { originalDatabaseRecord = Just (Data.Dynamic.toDyn theRecord) } pure theRecord |] it "should compile QueryBuilder.FilterPrimaryKey instance" $ \statement -> do diff --git a/ihp-ide/IHP/SchemaCompiler.hs b/ihp-ide/IHP/SchemaCompiler.hs index 31237b3c8..3044cfd47 100644 --- a/ihp-ide/IHP/SchemaCompiler.hs +++ b/ihp-ide/IHP/SchemaCompiler.hs @@ -354,7 +354,7 @@ compileInputValueInstance :: CreateTable -> Text compileInputValueInstance table = "instance InputValue " <> modelName <> " where inputValue = IHP.ModelSupport.recordToInputValue\n" where - modelName = tableNameToModelName table.name + modelName = qualifiedConstructorNameFromTableName table.name -- | Returns all the type arguments of the data structure for an entity dataTypeArguments :: (?schema :: Schema) => CreateTable -> [Text] @@ -542,11 +542,14 @@ columnPlaceholder column@Column { columnType } = if columnPlaceholderNeedsTypeca columnPlaceholderNeedsTypecast Column { columnType = PArray {} } = True columnPlaceholderNeedsTypecast _ = False +qualifiedConstructorNameFromTableName :: Text -> Text +qualifiedConstructorNameFromTableName unqualifiedName = "Generated.ActualTypes." 
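+    -- Qualifying the generated constructor avoids name clashes: a table named
+    -- "tables" would otherwise produce a "Table" type that collides with
+    -- IHP.ModelSupport.Table (hence the qualified instance heads below).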
<> (tableNameToModelName unqualifiedName) + compileCreate :: CreateTable -> Text compileCreate table@(CreateTable { name, columns }) = let writableColumns = onlyWritableColumns columns - modelName = tableNameToModelName name + modelName = qualifiedConstructorNameFromTableName name columnNames = commaSep (map (.name) writableColumns) values = commaSep (map columnPlaceholder writableColumns) @@ -600,7 +603,7 @@ onlyWritableColumns columns = columns |> filter (\Column { generator } -> isNoth compileUpdate :: CreateTable -> Text compileUpdate table@(CreateTable { name, columns }) = let - modelName = tableNameToModelName name + modelName = qualifiedConstructorNameFromTableName name writableColumns = onlyWritableColumns columns toUpdateBinding Column { name } = "fieldWithUpdate #" <> columnNameToFieldName name <> " model" @@ -651,7 +654,7 @@ instance FromRow #{modelName} where |] where - modelName = tableNameToModelName name + modelName = qualifiedConstructorNameFromTableName name columnNames = map (columnNameToFieldName . (.name)) columns columnBinding columnName = columnName <> " <- field" @@ -730,9 +733,12 @@ instance FromRow #{modelName} where compileBuild :: (?schema :: Schema) => CreateTable -> Text compileBuild table@(CreateTable { name, columns }) = - "instance Record " <> tableNameToModelName name <> " where\n" + let + constructor = qualifiedConstructorNameFromTableName name + in + "instance Record " <> constructor <> " where\n" <> " {-# INLINE newRecord #-}\n" - <> " newRecord = " <> tableNameToModelName name <> " " <> unwords (map toDefaultValueExpr columns) <> " " <> (columnsReferencingTable name |> map (const "def") |> unwords) <> " def\n" + <> " newRecord = " <> constructor <> " " <> unwords (map toDefaultValueExpr columns) <> " " <> (columnsReferencingTable name |> map (const "def") |> unwords) <> " def\n" compileDefaultIdInstance :: CreateTable -> Text @@ -771,7 +777,7 @@ toDefaultValueExpr _ = "def" compileHasTableNameInstance :: (?schema :: Schema) => CreateTable -> Text compileHasTableNameInstance table@(CreateTable { name }) = "type instance GetTableName (" <> tableNameToModelName name <> "' " <> unwords (map (const "_") (dataTypeArguments table)) <> ") = " <> tshow name <> "\n" - <> "type instance GetModelByTableName " <> tshow name <> " = " <> tableNameToModelName name <> "\n" + <> "type instance GetModelByTableName " <> tshow name <> " = Generated.ActualTypes." 
<> tableNameToModelName name <> "\n" compilePrimaryKeyInstance :: (?schema :: Schema) => CreateTable -> Text compilePrimaryKeyInstance table@(CreateTable { name, columns, constraints }) = [trimming|type instance PrimaryKey $symbol = $idType|] <> "\n" @@ -816,7 +822,7 @@ instance #{instanceHead} where |] where instanceHead :: Text - instanceHead = instanceConstraints <> " => Table (" <> compileTypePattern table <> ")" + instanceHead = instanceConstraints <> " => IHP.ModelSupport.Table (" <> compileTypePattern table <> ")" where instanceConstraints = table From 51d17d24eee0bb50e64c53acb2e15a5d19440974 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Tue, 12 Nov 2024 09:52:17 -0800 Subject: [PATCH 36/47] support event_trigger functions --- Test/IDE/SchemaDesigner/CompilerSpec.hs | 13 +++++++++++++ Test/IDE/SchemaDesigner/ParserSpec.hs | 15 +++++++++++++++ ihp-graphql/IHP/GraphQL/SchemaCompiler.hs | 1 + ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs | 1 + ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs | 5 +++++ ihp-ide/IHP/IDE/SchemaDesigner/Types.hs | 1 + 6 files changed, 36 insertions(+) diff --git a/Test/IDE/SchemaDesigner/CompilerSpec.hs b/Test/IDE/SchemaDesigner/CompilerSpec.hs index 22a2f3bc4..a1646df73 100644 --- a/Test/IDE/SchemaDesigner/CompilerSpec.hs +++ b/Test/IDE/SchemaDesigner/CompilerSpec.hs @@ -753,6 +753,19 @@ tests = do compileSql [statement] `shouldBe` sql + it "should compile a CREATE OR REPLACE FUNCTION ..() RETURNS EVENT_TRIGGER .." do + let sql = cs [plain|CREATE OR REPLACE FUNCTION a() RETURNS EVENT_TRIGGER AS $$$$ language plpgsql;\n|] + let statement = CreateFunction + { functionName = "a" + , functionArguments = [] + , functionBody = "" + , orReplace = True + , returns = PEventTrigger + , language = "plpgsql" + } + + compileSql [statement] `shouldBe` sql + it "should compile a CREATE FUNCTION ..() RETURNS TRIGGER .." 
do let sql = cs [plain|CREATE FUNCTION notify_did_insert_webrtc_connection() RETURNS TRIGGER AS $$ BEGIN PERFORM pg_notify('did_insert_webrtc_connection', json_build_object('id', NEW.id, 'floor_id', NEW.floor_id, 'source_user_id', NEW.source_user_id, 'target_user_id', NEW.target_user_id)::text); RETURN NEW; END; $$ language plpgsql;\n|] let statement = CreateFunction diff --git a/Test/IDE/SchemaDesigner/ParserSpec.hs b/Test/IDE/SchemaDesigner/ParserSpec.hs index e7b558022..b11425334 100644 --- a/Test/IDE/SchemaDesigner/ParserSpec.hs +++ b/Test/IDE/SchemaDesigner/ParserSpec.hs @@ -802,6 +802,21 @@ $$; , language = "plpgsql" } + it "should parse CREATE FUNCTION statements that returns an event_trigger" do + let sql = cs [plain| + CREATE FUNCTION public.a() RETURNS event_trigger + LANGUAGE plpgsql + AS $$ BEGIN SELECT 1; END; $$; + |] + parseSql sql `shouldBe` CreateFunction + { functionName = "a" + , functionArguments = [] + , functionBody = " BEGIN SELECT 1; END; " + , orReplace = False + , returns = PEventTrigger + , language = "plpgsql" + } + it "should parse a decimal default value with a type-cast" do let sql = "CREATE TABLE a(electricity_unit_price DOUBLE PRECISION DEFAULT 0.17::double precision NOT NULL);" let statements = diff --git a/ihp-graphql/IHP/GraphQL/SchemaCompiler.hs b/ihp-graphql/IHP/GraphQL/SchemaCompiler.hs index dd0b48692..e2035ce20 100644 --- a/ihp-graphql/IHP/GraphQL/SchemaCompiler.hs +++ b/ihp-graphql/IHP/GraphQL/SchemaCompiler.hs @@ -213,4 +213,5 @@ postgresTypeToGraphQLType PInet = NamedType "IPv4" postgresTypeToGraphQLType PTSVector = NamedType "String" postgresTypeToGraphQLType (PArray type_) = ListType (postgresTypeToGraphQLType type_) postgresTypeToGraphQLType PTrigger = error "Trigger cannot be converted to a GraphQL type" +postgresTypeToGraphQLType PEventTrigger = error "Trigger cannot be converted to a GraphQL type" postgresTypeToGraphQLType (PCustomType theType) = NamedType "String" \ No newline at end of file diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs index b05af3c59..373aeab7d 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs @@ -201,6 +201,7 @@ compilePostgresType PInet = "INET" compilePostgresType PTSVector = "TSVECTOR" compilePostgresType (PArray type_) = compilePostgresType type_ <> "[]" compilePostgresType PTrigger = "TRIGGER" +compilePostgresType PEventTrigger = "EVENT_TRIGGER" compilePostgresType (PCustomType theType) = theType compileIdentifier :: Text -> Text diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs index 753e0c21c..e94c409c8 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs @@ -272,6 +272,7 @@ sqlType = choice $ map optionalArray , inet , tsvector , trigger + , eventTrigger , singleChar , customType ] @@ -429,6 +430,10 @@ sqlType = choice $ map optionalArray trigger = do try (symbol' "TRIGGER") pure PTrigger + + eventTrigger = do + try (symbol' "EVENT_TRIGGER") + pure PEventTrigger customType = do optional do diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs index 1d9fbf465..e4bffde44 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs @@ -225,6 +225,7 @@ data PostgresType | PTSVector | PArray PostgresType | PTrigger + | PEventTrigger | PCustomType Text deriving (Eq, Show) From e2360aa76c0587c0851496d308298d97878456ca Mon Sep 17 00:00:00 
2001 From: Marc Scholten Date: Tue, 12 Nov 2024 10:28:34 -0800 Subject: [PATCH 37/47] support postgres event trigger in schema.sql --- Test/IDE/SchemaDesigner/CompilerSpec.hs | 11 ++++ Test/IDE/SchemaDesigner/ParserSpec.hs | 15 +++++ ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs | 1 + ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs | 4 ++ ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs | 56 ++++++++++++++++++- ihp-ide/IHP/IDE/SchemaDesigner/Types.hs | 6 ++ 6 files changed, 91 insertions(+), 2 deletions(-) diff --git a/Test/IDE/SchemaDesigner/CompilerSpec.hs b/Test/IDE/SchemaDesigner/CompilerSpec.hs index a1646df73..91d839057 100644 --- a/Test/IDE/SchemaDesigner/CompilerSpec.hs +++ b/Test/IDE/SchemaDesigner/CompilerSpec.hs @@ -998,6 +998,17 @@ tests = do } ] compileSql statements `shouldBe` sql + it "should compile 'CREATE EVENT TRIGGER ..' statements" do + let sql = "CREATE EVENT TRIGGER trigger_update_schema ON ddl_command_end WHEN TAG IN ('CREATE TABLE', 'ALTER TABLE', 'DROP TABLE') EXECUTE FUNCTION update_tables_and_columns();\n" + let statements = [ CreateEventTrigger + { name = "trigger_update_schema" + , eventOn = "ddl_command_end" + , whenCondition = Just (InExpression (VarExpression "TAG") (InArrayExpression [TextExpression "CREATE TABLE", TextExpression "ALTER TABLE", TextExpression "DROP TABLE"])) + , functionName = "update_tables_and_columns" + , arguments = [] + } ] + compileSql statements `shouldBe` sql + it "should compile 'BEGIN;' statements" do let sql = "BEGIN;\n" let statements = [ Begin ] diff --git a/Test/IDE/SchemaDesigner/ParserSpec.hs b/Test/IDE/SchemaDesigner/ParserSpec.hs index b11425334..fd68a7470 100644 --- a/Test/IDE/SchemaDesigner/ParserSpec.hs +++ b/Test/IDE/SchemaDesigner/ParserSpec.hs @@ -1102,6 +1102,21 @@ COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UU , arguments = [TextExpression "hello"] } + it "should parse 'CREATE EVENT TRIGGER ..' statements" do + let sql = cs [plain| + CREATE EVENT TRIGGER trigger_update_schema + ON ddl_command_end + WHEN TAG IN ('CREATE TABLE', 'ALTER TABLE', 'DROP TABLE') + EXECUTE FUNCTION update_tables_and_columns(); + |] + parseSql sql `shouldBe` CreateEventTrigger + { name = "trigger_update_schema" + , eventOn = "ddl_command_end" + , whenCondition = Just (InExpression (VarExpression "TAG") (InArrayExpression [TextExpression "CREATE TABLE", TextExpression "ALTER TABLE", TextExpression "DROP TABLE"])) + , functionName = "update_tables_and_columns" + , arguments = [] + } + it "should parse 'ALTER SEQUENCE ..' 
statements" do let sql = cs [plain|ALTER SEQUENCE public.a OWNED BY public.b.serial_number;|] parseSql sql `shouldBe` UnknownStatement { raw = "ALTER SEQUENCE public.a OWNED BY public.b.serial_number" } diff --git a/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs b/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs index 26d06faf9..41eb714ec 100644 --- a/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs +++ b/ihp-ide/IHP/IDE/CodeGen/MigrationGenerator.hs @@ -168,6 +168,7 @@ diffSchemas targetSchema' actualSchema' = (drop <> create) toDropStatement CreatePolicy { tableName, name } = Just DropPolicy { tableName, policyName = name } toDropStatement CreateFunction { functionName } = Just DropFunction { functionName } toDropStatement CreateTrigger { name, tableName } = Just DropTrigger { name, tableName } + toDropStatement CreateEventTrigger { name } = Just DropEventTrigger { name } toDropStatement otherwise = Nothing diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs index 373aeab7d..aab65287d 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Compiler.hs @@ -55,6 +55,8 @@ compileStatement UnknownStatement { raw } = raw <> ";" compileStatement Set { name, value } = "SET " <> compileIdentifier name <> " = " <> compileExpression value <> ";" compileStatement SelectStatement { query } = "SELECT " <> query <> ";" compileStatement DropTrigger { name, tableName } = "DROP TRIGGER " <> compileIdentifier name <> " ON " <> compileIdentifier tableName <> ";" +compileStatement CreateEventTrigger { name, eventOn, whenCondition, functionName, arguments } = "CREATE EVENT TRIGGER " <> compileIdentifier name <> " ON " <> compileIdentifier eventOn <> " " <> (maybe "" (\expression -> "WHEN " <> compileExpression expression) whenCondition) <> " EXECUTE FUNCTION " <> compileExpression (CallExpression functionName arguments) <> ";" +compileStatement DropEventTrigger { name } = "DROP EVENT TRIGGER " <> compileIdentifier name <> ";" -- | Emit a PRIMARY KEY constraint when there are multiple primary key columns compilePrimaryKeyConstraint :: PrimaryKeyConstraint -> Maybe Text @@ -132,6 +134,7 @@ compileExpression (EqExpression a b) = compileExpressionWithOptionalParenthese a compileExpression (IsExpression a (NotExpression b)) = compileExpressionWithOptionalParenthese a <> " IS NOT " <> compileExpressionWithOptionalParenthese b -- 'IS (NOT NULL)' => 'IS NOT NULL' compileExpression (IsExpression a b) = compileExpressionWithOptionalParenthese a <> " IS " <> compileExpressionWithOptionalParenthese b compileExpression (InExpression a b) = compileExpressionWithOptionalParenthese a <> " IN " <> compileExpressionWithOptionalParenthese b +compileExpression (InArrayExpression values) = "(" <> intercalate ", " (map compileExpression values) <> ")" compileExpression (NotExpression a) = "NOT " <> compileExpressionWithOptionalParenthese a compileExpression (AndExpression a b) = compileExpressionWithOptionalParenthese a <> " AND " <> compileExpressionWithOptionalParenthese b compileExpression (OrExpression a b) = compileExpressionWithOptionalParenthese a <> " OR " <> compileExpressionWithOptionalParenthese b @@ -161,6 +164,7 @@ compileExpressionWithOptionalParenthese expr@(IntExpression {}) = compileExpress compileExpressionWithOptionalParenthese expr@(DoubleExpression {}) = compileExpression expr compileExpressionWithOptionalParenthese expr@(DotExpression (VarExpression {}) b) = compileExpression expr compileExpressionWithOptionalParenthese 
expr@(ConcatenationExpression a b ) = compileExpression expr +compileExpressionWithOptionalParenthese expr@(InArrayExpression values) = compileExpression expr compileExpressionWithOptionalParenthese expression = "(" <> compileExpression expression <> ")" compareStatement (CreateEnumType {}) _ = LT diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs index e94c409c8..8c38296dc 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs @@ -464,7 +464,7 @@ table = [ , binary "||" ConcatenationExpression , binary "IS" IsExpression - , binary "IN" InExpression + , inExpr , prefix "NOT" NotExpression , prefix "EXISTS" ExistsExpression , typeCast @@ -488,6 +488,11 @@ table = [ char '.' name <- identifier pure $ \expr -> DotExpression expr name + + inExpr = Postfix do + lexeme "IN" + right <- try inArrayExpression <|> expression + pure $ \expr -> InExpression expr right -- | Parses a SQL expression -- @@ -548,7 +553,10 @@ selectExpr = do whereClause Nothing <|> explicitAs <|> implicitAs - +inArrayExpression :: Parser Expression +inArrayExpression = do + values <- between (char '(') (char ')') (expression `sepBy` (char ',' >> space)) + pure (InArrayExpression values) @@ -630,6 +638,38 @@ createFunction = do createTrigger = do lexeme "CREATE" + createEventTrigger <|> createTrigger' + +createEventTrigger = do + lexeme "EVENT" + lexeme "TRIGGER" + + name <- qualifiedIdentifier + lexeme "ON" + eventOn <- identifier + + whenCondition <- optional do + lexeme "WHEN" + expression + + lexeme "EXECUTE" + (lexeme "FUNCTION") <|> (lexeme "PROCEDURE") + + (CallExpression functionName arguments) <- callExpr + + char ';' + + pure CreateEventTrigger + { name + , eventOn + , whenCondition + , functionName + , arguments + } + + + +createTrigger' = do lexeme "TRIGGER" name <- qualifiedIdentifier @@ -875,6 +915,10 @@ dropPolicy = do dropTrigger = do lexeme "DROP" + + dropEventTrigger <|> dropTrigger' + +dropTrigger' = do lexeme "TRIGGER" name <- qualifiedIdentifier lexeme "ON" @@ -882,6 +926,14 @@ dropTrigger = do char ';' pure DropTrigger { name, tableName } + +dropEventTrigger = do + lexeme "EVENT" + lexeme "TRIGGER" + name <- qualifiedIdentifier + char ';' + pure DropEventTrigger { name } + createSequence = do lexeme "CREATE" lexeme "SEQUENCE" diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs index e4bffde44..bf8af1657 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Types.hs @@ -64,8 +64,12 @@ data Statement | DropDefaultValue { tableName :: Text, columnName :: Text } -- | CREATE TRIGGER ..; | CreateTrigger { name :: !Text, eventWhen :: !TriggerEventWhen, event :: !TriggerEvent, tableName :: !Text, for :: !TriggerFor, whenCondition :: Maybe Expression, functionName :: !Text, arguments :: ![Expression] } + -- | CREATE EVENT TRIGGER ..; + | CreateEventTrigger { name :: !Text, eventOn :: !Text, whenCondition :: Maybe Expression, functionName :: !Text, arguments :: ![Expression] } -- | DROP TRIGGER .. 
ON ..; | DropTrigger { name :: !Text, tableName :: !Text } -- | DROP EVENT TRIGGER ..; | DropEventTrigger { name :: !Text } -- | BEGIN; | Begin -- | COMMIT; @@ -165,6 +169,8 @@ data Expression = | IsExpression Expression Expression -- | a IN b | InExpression Expression Expression + -- | ('a', 'b') + | InArrayExpression [Expression] -- | NOT a | NotExpression Expression -- | EXISTS a
From 6fc9158647619cf50a8bf2c7ade9f7f1b4aa274c Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Tue, 12 Nov 2024 11:41:03 -0800 Subject: [PATCH 38/47] DataSync: Avoid CREATE TABLE large_pg_notifications if not needed --- IHP/DataSync/ChangeNotifications.hs | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/IHP/DataSync/ChangeNotifications.hs b/IHP/DataSync/ChangeNotifications.hs index a8ad25d38..8a38d59d4 100644 --- a/IHP/DataSync/ChangeNotifications.hs +++ b/IHP/DataSync/ChangeNotifications.hs @@ -98,12 +98,19 @@ createNotificationFunction table = [i| WHEN duplicate_function THEN null; - CREATE UNLOGGED TABLE IF NOT EXISTS large_pg_notifications ( - id UUID DEFAULT uuid_generate_v4() PRIMARY KEY NOT NULL, - payload TEXT DEFAULT null, - created_at TIMESTAMP WITH TIME ZONE DEFAULT now() NOT NULL - ); - CREATE INDEX IF NOT EXISTS large_pg_notifications_created_at_index ON large_pg_notifications (created_at); + IF NOT EXISTS ( + SELECT FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE c.relname = 'large_pg_notifications' + AND n.nspname = 'public' + ) THEN + CREATE UNLOGGED TABLE large_pg_notifications ( + id UUID DEFAULT uuid_generate_v4() PRIMARY KEY NOT NULL, + payload TEXT DEFAULT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT now() NOT NULL + ); + CREATE INDEX large_pg_notifications_created_at_index ON large_pg_notifications (created_at); + END IF; END; $$ |]
From 31cb4798fa2072758bfb4299bdc2d897acbba497 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Tue, 12 Nov 2024 14:13:28 -0800 Subject: [PATCH 39/47] fixed space handling when parsing sql functions --- ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs index 8c38296dc..eb83f06fa 100644 --- a/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs +++ b/ihp-ide/IHP/IDE/SchemaDesigner/Parser.hs @@ -612,6 +612,7 @@ createFunction = do space lexeme "RETURNS" returns <- sqlType + space language <- optional do lexeme "language" <|> lexeme "LANGUAGE"
From fc6180b6fbb63c2fa148bead7e44b10b558c82e2 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 14 Nov 2024 11:38:52 -0800 Subject: [PATCH 40/47] DataSync: fixed type error in recordMatchesQuery In the branch for LiteralExpression, the expression was not correctly unpacked, so evaluateDynamicValue returned undefined and recordMatchesQuery sometimes returned true when it shouldn't. In practice this means that on a createRecord call, a record is sometimes added to a useQuery result optimistically, even when the query would not match that record.
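In practice the failure mode looks roughly like this (a sketch against the documented ihp-datasync API; the tasks table, its isCompleted field, and the component are made up for illustration):

    import { query, createRecord } from 'ihp-datasync';
    import { useQuery } from 'ihp-datasync/react';

    function CompletedTasks() {
        // Live subscription that should only ever contain completed tasks
        const tasks = useQuery(query('tasks').where('isCompleted', true));
        return (tasks || []).length;
    }

    // Before this fix, the non-matching insert below could briefly appear in
    // the subscription above: recordMatchesQuery evaluated the filter's
    // LiteralExpression to undefined and wrongly reported a match.
    await createRecord('tasks', { title: 'Draft', isCompleted: false });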
--- lib/IHP/DataSync/ihp-querybuilder.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/IHP/DataSync/ihp-querybuilder.js b/lib/IHP/DataSync/ihp-querybuilder.js index 4fb961568..3517d5e03 100644 --- a/lib/IHP/DataSync/ihp-querybuilder.js +++ b/lib/IHP/DataSync/ihp-querybuilder.js @@ -424,7 +424,7 @@ export function recordMatchesQuery(query, record) { default: throw new Error('Unsupported operator ' + expression.op); } } - case 'LiteralExpression': return evaluateDynamicValue(expression); + case 'LiteralExpression': return evaluateDynamicValue(expression.value); case 'ListExpression': return expression.values.map(value => evaluateDynamicValue(value)); default: throw new Error('Unsupported expression in evaluate: ' + expression.tag); } From dcc0f03601a8e956e447b0a6a9aa54726d047797 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 14 Nov 2024 12:31:09 -0800 Subject: [PATCH 41/47] DataSync: export NewRecordBehaviour --- lib/IHP/DataSync/ihp-datasync.js | 1 + lib/IHP/DataSync/index.js | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/IHP/DataSync/ihp-datasync.js b/lib/IHP/DataSync/ihp-datasync.js index af4f77978..d6c41b740 100644 --- a/lib/IHP/DataSync/ihp-datasync.js +++ b/lib/IHP/DataSync/ihp-datasync.js @@ -233,6 +233,7 @@ class DataSyncController { const APPEND_NEW_RECORD = 0; const PREPEND_NEW_RECORD = 1; +export const NewRecordBehaviour = { APPEND_NEW_RECORD, PREPEND_NEW_RECORD }; class DataSubscription { constructor(query, cache = null) { diff --git a/lib/IHP/DataSync/index.js b/lib/IHP/DataSync/index.js index 7577b8883..1674daaaa 100644 --- a/lib/IHP/DataSync/index.js +++ b/lib/IHP/DataSync/index.js @@ -19,7 +19,7 @@ import { whereGreaterThan, whereGreaterThanOrEqual } from './ihp-querybuilder.js'; -import { DataSyncController, DataSubscription, initIHPBackend, createRecord, createRecords, updateRecord, updateRecords, deleteRecord, deleteRecords } from './ihp-datasync.js'; +import { DataSyncController, DataSubscription, initIHPBackend, createRecord, createRecords, updateRecord, updateRecords, deleteRecord, deleteRecords, NewRecordBehaviour } from './ihp-datasync.js'; import { Transaction, withTransaction } from './transaction.js'; export { @@ -45,7 +45,7 @@ export { whereGreaterThanOrEqual, /* ihp-datasync.js */ - DataSyncController, DataSubscription, initIHPBackend, createRecord, createRecords, updateRecord, updateRecords, deleteRecord, deleteRecords, + DataSyncController, DataSubscription, initIHPBackend, createRecord, createRecords, updateRecord, updateRecords, deleteRecord, deleteRecords, NewRecordBehaviour, /* transaction.js */ Transaction, withTransaction From d1613e631f6542d5aef4d021ef8f2f253784e090 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Thu, 14 Nov 2024 12:45:40 -0800 Subject: [PATCH 42/47] DataSync: support custom newRecordBehaviour with useQuery --- lib/IHP/DataSync/ihp-datasync.js | 4 ++-- lib/IHP/DataSync/react.js | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/IHP/DataSync/ihp-datasync.js b/lib/IHP/DataSync/ihp-datasync.js index d6c41b740..3c981fa33 100644 --- a/lib/IHP/DataSync/ihp-datasync.js +++ b/lib/IHP/DataSync/ihp-datasync.js @@ -236,7 +236,7 @@ const PREPEND_NEW_RECORD = 1; export const NewRecordBehaviour = { APPEND_NEW_RECORD, PREPEND_NEW_RECORD }; class DataSubscription { - constructor(query, cache = null) { + constructor(query, options = null, cache = null) { if (typeof query !== "object" || !('table' in query)) { throw new Error("Query passed to `new 
DataSubscription(..)` doesn't look like a query object. If you're using the `query()` functions to costruct the object, make sure you pass the `.query` property, like this: `new DataSubscription(query('my_table').orderBy('createdAt').query)`"); } @@ -261,7 +261,7 @@ class DataSubscription { this.onMessage = this.onMessage.bind(this); // When a new record is inserted, do we put it at the end or at the beginning? - this.newRecordBehaviour = this.detectNewRecordBehaviour(); + this.newRecordBehaviour = (options && 'newRecordBehaviour' in options) ? options.newRecordBehaviour : this.detectNewRecordBehaviour(); this.optimisticCreatedPendingRecordIds = []; } diff --git a/lib/IHP/DataSync/react.js b/lib/IHP/DataSync/react.js index d8c7c051b..ba008e110 100644 --- a/lib/IHP/DataSync/react.js +++ b/lib/IHP/DataSync/react.js @@ -12,8 +12,8 @@ const recordsCache = new Map(); * @example * const messages = useQuery(query('messages').orderBy('createdAt')); */ -export function useQuery(queryBuilder) { - const dataSubscription = DataSubscriptionStore.get(queryBuilder.query); +export function useQuery(queryBuilder, options = null) { + const dataSubscription = DataSubscriptionStore.get(queryBuilder.query, options); const isAuthCompleted = useContext(AuthCompletedContext); const records = useSyncExternalStore(dataSubscription.subscribe, dataSubscription.getRecords) @@ -73,19 +73,19 @@ export class DataSubscriptionStore { // once it has arrived. static cache = new Map(); - static get(query) { - const strinigifiedQuery = JSON.stringify(query); - const existingSubscription = DataSubscriptionStore.queryMap.get(strinigifiedQuery) + static get(query, options = null) { + const key = JSON.stringify(query) + JSON.stringify(options); + const existingSubscription = DataSubscriptionStore.queryMap.get(key) if (existingSubscription) { return existingSubscription; } else { - const subscription = new DataSubscription(query, DataSubscriptionStore.cache); + const subscription = new DataSubscription(query, options, DataSubscriptionStore.cache); subscription.createOnServer(); - subscription.onClose = () => { DataSubscriptionStore.queryMap.delete(strinigifiedQuery); }; + subscription.onClose = () => { DataSubscriptionStore.queryMap.delete(key); }; - DataSubscriptionStore.queryMap.set(strinigifiedQuery, subscription); + DataSubscriptionStore.queryMap.set(key, subscription); // If the query changes very rapid in `useQuery` it can happen that the `dataSubscription.subscribe` // is never called at all. In this case we have a unused DataSubscription laying around. We avoid
From d93f07e0bc6aa38e58f3219ce109ef73d1157460 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sun, 17 Nov 2024 15:30:52 -0800 Subject: [PATCH 43/47] nix: drop dependency on ihp source to reduce derivation size Until now the IHP build depended on the full ihp source code instead of Makefile.dist and ihp static files. Now we only depend on the specific files, reducing the overall derivation size. This should speed up deployments by a bit.
--- NixSupport/default.nix | 31 +++++++++++++++++++------------ flake-module.nix | 2 ++ 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index b754aeaea..baa5b6120 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -23,6 +23,9 @@ let (otherDeps pkgs) ]; + ihpLibWithMakefile = filter { root = ihp; include = ["lib/IHP/Makefile.dist"]; name = "ihpLibWithMakefile"; }; + ihpLibWithMakefileAndStatic = filter { root = ihp; include = ["lib/IHP/Makefile.dist" "lib/IHP/static"]; name = "ihpLibWithMakefileAndStatic"; }; + schemaObjectFiles = let self = projectPath; @@ -33,18 +36,19 @@ let mkdir -p build/Generated build-generated-code - export IHP=${ihp}/lib/IHP + export IHP=${ihpLibWithMakefile}/lib/IHP ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/RunProdServer -hidir build/RunProdServer cp -r build $out ''; - src = filter { root = self; include = ["Application/Schema.sql" "Makefile"]; }; + src = filter { root = self; include = ["Application/Schema.sql" "Makefile"]; name = "schemaObjectFiles-source"; }; nativeBuildInputs = [ (ghc.ghcWithPackages (p: [ p.ihp-ide ])) # Needed for build-generated-code ] ; dontInstall = true; dontFixup = false; + disallowedReferences = [ ihp ]; # Prevent including the large full IHP source code }; prodGhcOptions = "-funbox-strict-fields -fconstraint-solver-iterations=100 -fdicts-strict -with-rtsopts=\"${rtsFlags}\""; @@ -59,8 +63,8 @@ let chmod -R +w build/RunProdServer/* - export IHP_LIB=${ihp}/lib/IHP - export IHP=${ihp}/lib/IHP + export IHP_LIB=${ihpLibWithMakefile}/lib/IHP + export IHP=${ihpLibWithMakefile}/lib/IHP mkdir -p build/bin build/RunUnoptimizedProdServer @@ -114,10 +118,11 @@ let done ''; dontFixup = true; - src = filter { root = pkgs.nix-gitignore.gitignoreSource [] projectPath; include = [filter.isDirectory "Makefile" (filter.matchExt "hs")]; exclude = ["static" "Frontend"]; }; + src = filter { root = pkgs.nix-gitignore.gitignoreSource [] projectPath; include = [filter.isDirectory "Makefile" (filter.matchExt "hs")]; exclude = ["static" "Frontend"]; name = "${appName}-source"; }; buildInputs = [allHaskellPackages]; - nativeBuildInputs = [ pkgs.makeWrapper ]; + nativeBuildInputs = [ pkgs.makeWrapper schemaObjectFiles]; enableParallelBuilding = true; + disallowedReferences = [ ihp ]; # Prevent including the large full IHP source code }; in pkgs.stdenv.mkDerivation { @@ -134,8 +139,8 @@ in # See https://github.com/svanderburg/node2nix/issues/217#issuecomment-751311272 export HOME=/tmp - export IHP_LIB=${ihp}/lib/IHP - export IHP=${ihp}/lib/IHP + export IHP_LIB=${ihpLibWithMakefileAndStatic}/lib/IHP + export IHP=${ihpLibWithMakefileAndStatic}/lib/IHP make -j static/app.css static/app.js @@ -148,11 +153,11 @@ in mkdir -p $out/bin $out/lib INPUT_HASH="$((basename $out) | cut -d - -f 1)" - makeWrapper ${binaries}/bin/RunProdServer $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + makeWrapper ${binaries}/bin/RunProdServer $out/bin/RunProdServer --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihpLibWithMakefileAndStatic}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} # Copy job runner binary to bin/ if we built it if [ -f ${binaries}/bin/RunJobs ]; then - makeWrapper ${binaries}/bin/RunJobs $out/bin/RunJobs --set-default 
IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihp}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} + makeWrapper ${binaries}/bin/RunJobs $out/bin/RunJobs --set-default IHP_ASSET_VERSION $INPUT_HASH --set-default IHP_LIB ${ihpLibWithMakefileAndStatic}/lib/IHP --run "cd $out/lib" --prefix PATH : ${pkgs.lib.makeBinPath (otherDeps pkgs)} fi; # Copy other binaries, excluding RunProdServer and RunJobs @@ -168,13 +173,15 @@ in ''; dontFixup = true; src = pkgs.nix-gitignore.gitignoreSource [] projectPath; - buildInputs = builtins.concatLists [ [allHaskellPackages] allNativePackages ]; + buildInputs = builtins.concatLists [ allNativePackages ]; nativeBuildInputs = builtins.concatLists [ [ pkgs.makeWrapper - pkgs.cacert # Needed for npm install to work from within the IHP build process + pkgs.cacert # Needed for npm install to work from within the IHP build process + [allHaskellPackages] ] ]; shellHook = "eval $(egrep ^export ${allHaskellPackages}/bin/ghc)"; enableParallelBuilding = true; impureEnvVars = pkgs.lib.fetchers.proxyImpureEnvVars; # Needed for npm install to work from within the IHP build process + disallowedReferences = [ ihp ]; # Prevent including the large full IHP source code } \ No newline at end of file diff --git a/flake-module.nix b/flake-module.nix index a9e54de7d..1cfe56f36 100644 --- a/flake-module.nix +++ b/flake-module.nix @@ -182,10 +182,12 @@ ihpFlake: name = "ihp-schema"; src = ihp; phases = [ "unpackPhase" "installPhase" ]; + nativeBuildInputs = [ihp.ihp-ide]; installPhase = '' mkdir $out cp ${ihp.ihp-ide}/lib/IHP/IHPSchema.sql $out/ ''; + allowedReferences = []; };
From 8ad0227991e6bf6770d7d8c0f979a2e3106ed045 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Sun, 17 Nov 2024 22:33:01 -0800 Subject: [PATCH 44/47] DataSync: fixed useQuery returning [] instead of null when reply is not available yet --- lib/IHP/DataSync/ihp-datasync.js | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/IHP/DataSync/ihp-datasync.js b/lib/IHP/DataSync/ihp-datasync.js index 3c981fa33..345834fb2 100644 --- a/lib/IHP/DataSync/ihp-datasync.js +++ b/lib/IHP/DataSync/ihp-datasync.js @@ -251,7 +251,17 @@ class DataSubscription { this.connectError = null; this.subscriptionId = null; this.subscribers = []; - this.records = cache ? cache.get(JSON.stringify(query)) || [] : []; + + if (cache) { + const cacheResults = cache.get(JSON.stringify(query)); + if (cacheResults !== undefined) { + this.records = cacheResults; + } else { + this.records = null; + } + } else { + this.records = null; + } this.cache = cache; this.getRecords = this.getRecords.bind(this);
From c4307ac65a1129efe997908858f266c19fb84aec Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 18 Nov 2024 17:03:06 -0800 Subject: [PATCH 45/47] removed ihp-datasync/react18 this module has been replaced with the default ihp-datasync/react.
so it's not needed anymore --- .../IHP/DataSync/TypeScript/Compiler.hs | 22 --------- ihp-datasync-typescript/Test/Spec.hs | 22 --------- lib/IHP/DataSync/react18.js | 49 ------------------- 3 files changed, 93 deletions(-) delete mode 100644 lib/IHP/DataSync/react18.js diff --git a/ihp-datasync-typescript/IHP/DataSync/TypeScript/Compiler.hs b/ihp-datasync-typescript/IHP/DataSync/TypeScript/Compiler.hs index 33973f1b0..c59539967 100644 --- a/ihp-datasync-typescript/IHP/DataSync/TypeScript/Compiler.hs +++ b/ihp-datasync-typescript/IHP/DataSync/TypeScript/Compiler.hs @@ -293,28 +293,6 @@ declare module 'ihp-datasync/react' { } function ThinBackend(props: ThinBackendProps): JSX.Element; } - -declare module 'ihp-datasync/react18' { - import { TableName, QueryBuilder } from 'ihp-datasync'; - - /** - * React hook for querying the database and streaming results in real-time. - * - * Suspends while the data is being fetched from the server. - * - * @example - * function TasksList() { - * const tasks = useQuery(query('tasks').orderBy('createdAt')) - * - * return <div>
- * {tasks.map(task => <div key={task.id}>{task.title}</div>)} - * </div>
- * } - * - * @param {QueryBuilder} queryBuilder A database query - */ - function useQuery<table extends TableName, result>(queryBuilder: QueryBuilder<table, result>): Array<result>; -} |] where tableNameTypeDef :: Text diff --git a/ihp-datasync-typescript/Test/Spec.hs b/ihp-datasync-typescript/Test/Spec.hs index d08b06e52..e9061dcb2 100644 --- a/ihp-datasync-typescript/Test/Spec.hs +++ b/ihp-datasync-typescript/Test/Spec.hs @@ -383,28 +383,6 @@ tests = do } function ThinBackend(props: ThinBackendProps): JSX.Element; } - - declare module 'ihp-datasync/react18' { - import { TableName, QueryBuilder } from 'ihp-datasync'; - - /** - * React hook for querying the database and streaming results in real-time. - * - * Suspends while the data is being fetched from the server. - * - * @example - * function TasksList() { - * const tasks = useQuery(query('tasks').orderBy('createdAt')) - * - * return <div>
- * {tasks.map(task => <div key={task.id}>{task.title}</div>)} - * </div>
- * } - * - * @param {QueryBuilder} queryBuilder A database query - */ - function useQuery<table extends TableName, result>(queryBuilder: QueryBuilder<table, result>): Array<result>; - } |] (generateTypeScriptTypeDefinitions schema) `shouldBe` expected diff --git a/lib/IHP/DataSync/react18.js b/lib/IHP/DataSync/react18.js deleted file mode 100644 index dbb8af1de..000000000 --- a/lib/IHP/DataSync/react18.js +++ /dev/null @@ -1,49 +0,0 @@ -import React, { useState, useEffect, useSyncExternalStore } from 'react'; -import { DataSubscription } from './ihp-datasync.js'; - -/** - * Returns the result of the current query in real-time. Suspends while the data is still being fetched from the server. - * @example - * const messages = useQuery(query('messages').orderBy('createdAt')); - */ -export function useQuery(queryBuilder) { - const dataSubscription = DataSubscriptionStore.get(queryBuilder.query); - - if (dataSubscription.isConnected) { - const records = useSyncExternalStore(dataSubscription.subscribe, dataSubscription.getRecords) - return records; - } else if (dataSubscription.connectError) { - throw dataSubscription.connectError; - } else { - throw dataSubscription.createOnServerPromise; - } -} - -export class DataSubscriptionStore { - static queryMap = new Map(); - static get(query) { - const strinigifiedQuery = JSON.stringify(query); - const existingSubscription = DataSubscriptionStore.queryMap.get(strinigifiedQuery) - - if (existingSubscription) { - return existingSubscription; - } else { - const subscription = new DataSubscription(query); - subscription.createOnServer(); - subscription.onClose = () => { DataSubscriptionStore.queryMap.delete(strinigifiedQuery); }; - - DataSubscriptionStore.queryMap.set(strinigifiedQuery, subscription); - - // If the query changes very rapid in `useQuery` it can happen that the `dataSubscription.subscribe` - // is never called at all. In this case we have a unused DataSubscription laying around. We avoid - // to many open connections laying around by trying to close them a second after opening them. - // A second is enough time for react to call the subscribe function. If it's not called by then, - // we most likely deal with a dead subscription, so we close it. - setTimeout(() => { - subscription.closeIfNotUsed(); - }, 1000); - - return subscription; - } - } -} \ No newline at end of file
From 2c23415e2fb002a9b0f7d0320cdc45dc3d6af838 Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 18 Nov 2024 18:25:18 -0800 Subject: [PATCH 46/47] custom storage upload directory via IHP_STORAGE_DIR env var --- IHP/FileStorage/Config.hs | 4 +++- IHP/FileStorage/ControllerFunctions.hs | 17 ++++++++--------- IHP/FileStorage/Types.hs | 2 +- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/IHP/FileStorage/Config.hs b/IHP/FileStorage/Config.hs index f5a5159fc..83f806577 100644 --- a/IHP/FileStorage/Config.hs +++ b/IHP/FileStorage/Config.hs @@ -94,7 +94,9 @@ initMinioStorage server bucket = do -- > initStaticDirStorage -- initStaticDirStorage :: State.StateT TMap.TMap IO () -initStaticDirStorage = option StaticDirStorage +initStaticDirStorage = do + directory <- EnvVar.envOrDefault "IHP_STORAGE_DIR" "static/" + option StaticDirStorage { directory } -- | The Filebase access key and secret key have to be provided using the @FILEBASE_KEY@ and @FILEBASE_SECRET@ env vars.
-- diff --git a/IHP/FileStorage/ControllerFunctions.hs b/IHP/FileStorage/ControllerFunctions.hs index cd9901ea8..6eb4cdd56 100644 --- a/IHP/FileStorage/ControllerFunctions.hs +++ b/IHP/FileStorage/ControllerFunctions.hs @@ -96,16 +96,15 @@ storeFileWithOptions fileInfo options = do let fileName = options.fileName |> fromMaybe objectId - let directory = options.directory - let objectPath = directory <> "/" <> UUID.toText fileName + let objectPath = options.directory <> "/" <> UUID.toText fileName let preprocess = options.preprocess fileInfo <- preprocess fileInfo url <- case storage of - StaticDirStorage -> do - let destPath :: Text = "static/" <> objectPath - Directory.createDirectoryIfMissing True (cs $ "static/" <> directory) + StaticDirStorage { directory } -> do + let destPath :: Text = directory <> objectPath + Directory.createDirectoryIfMissing True (cs $ directory <> options.directory) fileInfo |> (.fileContent) @@ -225,7 +224,7 @@ createTemporaryDownloadUrlFromPathWithExpiredAt :: (?context :: context, ConfigP createTemporaryDownloadUrlFromPathWithExpiredAt validInSeconds objectPath = do publicUrlExpiredAt <- addUTCTime (fromIntegral validInSeconds) <$> getCurrentTime case storage of - StaticDirStorage -> do + StaticDirStorage {} -> do let frameworkConfig = ?context.frameworkConfig let urlSchemes = ["http://", "https://"] @@ -398,8 +397,8 @@ uploadToStorage field record = uploadToStorageWithOptions def field record removeFileFromStorage :: (?context :: context, ConfigProvider context) => StoredFile -> IO (Either MinioErr ()) removeFileFromStorage StoredFile { path, url } = do case storage of - StaticDirStorage -> do - let fullPath :: String = cs $ "static/" <> path + StaticDirStorage { directory } -> do + let fullPath :: String = cs $ directory <> path Directory.removeFile fullPath pure $ Right () S3Storage { connectInfo, bucket} -> do @@ -415,5 +414,5 @@ storage = ?context.frameworkConfig.appConfig -- | Returns the prefix for the storage. This is either @static/@ or an empty string depending on the storage. 
storagePrefix :: (?context :: ControllerContext) => Text storagePrefix = case storage of - StaticDirStorage -> "static/" + StaticDirStorage { directory } -> directory _ -> "" diff --git a/IHP/FileStorage/Types.hs b/IHP/FileStorage/Types.hs index 61962f5b8..320871c69 100644 --- a/IHP/FileStorage/Types.hs +++ b/IHP/FileStorage/Types.hs @@ -14,7 +14,7 @@ import qualified Network.Minio as Minio import qualified Network.Wai.Parse as Wai data FileStorage - = StaticDirStorage -- ^ Stores files publicly visible inside the project's @static@ directory + = StaticDirStorage { directory :: !Text } -- ^ Stores files publicly visible inside the project's @static@ directory | S3Storage { connectInfo :: Minio.ConnectInfo, bucket :: Text, baseUrl :: Text } -- ^ Stores files inside a S3 compatible cloud storage -- | Result of a 'storeFile' operation From 44d8992b89782cd14177de44492a0c39cec3124e Mon Sep 17 00:00:00 2001 From: Marc Scholten Date: Mon, 18 Nov 2024 18:25:46 -0800 Subject: [PATCH 47/47] enable split-sections for linux builds --- NixSupport/default.nix | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/NixSupport/default.nix b/NixSupport/default.nix index baa5b6120..ac271d43a 100644 --- a/NixSupport/default.nix +++ b/NixSupport/default.nix @@ -25,6 +25,7 @@ let ihpLibWithMakefile = filter { root = ihp; include = ["lib/IHP/Makefile.dist"]; name = "ihpLibWithMakefile"; }; ihpLibWithMakefileAndStatic = filter { root = ihp; include = ["lib/IHP/Makefile.dist" "lib/IHP/static"]; name = "ihpLibWithMakefileAndStatic"; }; + splitSections = if !pkgs.stdenv.hostPlatform.isDarwin then "-split-sections" else ""; schemaObjectFiles = let @@ -37,7 +38,7 @@ let build-generated-code export IHP=${ihpLibWithMakefile}/lib/IHP - ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/RunProdServer -hidir build/RunProdServer + ghc -O${if optimized then optimizationLevel else "0"} ${splitSections} $(make print-ghc-options) --make build/Generated/Types.hs -odir build/RunProdServer -hidir build/RunProdServer cp -r build $out ''; @@ -68,8 +69,8 @@ let mkdir -p build/bin build/RunUnoptimizedProdServer - echo ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer - ghc -O${if optimized then optimizationLevel else "0"} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer + echo ghc -O${if optimized then optimizationLevel else "0"} ${splitSections} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer + ghc -O${if optimized then optimizationLevel else "0"} ${splitSections} $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} Main.hs -o build/bin/RunProdServer -odir build/RunProdServer -hidir build/RunProdServer # Build job runner if there are any jobs if find -type d -iwholename \*/Job|grep .; then @@ -81,7 +82,7 @@ let echo "import Main ()" >> build/RunJobs.hs echo "main :: IO ()" >> build/RunJobs.hs echo "main = runScript Config.config (runJobWorkers (workers RootApplication))" >> build/RunJobs.hs - ghc -O${if optimized then optimizationLevel else "0"} -main-is 'RunJobs.main' $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} 
build/RunJobs.hs -o build/bin/RunJobs -odir build/RunProdServer -hidir build/RunProdServer + ghc -O${if optimized then optimizationLevel else "0"} ${splitSections} -main-is 'RunJobs.main' $(make print-ghc-options) ${if optimized then prodGhcOptions else ""} build/RunJobs.hs -o build/bin/RunJobs -odir build/RunProdServer -hidir build/RunProdServer fi; # Build all scripts if there are any @@ -117,7 +118,6 @@ let mv "build/bin/Script/$script_basename" "$out/bin/$script_basename"; done ''; - dontFixup = true; src = filter { root = pkgs.nix-gitignore.gitignoreSource [] projectPath; include = [filter.isDirectory "Makefile" (filter.matchExt "hs")]; exclude = ["static" "Frontend"]; name = "${appName}-source"; }; buildInputs = [allHaskellPackages]; nativeBuildInputs = [ pkgs.makeWrapper schemaObjectFiles]; @@ -171,7 +171,6 @@ in runHook postInstall ''; - dontFixup = true; src = pkgs.nix-gitignore.gitignoreSource [] projectPath; buildInputs = builtins.concatLists [ allNativePackages ]; nativeBuildInputs = builtins.concatLists [