diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Test.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Test.enso
index 9c54a5ad71d4..a40afa939597 100644
--- a/distribution/lib/Standard/Test/0.0.0-dev/src/Test.enso
+++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Test.enso
@@ -10,6 +10,10 @@ import project.Suite.Suite
 import project.Suite.Suite_Builder
 import project.Test_Result.Test_Result
 
+
+polyglot java import java.lang.Thread
+
+
 ## Contains only static methods
 type Test
     ## Construct a Test Suite object
@@ -167,3 +171,41 @@ type Test
         result = behavior
         State.put Clue prev_clue
         result
+
+    ## A helper method that retries the action a few times if it panics.
+       It helps make flaky tests more robust.
+
+       It waits for a short period of time between retries (in case the failures
+       are related e.g. to network conditions or propagation delays).
+
+       This function should be placed inside the `specify` block. It can be
+       used to retry the whole test, or it can be applied to a specific block
+       inside the test to only re-run that specific block (in such case, any
+       side effects must be considered carefully).
+
+       The method returns the first successful value returned by the action, or
+       fails with the last error thrown by the action after exhausting the
+       retry attempts.
+    with_retries : Any -> Any
+    with_retries ~action =
+        loc = Meta.get_source_location 1
+
+        milliseconds_between_attempts = 100
+        ## We give the CI a bit more attempts, as failures are more annoying there.
+           For local development, a bit fewer retries are enough - there it is more
+           likely that a failure is not flaky but an actual error, so we don't
+           want to spend too much time retrying when debugging the tests locally.
+        max_retries = if Environment.get "CI" . is_nothing . not then 100 else 20
+
+        go i =
+            Panic.catch Any action caught_panic->
+                # If the iterations are exhausted, we rethrow the panic.
+                if i > max_retries then Panic.throw caught_panic else
+                    if i % 10 == 0 then
+                        IO.println "Still failing after "+i.to_text+" retries. ("+loc.to_display_text+")"
+                    Thread.sleep milliseconds_between_attempts
+                    ## TODO This used to be
+                       @Tail_Call go (i+1)
+                       We should re-add the tail call once https://github.com/enso-org/enso/issues/9251 is fixed.
+                    go (i+1)
+        go 1
diff --git a/test/AWS_Tests/src/S3_Spec.enso b/test/AWS_Tests/src/S3_Spec.enso
index 5119d9ba31b6..7502f15c1a24 100644
--- a/test/AWS_Tests/src/S3_Spec.enso
+++ b/test/AWS_Tests/src/S3_Spec.enso
@@ -18,7 +18,6 @@ import enso_dev.Table_Tests.Util
 from Standard.Test import all
 
 import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
-from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 
 test_credentials -> AWS_Credential | Nothing =
@@ -159,7 +158,7 @@ add_specs suite_builder =
             Panic.with_finalizer secret_key_id.delete <|
                 secret_key_value = Enso_Secret.create "my_test_secret-AWS-secretkey" test_credentials.secret_access_key
                 secret_key_value.should_succeed
-                Panic.with_finalizer secret_key_value.delete <| with_retries <|
+                Panic.with_finalizer secret_key_value.delete <| Test.with_retries <|
                     r2 = S3.list_buckets (AWS_Credential.Key secret_key_id secret_key_value)
                     r2.should_succeed
                     r2.should_be_a Vector
@@ -328,7 +327,7 @@ add_specs suite_builder =
 
             new_file.delete . should_succeed
 
-            with_retries <|
+            Test.with_retries <|
                 new_file.exists . should_be_false
                 my_writable_dir.list . should_not_contain new_file
 
@@ -339,7 +338,7 @@ add_specs suite_builder =
                 new_file.read . should_equal "Hello"
 
             "World".write new_file on_existing_file=Existing_File_Behavior.Overwrite . should_succeed
-            with_retries <|
+            Test.with_retries <|
                 new_file.read . should_equal "World"
 
         group_builder.specify "should not be able to append to a file" <|
@@ -529,7 +528,7 @@ add_specs suite_builder =
             Panic.with_finalizer secret_key_id.delete <|
                 secret_key_value = Enso_Secret.create "datalink-secret-AWS-secretkey" test_credentials.secret_access_key
                 secret_key_value.should_succeed
-                Panic.with_finalizer secret_key_value.delete <| with_retries <|
+                Panic.with_finalizer secret_key_value.delete <| Test.with_retries <|
                     transformed_data_link_file.read . should_equal "Hello WORLD!"
 
         group_builder.specify "should be able to read a data link with a custom file format set" <| with_default_credentials <|
diff --git a/test/Base_Tests/src/Data/XML/XML_Spec.enso b/test/Base_Tests/src/Data/XML/XML_Spec.enso
index cf233ecad149..25a9cde16a26 100644
--- a/test/Base_Tests/src/Data/XML/XML_Spec.enso
+++ b/test/Base_Tests/src/Data/XML/XML_Spec.enso
@@ -254,7 +254,7 @@ add_specs suite_builder =
        doc = Data.read data.test_file
        doc.root_element.name . should_equal "class"
 
-    group_builder.specify "Can read from an endpoint" <|
+    group_builder.specify "Can read from an endpoint" <| Test.with_retries <|
        doc = Data.fetch "https://enso-data-samples.s3.us-west-1.amazonaws.com/sample.xml"
        doc.root_element.name . should_equal "class"
        doc.root_element.at 1 . name . should_equal "teacher"
@@ -295,4 +295,3 @@ main filter=Nothing =
     suite = Test.build suite_builder->
         add_specs suite_builder
     suite.run_with_filter filter
-
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
index 7b5d61e9949d..c1fab3f4e959 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
@@ -10,7 +10,6 @@ from Standard.Test import all
 import Standard.Test.Test_Environment
 
 import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
-from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
     suite_builder.group "DataLinks in Enso Cloud" pending=setup.real_cloud_pending group_builder->
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
index 3c05a7aa9da1..a371f266720d 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
@@ -4,7 +4,6 @@ import Standard.Base.Errors.Illegal_State.Illegal_State
 
 import Standard.Test.Test_Environment
 
-polyglot java import java.lang.Thread
 polyglot java import java.security.KeyStore
 polyglot java import javax.net.ssl.SSLContext
 polyglot java import javax.net.ssl.TrustManagerFactory
@@ -158,28 +157,3 @@ type Mock_Credentials
     about_to_expire self -> Mock_Credentials =
         new_expire_at = Date_Time.now + (Duration.new minutes=1)
         Mock_Credentials.Value self.access_token new_expire_at self.refresh_token self.refresh_url self.client_id
-
-## PRIVATE
-   A helper method that retries the action a few times, to allow tests that may fail due to propagation delays to pass.
-   This is needed, because after creating a secret, there is a slight delay before it shows up within `list`.
-   To make tests robust, we add this retry logic.
-with_retries ~action =
-    loc = Meta.get_source_location 1
-
-    # Delays are in seconds
-    sleep_time = 0.1
-    total_sleep_delay = if Environment.get "CI" . is_nothing . not then 10 else 2
-
-    max_iterations = total_sleep_delay / sleep_time
-    go i =
-        Panic.catch Any action caught_panic->
-            # If the iterations are exhausted, we rethrow the panic.
-            if i > max_iterations then Panic.throw caught_panic else
-                if i % 10 == 0 then
-                    IO.println "Still failing after "+i.to_text+" retries. ("+loc.to_display_text+")"
-                Thread.sleep (1000*sleep_time . floor)
-                ## TODO This used to be
-                   @Tail_Call go (i+1)
-                   We should re-add the tail call once https://github.com/enso-org/enso/issues/9251 is fixed.
-                go (i+1)
-    go 1
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Enso_File_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Enso_File_Spec.enso
index 9c6093437719..dc785e3d7ca0 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Enso_File_Spec.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Enso_File_Spec.enso
@@ -7,7 +7,6 @@ from Standard.Test import all
 import Standard.Test.Test_Environment
 
 import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
-from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
     suite_builder.group "Enso Cloud Files" pending=setup.real_cloud_pending group_builder->
@@ -30,7 +29,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             delete_on_fail caught_panic =
                 my_dir.delete
                 Panic.throw caught_panic
-            Panic.catch Any handler=delete_on_fail <| with_retries <|
+            Panic.catch Any handler=delete_on_fail <| Test.with_retries <|
                 my_dir.is_directory . should_be_true
                 my_dir.exists . should_be_true
                 my_dir.name . should_equal my_name
@@ -38,7 +37,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
 
             my_dir.delete . should_succeed
 
-            with_retries <|
+            Test.with_retries <|
                 Enso_File.root.list . should_not_contain my_dir
 
                 # TODO the dir still shows as 'existing' after deletion, probably because it still is there in the Trash
@@ -98,7 +97,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
 
             dir1.delete . should_succeed
 
-            with_retries <|
+            Test.with_retries <|
                 dir1.exists . should_be_false
                 # The inner directory should also have been trashed if its parent is removed
                 dir2.exists . should_be_false
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Secrets_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Secrets_Spec.enso
index 323953ea56c2..4cf103ec2f80 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Secrets_Spec.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Secrets_Spec.enso
@@ -16,7 +16,6 @@ from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
 
 import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
 import project.Network.Enso_Cloud.Cloud_Tests_Setup.Mock_Credentials
-from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 polyglot java import org.enso.base.enso_cloud.EnsoSecretAccessDenied
 polyglot java import org.enso.base.enso_cloud.ExternalLibrarySecretHelper
@@ -35,19 +34,19 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             my_secret.name . should_equal "my_test_secret"
             my_secret.id.is_empty . should_be_false
 
-            delete_on_fail my_secret <| with_retries <|
+            delete_on_fail my_secret <| Test.with_retries <|
                 Enso_Secret.list . should_contain my_secret
 
             my_secret.delete . should_succeed
 
-            with_retries <|
+            Test.with_retries <|
                 Enso_Secret.list . should_not_contain my_secret
 
         group_builder.specify "should allow to get a secret by name or path" <|
             created_secret = Enso_Secret.create "my_test_secret-2" "my_secret_value"
             created_secret.should_succeed
             Panic.with_finalizer created_secret.delete <|
-                with_retries <|
+                Test.with_retries <|
                     fetched_secret = Enso_Secret.get "my_test_secret-2"
                     fetched_secret . should_equal created_secret
 
@@ -62,7 +61,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             created_secret.should_succeed
             wait_until_secret_is_propagated created_secret
             Panic.with_finalizer created_secret.delete <|
-                with_retries <|
+                Test.with_retries <|
                     r1 = Enso_Secret.create "my_test_secret-3" "my_secret_value"
 
                     ## If the secret was created due to race condition - we clean it up
@@ -76,7 +75,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             secret1 = Enso_Secret.create "my_test_secret-6" "Yet another Mystery"
             secret1.should_succeed
 
-            Panic.with_finalizer secret1.delete <| with_retries <|
+            Panic.with_finalizer secret1.delete <| Test.with_retries <|
                 https = setup.httpbin_secure_client
                 response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Secret" secret1])
                 response.decode_as_json.at "headers" . at "X-My-Secret" . should_equal "Yet another Mystery"
@@ -85,7 +84,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             secret_token = Enso_Secret.create "my_test_secret-7" "MySecretToken"
             secret_token.should_succeed
 
-            Panic.with_finalizer secret_token.delete <| with_retries <|
+            Panic.with_finalizer secret_token.delete <| Test.with_retries <|
                 https = setup.httpbin_secure_client
                 response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.authorization_bearer secret_token])
                 response_json = response.decode_as_json
@@ -97,7 +96,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             Panic.with_finalizer secret_username.delete <|
                 secret_password = Enso_Secret.create "my_test_secret-9" "MyP@ssword"
                 secret_password.should_succeed
-                Panic.with_finalizer secret_password.delete <| with_retries <|
+                Panic.with_finalizer secret_password.delete <| Test.with_retries <|
                     https = setup.httpbin_secure_client
                     response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.authorization_basic secret_username secret_password])
 
@@ -108,7 +107,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
         group_builder.specify "should allow to derive values from secrets" <|
             secret1 = Enso_Secret.create "my_test_secret-10" "Something"
             secret1.should_succeed
-            Panic.with_finalizer secret1.delete <| with_retries <|
+            Panic.with_finalizer secret1.delete <| Test.with_retries <|
                 x = Derived_Secret_Value.from "X"
                 y = Derived_Secret_Value.from "Y"
                 v1 = x + y
@@ -134,7 +133,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
         group_builder.specify "does not allow secrets in HTTP headers" pending=setup.httpbin_pending <|
             secret1 = Enso_Secret.create "my_test_secret-11" "Something"
             secret1.should_succeed
-            Panic.with_finalizer secret1.delete <| with_retries <|
+            Panic.with_finalizer secret1.delete <| Test.with_retries <|
                 uri = setup.httpbin_uri / "get"
                 r1 = uri.fetch headers=[Header.new "X-My-Secret" secret1]
                 r1.should_fail_with Illegal_Argument
@@ -143,7 +142,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
         group_builder.specify "API exposing secrets to external libraries should not be accessible from unauthorized code" <|
             secret1 = Enso_Secret.create "my_test_secret-12" "Something"
             secret1.should_succeed
-            Panic.with_finalizer secret1.delete <| with_retries <|
+            Panic.with_finalizer secret1.delete <| Test.with_retries <|
                 java_repr = as_hideable_value secret1
                 Test.expect_panic EnsoSecretAccessDenied <|
                     ExternalLibrarySecretHelper.resolveValue java_repr
@@ -156,7 +155,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             nested_secret.should_succeed
 
             delete_on_fail nested_secret <|
-                with_retries <|
+                Test.with_retries <|
                     Enso_Secret.list parent=subdirectory . should_contain nested_secret
                     Enso_Secret.exists "my-nested-secret-1" parent=subdirectory . should_be_true
                     Enso_Secret.get "my-nested-secret-1" parent=subdirectory . should_equal nested_secret
@@ -169,7 +168,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             nested_secret.delete . should_succeed
 
             # Secret should disappear from the list after deletion:
-            with_retries <|
+            Test.with_retries <|
                 Enso_Secret.list parent=subdirectory . should_not_contain nested_secret
                 Enso_Secret.exists "my-nested-secret-1" parent=subdirectory . should_be_false
                 Enso_Secret.get "my-nested-secret-1" parent=subdirectory . should_fail_with Not_Found
@@ -181,7 +180,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             nested_secret = Enso_Secret.create "my-nested-secret-2" "NESTED_secret_value" parent=subdirectory
             nested_secret.should_succeed
             Panic.with_finalizer nested_secret.delete <|
-                with_retries <|
+                Test.with_retries <|
                     https = setup.httpbin_secure_client
                     response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Nested-Secret" nested_secret])
                     response.decode_as_json.at "headers" . at "X-My-Nested-Secret" . should_equal "NESTED_secret_value"
@@ -193,7 +192,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
             nested_secret = Enso_Secret.create "my-nested-secret-3" "Value-A" parent=subdirectory
             nested_secret.should_succeed
             Panic.with_finalizer nested_secret.delete <|
-                with_retries <|
+                Test.with_retries <|
                     https = setup.httpbin_secure_client
                     response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Nested-Secret" nested_secret])
                     response.decode_as_json.at "headers" . at "X-My-Nested-Secret" . should_equal "Value-A"
@@ -201,7 +200,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
                     nested_secret.update_value "Value-New-B" . should_succeed
 
                     # Not exactly sure if retries are needed here, but for test stability preferred to keep them.
-                    with_retries <|
+                    Test.with_retries <|
                         # Flushing caches to avoid the old value getting stuck after the first retry fails due to lack of propagation yet.
                         Enso_User.flush_caches
                         https = setup.httpbin_secure_client
@@ -221,7 +220,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
                 Enso_Secret.create ("foo"+Random.uuid) "baz" . should_fail_with Forbidden_Operation
 
                 # Get should still work
-                with_retries <| Enso_Secret.get "my_test_secret-13" . should_equal secret1
+                Test.with_retries <| Enso_Secret.get "my_test_secret-13" . should_equal secret1
 
         group_builder.specify "should be able to retry fetching a secret if the token is expired" pending=setup.httpbin_pending <|
             mock_setup = Cloud_Tests_Setup.prepare_mock_setup
@@ -250,7 +249,7 @@ main filter=Nothing =
 
 
 wait_until_secret_is_propagated secret =
-    with_retries <| Enso_Secret.list . should_contain secret
+    Test.with_retries <| Enso_Secret.list . should_contain secret
 
 delete_on_fail resource ~action =
     on_failure caught_panic =
diff --git a/test/Base_Tests/src/Network/Http/Http_Auto_Parse_Spec.enso b/test/Base_Tests/src/Network/Http/Http_Auto_Parse_Spec.enso
index 9508bc5d3c4f..5de8f6338023 100644
--- a/test/Base_Tests/src/Network/Http/Http_Auto_Parse_Spec.enso
+++ b/test/Base_Tests/src/Network/Http/Http_Auto_Parse_Spec.enso
@@ -27,11 +27,11 @@ add_specs suite_builder =
             . add_query_argument "Content-Type" "text/plain; charset=windows-1250"
             . add_query_argument "base64_response_data" (Base_64.encode_text content_windows_1250 Encoding.windows_1250)
 
-        group_builder.specify "should detect the encoding from Content-Type in fetch" <|
+        group_builder.specify "should detect the encoding from Content-Type in fetch" <| Test.with_retries <|
            url_utf8.fetch . should_equal content_utf
            url_windows_1250.fetch . should_equal content_windows_1250
 
-        group_builder.specify "should detect the encoding from Content-Type in decode_as_text" <|
+        group_builder.specify "should detect the encoding from Content-Type in decode_as_text" <| Test.with_retries <|
            r1 = url_utf8.fetch format=Raw_Response
            r1.decode_as_text . should_equal content_utf
 
@@ -42,7 +42,7 @@ add_specs suite_builder =
            # We may override the encoding detected from Content-Type:
            r3.decode_as_text Encoding.ascii . should_fail_with Encoding_Error
 
-        group_builder.specify "should detect the encoding from Content-Type in decode_as_json" <|
+        group_builder.specify "should detect the encoding from Content-Type in decode_as_json" <| Test.with_retries <|
            r1 = url_utf8.fetch format=Raw_Response
            r1.decode_as_json . should_equal ["x", "Hello! 😊👍 ąę"]
 
diff --git a/test/Base_Tests/src/Network/Http/Http_Data_Link_Spec.enso b/test/Base_Tests/src/Network/Http/Http_Data_Link_Spec.enso
index 20890565530b..32ee972197ad 100644
--- a/test/Base_Tests/src/Network/Http/Http_Data_Link_Spec.enso
+++ b/test/Base_Tests/src/Network/Http/Http_Data_Link_Spec.enso
@@ -17,53 +17,53 @@ main filter=Nothing =
 add_specs suite_builder =
     data_link_root = enso_project.data / "datalinks"
     suite_builder.group "HTTP DataLink" pending=pending_has_url group_builder->
-        group_builder.specify "should allow to read a web resource" <|
+        group_builder.specify "should allow to read a web resource" <| Test.with_retries <|
            f = replace_url_in_data_link (data_link_root / "example-http.datalink")
            r = f.read
            # Defaults to reading as text, because the resource read is called `js.txt`, implying Plain_Text format
            r.should_be_a Text
            r.trim.should_equal '{"hello": "world"}'
 
-        group_builder.specify "should allow to read a web resource, with explicitly using default format" <|
+        group_builder.specify "should allow to read a web resource, with explicitly using default format" <| Test.with_retries <|
            f = replace_url_in_data_link (data_link_root / "example-http-format-explicit-default.datalink")
            r = f.read
            r.should_be_a Text
            r.trim.should_equal '{"hello": "world"}'
 
-        group_builder.specify "should allow to read a web resource, setting format to JSON" <|
+        group_builder.specify "should allow to read a web resource, setting format to JSON" <| Test.with_retries <|
            f = replace_url_in_data_link (data_link_root / "example-http-format-json.datalink")
            r = f.read
            js = '{"hello": "world"}'.parse_json
            r.should_equal js
            r.get "hello" . should_equal "world"
 
-        group_builder.specify "will fail if invalid format is used" <|
+        group_builder.specify "will fail if invalid format is used" <| Test.with_retries <|
            f = replace_url_in_data_link (data_link_root / "example-http-format-invalid.datalink")
            r = f.read
            r.should_fail_with Illegal_State
           r.catch.to_display_text.should_contain "Unknown format"
 
-        group_builder.specify "will fail if an unloaded format is used" <|
+        group_builder.specify "will fail if an unloaded format is used" <| Test.with_retries <|
            # We assume that Base_Tests _do not_ import Standard.Table
            f = replace_url_in_data_link (data_link_root / "example-http-format-delimited.datalink")
            r = f.read
            r.should_fail_with Illegal_State
            r.catch.to_display_text.should_contain "Unknown format"
 
-        group_builder.specify "but will succeed if an unknown format is not used because it was overridden" <|
+        group_builder.specify "but will succeed if an unknown format is not used because it was overridden" <| Test.with_retries <|
            f = replace_url_in_data_link (data_link_root / "example-http-format-delimited.datalink")
            r = f.read Plain_Text
            r.should_be_a Text
            r.trim.should_equal '{"hello": "world"}'
 
-        group_builder.specify "should be able to follow a datalink from HTTP in Data.read" <|
+        group_builder.specify "should be able to follow a datalink from HTTP in Data.read" <| Test.with_retries <|
            r1 = Data.read base_url_with_slash+"dynamic.datalink" JSON_Format
            r1.should_equal ('{"hello": "world"}'.parse_json)
 
            r2 = Data.read base_url_with_slash+"dynamic-datalink" Plain_Text
            r2.trim.should_equal '{"hello": "world"}'
 
-        group_builder.specify "should be able to follow a datalink from HTTP in Data.fetch/post, if auto parse is on" <|
+        group_builder.specify "should be able to follow a datalink from HTTP in Data.fetch/post, if auto parse is on" <| Test.with_retries <|
            r1 = Data.fetch base_url_with_slash+"dynamic.datalink"
            r1.trim.should_equal '{"hello": "world"}'
 
@@ -73,7 +73,7 @@ add_specs suite_builder =
            r3 = Data.post base_url_with_slash+"dynamic.datalink"
            r3.trim.should_equal '{"hello": "world"}'
 
-        group_builder.specify "will return raw datalink config data in Data.fetch/post if auto parse is off" <|
+        group_builder.specify "will return raw datalink config data in Data.fetch/post if auto parse is off" <| Test.with_retries <|
            r1 = Data.fetch base_url_with_slash+"dynamic.datalink" format=Raw_Response
            r1.should_be_a Response
 
@@ -97,7 +97,7 @@ add_specs suite_builder =
            r2_plain = r2.decode Plain_Text
            r2_plain.should_contain '"libraryName": "Standard.Base"'
 
-        group_builder.specify "should follow a datalink encountered in Data.download" <|
+        group_builder.specify "should follow a datalink encountered in Data.download" <| Test.with_retries <|
            target_file = enso_project.data / "transient" / "my_download.txt"
            target_file.delete_if_exists
            Data.download base_url_with_slash+"dynamic.datalink" target_file . should_equal target_file
diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso
index 536135c9a819..03741bd2b9d7 100644
--- a/test/Base_Tests/src/Network/Http_Spec.enso
+++ b/test/Base_Tests/src/Network/Http_Spec.enso
@@ -71,10 +71,11 @@ add_specs suite_builder =
 
             http = HTTP.new (follow_redirects = False)
             http.follow_redirects.should_equal False
-            r = http.request (Request.new HTTP_Method.Get base_url_with_slash+"test_redirect")
-            r.should_fail_with HTTP_Error
-            r.catch.should_be_a HTTP_Error.Status_Error
-            r.catch.status_code.code . should_equal 302
+            Test.with_retries <|
+                r = http.request (Request.new HTTP_Method.Get base_url_with_slash+"test_redirect")
+                r.should_fail_with HTTP_Error
+                r.catch.should_be_a HTTP_Error.Status_Error
+                r.catch.status_code.code . should_equal 302
 
         group_builder.specify "should create HTTP client with proxy setting" <|
             proxy_setting = Proxy.Address "example.com" 80
@@ -91,7 +92,7 @@ add_specs suite_builder =
         url_head = base_url_with_slash + "head"
         url_options = base_url_with_slash + "options"
 
-        group_builder.specify "Can perform a GET with a JSON response" <|
+        group_builder.specify "Can perform a GET with a JSON response" <| Test.with_retries <|
            response = Data.fetch url_get
            expected_response = Json.parse <| '''
                {
@@ -112,7 +113,7 @@ add_specs suite_builder =
            uri_response = url_get.to URI . fetch
            compare_responses uri_response expected_response
 
-        group_builder.specify "Can perform a HEAD" <|
+        group_builder.specify "Can perform a HEAD" <| Test.with_retries <|
            response = Data.fetch url_head method=HTTP_Method.Head
            response.code.code . should_equal 200
            response.decode_as_text . should_equal ''
@@ -121,7 +122,7 @@ add_specs suite_builder =
            uri_response.code.code . should_equal 200
            uri_response.decode_as_text . should_equal ''
 
-        group_builder.specify "Can perform an OPTIONS" <|
+        group_builder.specify "Can perform an OPTIONS" <| Test.with_retries <|
            response = Data.fetch url_options method=HTTP_Method.Options
            response.code.code . should_equal 200
            response.decode_as_text . should_equal ''
@@ -130,14 +131,14 @@ add_specs suite_builder =
            uri_response.code.code . should_equal 200
            uri_response.decode_as_text . should_equal ''
 
-        group_builder.specify "Can perform auto-parse" <|
+        group_builder.specify "Can perform auto-parse" <| Test.with_retries <|
            response = Data.fetch url_get
            response.at "headers" . at "Content-Length" . should_equal "0"
            uri_response = url_get.to_uri.fetch
            uri_response.at "headers" . at "Content-Length" . should_equal "0"
 
-        group_builder.specify "Can skip auto-parse" <|
+        group_builder.specify "Can skip auto-parse" <| Test.with_retries <|
            response = Data.fetch url_get format=Raw_Response
            response.code.code . should_equal 200
            expected_response = Json.parse <| '''
                {
@@ -161,7 +162,7 @@ add_specs suite_builder =
            compare_responses uri_response.decode_as_json expected_response
 
         group_builder.specify "Can still perform request when output context is disabled" <|
-            run_with_and_without_output <|
+            run_with_and_without_output <| Test.with_retries <|
                 Data.fetch url_get format=Raw_Response . code . code . should_equal 200
                 Data.fetch url_get method=HTTP_Method.Head format=Raw_Response . code . code . should_equal 200
                 Data.fetch url_get method=HTTP_Method.Options format=Raw_Response . code . code . should_equal 200
@@ -181,7 +182,7 @@ add_specs suite_builder =
            Data.fetch "zxcv://bad.scheme" . should_fail_with Illegal_Argument
            Data.fetch "" . should_fail_with Illegal_Argument
 
-        group_builder.specify "can select the version" <|
+        group_builder.specify "can select the version" <| Test.with_retries <|
            req = Request.get url_get
            r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req . decode_as_json
            r2.at "headers" . at "Connection" . should_equal "Upgrade, HTTP2-Settings"
@@ -194,20 +195,20 @@ add_specs suite_builder =
            header_names.should_not_contain "upgrade"
 
     suite_builder.group "HTTP in Data.read" pending=pending_has_url group_builder->
-        group_builder.specify "can use URI in Data.read" <|
+        group_builder.specify "can use URI in Data.read" <| Test.with_retries <|
            r = Data.read (URI.from url_get)
            r.should_be_a JS_Object
 
-        group_builder.specify "works if HTTP is uppercase" <|
+        group_builder.specify "works if HTTP is uppercase" <| Test.with_retries <|
            r = Data.fetch (url_get.replace "http" "HTTP")
            r.should_be_a JS_Object
 
-        group_builder.specify "should follow redirects" <|
+        group_builder.specify "should follow redirects" <| Test.with_retries <|
            r = Data.read base_url_with_slash+"test_redirect"
            r.should_be_a Text
            r.trim . should_equal '{"hello": "world"}'
 
-        group_builder.specify "can override the format" <|
+        group_builder.specify "can override the format" <| Test.with_retries <|
            auto_response = Data.read url_get
            auto_response.should_be_a JS_Object
 
@@ -220,7 +221,7 @@ add_specs suite_builder =
         url_patch = base_url_with_slash + "patch"
         url_delete = base_url_with_slash + "delete"
 
-        group_builder.specify "Can perform a Request_Body.Text POST" <|
+        group_builder.specify "Can perform a Request_Body.Text POST" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text "hello world")
            expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
            compare_responses response expected_response
@@ -228,24 +229,24 @@ add_specs suite_builder =
            url_response = url_post.to_uri.post (Request_Body.Text "hello world")
            compare_responses url_response expected_response
 
-        group_builder.specify "Can perform a Request_Body.Json JSON POST" <|
+        group_builder.specify "Can perform a Request_Body.Json JSON POST" <| Test.with_retries <|
            json = Json.parse '{"a": "asdf", "b": 123}'
            response = Data.post url_post (Request_Body.Json json)
            expected_response = echo_response_template "POST" "/post" '{"a":"asdf","b":123}' content_type="application/json"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform a JSON POST" <|
+        group_builder.specify "Can perform a JSON POST" <| Test.with_retries <|
            json = Json.parse '{"a": "asdf", "b": 123}'
            response = Data.post url_post json
            expected_response = echo_response_template "POST" "/post" '{"a":"asdf","b":123}' content_type="application/json"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform an object Request_Body.Json POST" <|
+        group_builder.specify "Can perform an object Request_Body.Json POST" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Json (Test_Type.Aaa "abc"))
            expected_response = echo_response_template "POST" "/post" '{"type":"Test_Type","constructor":"Aaa","s":"abc"}' content_type="application/json"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform an object JSON POST" <|
+        group_builder.specify "Can perform an object JSON POST" <| Test.with_retries <|
            response = Data.post url_post (Test_Type.Bbb 12)
            expected_response = echo_response_template "POST" "/post" '{"type":"Test_Type","constructor":"Bbb","i":12}' content_type="application/json"
            compare_responses response expected_response
@@ -256,7 +257,7 @@ add_specs suite_builder =
         group_builder.specify "can handle a bad .to_json" <|
            Data.post url_post (Bad_To_Json.Aaa "abcd") . should_fail_with Illegal_Argument
 
-        group_builder.specify "Can perform a Text POST with explicit encoding" <|
+        group_builder.specify "Can perform a Text POST with explicit encoding" <| Test.with_retries <|
            body = Request_Body.Text 'Hello World!' encoding=Encoding.utf_16_le
            response = Data.post url_post body
            expected_response = echo_response_template "POST" "/post" "Hello World!" content_type="text/plain; charset=UTF-16LE" content_length=24
@@ -265,34 +266,34 @@ add_specs suite_builder =
            uri_response = url_post.to_uri.post body
            compare_responses uri_response expected_response
 
-        group_builder.specify "Can perform a Text POST with explicit content type" <|
+        group_builder.specify "Can perform a Text POST with explicit content type" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text 'a,b,c\n' content_type="text/csv")
            expected_response = echo_response_template "POST" "/post" 'a,b,c\n' content_type="text/csv; charset=UTF-8"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform a File POST" <|
+        group_builder.specify "Can perform a File POST" <| Test.with_retries <|
            test_file = enso_project.data / "sample.txt"
            response = Data.post url_post (Request_Body.Binary test_file)
            response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
 
            expected_text = test_file.read_text
            response . at "data" . should_equal expected_text
 
-        group_builder.specify "Can perform a binary File POST" <|
+        group_builder.specify "Can perform a binary File POST" <| Test.with_retries <|
            test_file = enso_project.data / "sample.png"
            response = Data.post url_post (Request_Body.Binary test_file)
            response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
           response.at "headers" . at "Content-Length" . should_equal test_file.size.to_text
           response.at "data" . should_start_with '\uFFFDPNG'
 
-        group_builder.specify "Can perform a url-encoded form POST" <|
+        group_builder.specify "Can perform a url-encoded form POST" <| Test.with_retries <|
            test_file = enso_project.data / "sample.txt"
            form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
            response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True)
            response.at "headers" . at "Content-Type" . should_equal "application/x-www-form-urlencoded"
            response.at "data" . replace "%0D%" "%" . should_equal 'key=val&a_file=Cupcake+ipsum+dolor+sit+amet.+Caramels+tootsie+roll+cake+ice+cream.+Carrot+cake+apple+pie+gingerbread+chocolate+cake+pudding+tart+souffl%C3%A9+jelly+beans+gummies.%0A%0ATootsie+roll+chupa+chups+muffin+croissant+fruitcake+jujubes+danish+cotton+candy+danish.+Oat+cake+chocolate+fruitcake+halvah+icing+oat+cake+toffee+powder.+Pastry+drag%C3%A9e+croissant.+Ice+cream+candy+canes+dessert+muffin+sugar+plum+tart+jujubes.%0A'
 
-        group_builder.specify "Can perform a multipart form POST" <|
+        group_builder.specify "Can perform a multipart form POST" <| Test.with_retries <|
            test_file = enso_project.data / "sample.png"
            form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
            response = Data.post url_post (Request_Body.Form_Data form_data)
@@ -300,29 +301,29 @@ add_specs suite_builder =
            response_json.at "headers" . at "Content-Type" . should_start_with "multipart/form-data; boundary="
            response_json.at "data" . is_empty . should_be_false
 
-        group_builder.specify "Can perform a File POST with auto-conversion" <|
+        group_builder.specify "Can perform a File POST with auto-conversion" <| Test.with_retries <|
            test_file = enso_project.data / "sample.txt"
            response = Data.post url_post test_file
            response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
            expected_text = test_file.read_text
            response . at "data" . should_equal expected_text
 
-        group_builder.specify "Can perform a Text POST with auto-conversion" <|
+        group_builder.specify "Can perform a Text POST with auto-conversion" <| Test.with_retries <|
            response = Data.post url_post "hello world"
            expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform a Request_Body.Text PUT" <|
+        group_builder.specify "Can perform a Request_Body.Text PUT" <| Test.with_retries <|
            response = Data.post url_put (Request_Body.Text "hello world") method=HTTP_Method.Put
            expected_response = echo_response_template "PUT" "/put" "hello world" content_type="text/plain; charset=UTF-8"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform a Request_Body.Text PATCH" <|
+        group_builder.specify "Can perform a Request_Body.Text PATCH" <| Test.with_retries <|
            response = Data.post url_patch (Request_Body.Text "hello world" content_type="application/diff") method=HTTP_Method.Patch
            expected_response = echo_response_template "PATCH" "/patch" "hello world" content_type="application/diff; charset=UTF-8"
            compare_responses response expected_response
 
-        group_builder.specify "Can perform a DELETE" <|
+        group_builder.specify "Can perform a DELETE" <| Test.with_retries <|
            response = Data.post url_delete method=HTTP_Method.Delete
            expected_response = Json.parse <| '''
                {
@@ -343,12 +344,12 @@ add_specs suite_builder =
                }
            compare_responses response expected_response
 
-        group_builder.specify "Can skip auto-parse" <|
+        group_builder.specify "Can skip auto-parse" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text "hello world") response_format=Raw_Response
            expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
            compare_responses response.decode_as_json expected_response
 
-        group_builder.specify "Can send a custom header" <|
+        group_builder.specify "Can send a custom header" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text "hello world") headers=[Header.new "Custom" "asdf", Header.new "Another" 'a:b: c - "ddd"']
            expected_response = Json.parse <| '''
                {
@@ -372,7 +373,7 @@ add_specs suite_builder =
                }
            compare_responses response expected_response
 
-        group_builder.specify "can handle HTTP errors" <|
+        group_builder.specify "can handle HTTP errors" <| Test.with_retries <|
            # This should give us 405 method not allowed
            r1 = Data.post url_delete
            r1.should_fail_with HTTP_Error
@@ -430,7 +431,7 @@ add_specs suite_builder =
     suite_builder.group "Headers" pending=pending_has_url group_builder->
         url_post = base_url_with_slash + "post"
 
-        group_builder.specify "Content-type in the body is respected" <|
+        group_builder.specify "Content-type in the body is respected" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}' content_type="application/json")
            expected_response = Json.parse <| '''
                {
@@ -452,7 +453,7 @@ add_specs suite_builder =
                }
            compare_responses response expected_response
 
-        group_builder.specify "Content type in the header list is respected" <|
+        group_builder.specify "Content type in the header list is respected" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}') headers=[Header.content_type "application/json"]
            expected_response = Json.parse <| '''
                {
@@ -474,7 +475,7 @@ add_specs suite_builder =
                }
            compare_responses response expected_response
 
-        group_builder.specify "Multiple content types in the header list are respected" <|
+        group_builder.specify "Multiple content types in the header list are respected" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}') headers=[Header.content_type "application/json", Header.content_type "text/plain"]
            ## Our http-test-helper gets 2 Content-Type headers and merges them in the response.
               How this is interpreted in practice depends on the server.
@@ -498,7 +499,7 @@ add_specs suite_builder =
                }
            compare_responses response expected_response
 
-        group_builder.specify "Unspecified content type defaults to text/plain" <|
+        group_builder.specify "Unspecified content type defaults to text/plain" <| Test.with_retries <|
            response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}')
            expected_response = Json.parse <| '''
                {
@@ -526,7 +527,7 @@ add_specs suite_builder =
         group_builder.specify "Cannot specify content type (implicitly via explicit text encoding) in both body and headers" <|
            Data.post url_post (Request_Body.Text "hello world" encoding=Encoding.utf_8) headers=[Header.content_type "application/json"] . should_fail_with Illegal_Argument
 
-        group_builder.specify "can also read headers from a response, when returning a raw response" <|
+        group_builder.specify "can also read headers from a response, when returning a raw response" <| Test.with_retries <|
            r1 = Data.post url_post (Request_Body.Text "hello world") response_format=Raw_Response
            r1.should_be_a Response
            # The result is JSON data:
@@ -545,7 +546,7 @@ add_specs suite_builder =
            r2.headers.find (p-> p.name.equals_ignore_case "Test-Header") . value . should_equal "test-value"
            r2.headers.find (p-> p.name.equals_ignore_case "Other-Header") . value . should_equal "some other value"
 
-        group_builder.specify "is capable of handling aliasing headers" <|
+        group_builder.specify "is capable of handling aliasing headers" <| Test.with_retries <|
            uri = URI.from (base_url_with_slash + "test_headers")
                . add_query_argument "my-header" "value-1"
                . add_query_argument "my-header" "value-2"
@@ -590,7 +591,7 @@ add_specs suite_builder =
         ## Checking this error partially as a warning - I spent a lot of time debugging why I'm getting
            such an error. Apparently it happens when the httpbin server was crashing without sending any
           response.
-        group_builder.specify "should be able to handle server crash resulting in no response" pending=pending_has_url <|
+        group_builder.specify "should be able to handle server crash resulting in no response" pending=pending_has_url <| Test.with_retries <|
            err = Data.fetch (base_url_with_slash+"crash")
            err.should_fail_with Request_Error
            err.catch.error_type . should_equal "java.io.IOException"
@@ -605,7 +606,7 @@ add_specs suite_builder =
            err.should_fail_with HTTP_Error
 
     suite_builder.group "Http Auth" group_builder->
-        group_builder.specify "should support Basic user+password authentication" pending=pending_has_url <|
+        group_builder.specify "should support Basic user+password authentication" pending=pending_has_url <| Test.with_retries <|
            url = base_url_with_slash + "test_basic_auth"
 
            # Correct user and password
@@ -629,7 +630,7 @@ add_specs suite_builder =
            r4.should_fail_with HTTP_Error
            r4.catch.status_code.code . should_equal 403
 
-        group_builder.specify "should support Bearer token authentication" pending=pending_has_url <|
+        group_builder.specify "should support Bearer token authentication" pending=pending_has_url <| Test.with_retries <|
            url = base_url_with_slash + "test_token_auth"
 
            # Correct token
diff --git a/test/Base_Tests/src/Network/URI_Spec.enso b/test/Base_Tests/src/Network/URI_Spec.enso
index 23f18d92df0f..31f5d944cf94 100644
--- a/test/Base_Tests/src/Network/URI_Spec.enso
+++ b/test/Base_Tests/src/Network/URI_Spec.enso
@@ -7,7 +7,6 @@ import Standard.Base.Network.HTTP.Request.Request
 from Standard.Test import all
 
 import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
-from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 add_specs suite_builder =
     ## To run this test locally:
@@ -312,7 +311,7 @@ add_specs suite_builder =
                 uri.to_text . should_equal (cloud_setup.httpbin_secure_uri.to_text + "get?arg1=__SECRET__&arg2=plain+value&arg3=__SECRET__")
                 uri.raw_query . should_fail_with Enso_Secret_Error
 
-                with_retries <|
+                Test.with_retries <|
                     response = cloud_setup.httpbin_secure_client.request (Request.get uri)
                     decode_query_params response.decode_as_json . should_equal [["arg1", "My Very Secret Value"], ["arg2", "plain value"], ["arg3", s2]]
 
diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso
index 99bbd1dc1a13..23f1201570f5 100644
--- a/test/Table_Tests/src/Database/Postgres_Spec.enso
+++ b/test/Table_Tests/src/Database/Postgres_Spec.enso
@@ -31,7 +31,6 @@ from project.Database.Types.Postgres_Type_Mapping_Spec import default_text
 
 import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
 import enso_dev.Base_Tests.Network.Http.Http_Test_Setup
-from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 
 type Basic_Test_Data
@@ -704,7 +703,7 @@ add_table_specs suite_builder =
 
             cloud_setup.with_prepared_environment <| with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret->
                 my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5)
-                with_secret "my_postgres_app_name" my_secret_name app_name_secret-> with_retries <|
+                with_secret "my_postgres_app_name" my_secret_name app_name_secret-> Test.with_retries <|
                     details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret)
                     # We set the ApplicationName option, so that we can see that secrets can be used in custom properties.
                     options = Connection_Options.Value [["ApplicationName", app_name_secret]]
diff --git a/test/Table_Tests/src/Database/Snowflake_Spec.enso b/test/Table_Tests/src/Database/Snowflake_Spec.enso
index fba73905d526..11764b44de63 100644
--- a/test/Table_Tests/src/Database/Snowflake_Spec.enso
+++ b/test/Table_Tests/src/Database/Snowflake_Spec.enso
@@ -26,7 +26,6 @@ from project.Database.Types.Postgres_Type_Mapping_Spec import default_text
 from project.Database.Postgres_Spec import Basic_Test_Data, Postgres_Tables_Data
 
 import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
-from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
 
 ## Still a WIP cannot be completed until the dialect is implemented properly for
    Snowflake.
@@ -620,7 +619,7 @@ add_table_specs suite_builder =
 
             cloud_setup.with_prepared_environment <| with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret->
                 my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5)
-                with_secret "my_postgres_app_name" my_secret_name app_name_secret-> with_retries <|
+                with_secret "my_postgres_app_name" my_secret_name app_name_secret-> Test.with_retries <|
                    details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret)
                    # We set the ApplicationName option, so that we can see that secrets can be used in custom properties.
                    options = Connection_Options.Value [["ApplicationName", app_name_secret]]
diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso
index fa2af75f3ba1..556881d48128 100644
--- a/test/Table_Tests/src/IO/Fetch_Spec.enso
+++ b/test/Table_Tests/src/IO/Fetch_Spec.enso
@@ -21,12 +21,12 @@ main filter=Nothing =
 
 add_specs suite_builder =
     suite_builder.group "fetching files using HTTP" pending=pending_has_url group_builder->
-        group_builder.specify "fetching json" <|
+        group_builder.specify "fetching json" <| Test.with_retries <|
            r = Data.fetch base_url_with_slash+"testfiles/table.json"
            expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
            r.to Table . should_equal expected_table
 
-        group_builder.specify "fetching csv" <|
+        group_builder.specify "fetching csv" <| Test.with_retries <|
            url = base_url_with_slash+"testfiles/table.csv"
            r = Data.fetch url
            expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@@ -38,7 +38,7 @@ add_specs suite_builder =
            r2.should_be_a Table
            r2.should_equal expected_table
 
-        group_builder.specify "fetching xls" <|
+        group_builder.specify "fetching xls" <| Test.with_retries <|
            url = base_url_with_slash+"testfiles/table.xls"
            r = Data.fetch url
            expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@@ -51,7 +51,7 @@ add_specs suite_builder =
            r2.should_be_a Table
            r2.should_equal expected_table
 
-        group_builder.specify "fetching xlsx" <|
+        group_builder.specify "fetching xlsx" <| Test.with_retries <|
            url = base_url_with_slash+"testfiles/table.xlsx"
            r = Data.fetch url
            expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@@ -68,7 +68,7 @@ add_specs suite_builder =
            r3.should_be_a Table
            r3.should_equal expected_table
 
-        group_builder.specify "format detection based on Content-Type and Content-Disposition" <|
+        group_builder.specify "format detection based on Content-Type and Content-Disposition" <| Test.with_retries <|
            content = 'A,B\n1,x\n3,y'
            uri = URI.from (base_url_with_slash+"test_headers")
                . add_query_argument "base64_response_data" (Base_64.encode_text content)
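
Usage note (illustrative only, not part of the patch): a minimal sketch of how a spec might call the new `Test.with_retries` helper, mirroring the call sites updated above. The group name, test names, URL and file path are hypothetical placeholders, and the JSON assertion assumes `Data.fetch` auto-parses the response as in the specs changed in this diff.

    from Standard.Base import all
    from Standard.Test import all

    add_specs suite_builder =
        suite_builder.group "Test.with_retries usage sketch" group_builder->
            # Retry the whole test body if it panics, e.g. due to transient network conditions:
            group_builder.specify "can fetch a JSON document" <| Test.with_retries <|
                r = Data.fetch "https://example.com/sample.json"
                r.should_be_a JS_Object

            # Or retry only the flaky verification block; the setup above the
            # retried section runs exactly once.
            group_builder.specify "a written file eventually becomes readable" <|
                f = enso_project.data / "transient" / "example.txt"
                "Hello".write f . should_succeed
                Test.with_retries <|
                    f.read . should_equal "Hello"

    main filter=Nothing =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter filter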