From d53c51f8c923714b98afd45b7f173d796affb8b1 Mon Sep 17 00:00:00 2001
From: Gregory Michael Travis
Date: Tue, 17 Dec 2024 15:42:59 -0500
Subject: [PATCH] Consume fetch stream output in tests (#11885)

---
 test/Table_Tests/src/IO/Fetch_Spec.enso | 40 ++++++++++++-------------
 1 file changed, 20 insertions(+), 20 deletions(-)

diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso
index b1c3344506..0e27747890 100644
--- a/test/Table_Tests/src/IO/Fetch_Spec.enso
+++ b/test/Table_Tests/src/IO/Fetch_Spec.enso
@@ -228,16 +228,16 @@ add_specs suite_builder =
         group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
                 expect_counts [0, 0] <|
-                    HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache
-                    HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache
+                    HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text
+                    HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text
                 expect_counts [0, 2] <|
-                    HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache
-                    HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache
+                    HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
+                    HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text
 
             with_default_cache <|
                 expect_counts [0, 2] <|
-                    HTTP.fetch url0
-                    HTTP.fetch url1
+                    HTTP.fetch url0 . decode_as_text
+                    HTTP.fetch url1 . decode_as_text
 
         group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
@@ -412,11 +412,11 @@ add_specs suite_builder =
 
         group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
-                HTTP.fetch url0
+                HTTP.fetch url0 . decode_as_text
                 get_num_response_cache_entries . should_equal 1
-                HTTP.fetch base_url_with_slash+'crash'
+                HTTP.fetch base_url_with_slash+'crash' . decode_as_text
                 get_num_response_cache_entries . should_equal 1
-                HTTP.fetch base_url_with_slash+'nonexistent_endpoint'
+                HTTP.fetch base_url_with_slash+'nonexistent_endpoint' . decode_as_text
                 get_num_response_cache_entries . should_equal 1
 
         cloud_setup = Cloud_Tests_Setup.prepare
@@ -437,9 +437,9 @@ add_specs suite_builder =
                 . add_query_argument "arg1" secret2
                 . add_query_argument "arg2" "plain value"
 
-            HTTP.fetch url1
+            HTTP.fetch url1 . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch uri2
+            HTTP.fetch uri2 . decode_as_text
             get_num_response_cache_entries . should_equal 2
 
         group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <|
@@ -455,9 +455,9 @@ add_specs suite_builder =
             headers1 = [Header.new "A-Header" secret1]
             headers2 = [Header.new "A-Header" secret2]
 
-            HTTP.fetch headers=headers1 uri
+            HTTP.fetch headers=headers1 uri . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch headers=headers2 uri
+            HTTP.fetch headers=headers2 uri . decode_as_text
             get_num_response_cache_entries . should_equal 2
 
         group_builder.specify "Does not attempt to make room for the maximum file size when that is larger than the total cache size" pending=pending_has_url <| Test.with_retries <|
@@ -545,24 +545,24 @@ add_specs suite_builder =
             LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
 
         group_builder.specify "Cache should be cleared when a reload is detected" <|
-            HTTP.fetch base_url_with_slash+'test_download?length=10'
-            HTTP.fetch base_url_with_slash+'test_download?length=11'
-            HTTP.fetch base_url_with_slash+'test_download?length=12'
+            HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=11' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=12' . decode_as_text
             get_num_response_cache_entries . should_equal 3
 
             fake_reload
 
             get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
-            HTTP.fetch base_url_with_slash+'test_download?length=10'
+            HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch base_url_with_slash+'test_download?length=14'
-            HTTP.fetch base_url_with_slash+'test_download?length=15'
+            HTTP.fetch base_url_with_slash+'test_download?length=14' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=15' . decode_as_text
             get_num_response_cache_entries . should_equal 3
 
             fake_reload
 
             get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
-            HTTP.fetch base_url_with_slash+'test_download?length=16'
+            HTTP.fetch base_url_with_slash+'test_download?length=16' . decode_as_text
             get_num_response_cache_entries . should_equal 1
 
         group_builder.specify "Reissues the request if the cache file disappears" pending=pending_has_url <| Test.with_retries <|
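
Note: the change is mechanical. Each HTTP.fetch call in the affected tests now ends in
. decode_as_text, which reads the response stream to completion before the test inspects
the response cache. A minimal sketch of the resulting call shape, reusing only names that
appear in the patch (base_url_with_slash, get_num_response_cache_entries) and assuming the
surrounding Fetch_Spec test context; this is an illustration, not an addition to the patch:

    # Fetch and fully read the body; consuming the stream is the step the patch
    # appends to every test call shown above.
    response_text = HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text
    # The cache entry count can then be asserted, as in the tests above.
    get_num_response_cache_entries . should_equal 1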