HTTP cache size limit environment variables (#11530)

This commit is contained in:
Gregory Michael Travis 2024-11-13 13:40:54 -05:00 committed by GitHub
parent 03dc77006b
commit fb50a8f24f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 594 additions and 311 deletions

View File

@ -91,6 +91,11 @@ type Project_Description
namespace : Text
namespace self = self.ns
## PRIVATE
Returns the path of the project root.
root_path : Text
root_path self = self.root.path
## ICON enso_icon
Returns the Enso project description for the project that the engine was
executed with, i.e., the project that contains the `main` method, or

View File

@ -49,7 +49,6 @@ polyglot java import java.net.http.HttpRequest.Builder
polyglot java import java.net.InetSocketAddress
polyglot java import java.net.ProxySelector
polyglot java import javax.net.ssl.SSLContext
polyglot java import org.enso.base.enso_cloud.EnsoHTTPResponseCache
polyglot java import org.enso.base.enso_cloud.EnsoSecretHelper
polyglot java import org.enso.base.file_system.File_Utils
polyglot java import org.enso.base.net.http.MultipartBodyBuilder
@ -184,7 +183,7 @@ type HTTP
HTTP.clear_response_cache
clear_response_cache : Nothing
clear_response_cache -> Nothing = EnsoHTTPResponseCache.clear
clear_response_cache -> Nothing = EnsoSecretHelper.getCache.clear
## PRIVATE
ADVANCED

View File

@ -7,26 +7,32 @@ import org.graalvm.polyglot.Value;
public final class CurrentEnsoProject {
private final String name;
private final String namespace;
private final String rootPath;
private static CurrentEnsoProject cached = null;
private static boolean isCached = false;
private CurrentEnsoProject(String name, String namespace) {
private CurrentEnsoProject(String name, String namespace, String rootPath) {
this.name = name;
this.namespace = namespace;
this.rootPath = rootPath;
}
public static CurrentEnsoProject get() {
if (!isCached) {
Value ensoProject =
EnsoMeta.callStaticModuleMethod("Standard.Base.Meta.Enso_Project", "enso_project");
if (ensoProject.hasMember("name") && ensoProject.hasMember("namespace")) {
if (ensoProject.hasMember("name")
&& ensoProject.hasMember("namespace")
&& ensoProject.hasMember("root_path")) {
Value namespace = ensoProject.invokeMember("namespace");
Value name = ensoProject.invokeMember("name");
if (namespace == null || name == null) {
Value rootPath = ensoProject.invokeMember("root_path");
if (namespace == null || name == null || rootPath == null) {
cached = null;
} else {
cached = new CurrentEnsoProject(name.asString(), namespace.asString());
cached =
new CurrentEnsoProject(name.asString(), namespace.asString(), rootPath.asString());
}
} else {
cached = null;
@ -46,6 +52,10 @@ public final class CurrentEnsoProject {
return namespace;
}
public String getRootPath() {
return rootPath;
}
public String fullName() {
return namespace + "." + name;
}

View File

@ -0,0 +1,15 @@
package org.enso.base.cache;
import java.io.File;
import org.enso.base.CurrentEnsoProject;
/**
 * Supplies the usable disk space, in bytes, at the current Enso project root.
 *
 * <p>Extends Mockable so tests can substitute a fixed value for the real disk-space reading.
 */
public class DiskSpaceGetter extends Mockable<Long> {
  @Override
  public Long computeValue() {
    var projectRoot = new File(CurrentEnsoProject.get().getRootPath());
    return projectRoot.getUsableSpace();
  }
}

View File

@ -28,22 +28,59 @@ import org.enso.base.Stream_Utils;
* deleting entries to make space for new ones. All cache files are set to be deleted automatically
* on JVM exit.
*
* <p>Limits should be set with environment variables:
*
* <p>
*
* <pre>
* - ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB: single file size, in MB
* - ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT: total cache size, in MB or
* percentage of free disk space
* </pre>
*
* <p>Examples:
*
* <pre>
* ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB=20
ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT=200
ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT=50%
* </pre>
*
* <p>Regardless of other settings, the total cache size is capped at a percentage of the free disk
* space (MAX_PERCENTAGE).
*
* @param <M> Additional metadata to associate with the data.
*/
public class LRUCache<M> {
private static final Logger logger = Logger.getLogger(LRUCache.class.getName());
private final long maxFileSize;
private final long maxTotalCacheSize;
private final CacheTestParameters cacheTestParameters = new CacheTestParameters();
/**
* An upper limit on the total cache size. If the cache size limit specified by the other
* parameters goes over this value, then this value is used.
*/
private static final double MAX_PERCENTAGE = 0.9;
/** Used to override cache parameters for testing. */
private final Map<String, CacheEntry<M>> cache = new HashMap<>();
private final Map<String, ZonedDateTime> lastUsed = new HashMap<>();
public LRUCache(long maxFileSize, long maxTotalCacheSize) {
this.maxFileSize = maxFileSize;
this.maxTotalCacheSize = maxTotalCacheSize;
/** Defines the per-file and total cache size limits. */
private final LRUCacheSettings settings;
/** Used to get the current time; mockable. */
private final NowGetter nowGetter;
/** Used to get the current free disk space; mockable. */
private final DiskSpaceGetter diskSpaceGetter;
public LRUCache() {
this(LRUCacheSettings.getDefault(), new NowGetter(), new DiskSpaceGetter());
}
public LRUCache(LRUCacheSettings settings, NowGetter nowGetter, DiskSpaceGetter diskSpaceGetter) {
this.settings = settings;
this.nowGetter = nowGetter;
this.diskSpaceGetter = diskSpaceGetter;
}
public CacheResult<M> getResult(ItemBuilder<M> itemBuilder)
@ -70,12 +107,17 @@ public class LRUCache<M> {
return new CacheResult<>(item.stream(), item.metadata());
}
// If we have a content-length, clear up enough space for that. If not,
// then clear up enough space for the largest allowed file size.
long maxFileSize = settings.getMaxFileSize();
if (item.sizeMaybe.isPresent()) {
long size = item.sizeMaybe().get();
if (size > getMaxFileSize()) {
throw new ResponseTooLargeException(getMaxFileSize());
if (size > maxFileSize) {
throw new ResponseTooLargeException(maxFileSize);
}
makeRoomFor(size);
} else {
makeRoomFor(maxFileSize);
}
try {
@ -83,17 +125,13 @@ public class LRUCache<M> {
File responseData = downloadResponseData(cacheKey, item);
M metadata = item.metadata();
long size = responseData.length();
ZonedDateTime expiry = getNow().plus(Duration.ofSeconds(item.ttl().get()));
ZonedDateTime expiry = nowGetter.get().plus(Duration.ofSeconds(item.ttl().get()));
// Create a cache entry.
var cacheEntry = new CacheEntry<>(responseData, metadata, size, expiry);
cache.put(cacheKey, cacheEntry);
markCacheEntryUsed(cacheKey);
// Clear out old entries to satisfy the total cache size limit. This might
// be necessary here if we didn't receive a correct content size value.
removeFilesToSatisfyLimit();
return getResultForCacheEntry(cacheKey);
} catch (IOException e) {
logger.log(
@ -124,13 +162,14 @@ public class LRUCache<M> {
boolean successful = false;
try {
// Limit the download to getMaxFileSize().
boolean sizeOK = Stream_Utils.limitedCopy(inputStream, outputStream, getMaxFileSize());
long maxFileSize = settings.getMaxFileSize();
boolean sizeOK = Stream_Utils.limitedCopy(inputStream, outputStream, maxFileSize);
if (sizeOK) {
successful = true;
return temp;
} else {
throw new ResponseTooLargeException(getMaxFileSize());
throw new ResponseTooLargeException(maxFileSize);
}
} finally {
outputStream.close();
@ -144,12 +183,12 @@ public class LRUCache<M> {
/** Mark the entry with the current time, to maintain LRU data. */
private void markCacheEntryUsed(String cacheKey) {
lastUsed.put(cacheKey, getNow());
lastUsed.put(cacheKey, nowGetter.get());
}
/** Remove all cache entries (and their files) that have passed their TTL. */
private void removeStaleEntries() {
var now = getNow();
var now = nowGetter.get();
removeCacheEntriesByPredicate(e -> e.expiry().isBefore(now));
}
@ -195,8 +234,13 @@ public class LRUCache<M> {
private void makeRoomFor(long newFileSize) {
removeStaleEntries();
long totalSize = getTotalCacheSize() + newFileSize;
long maxTotalCacheSize = getMaxTotalCacheSize();
// Size of files on disk.
long currentCacheSize = getTotalCacheSize();
// Upper limit to cache size.
long maxTotalCacheSize = getMaxTotalCacheSize(currentCacheSize);
// Size including new file.
long totalSize = currentCacheSize + newFileSize;
if (totalSize <= maxTotalCacheSize) {
return;
}
@ -221,39 +265,46 @@ public class LRUCache<M> {
return sortedEntries;
}
/** Remove least-recently used entries until the total cache size is under the limit. */
private void removeFilesToSatisfyLimit() {
makeRoomFor(0L);
}
private long getTotalCacheSize() {
return cache.values().stream().collect(Collectors.summingLong(e -> e.size()));
}
private long getMaxFileSize() {
return cacheTestParameters.getMaxFileSizeOverrideTestOnly().orElse(maxFileSize);
/**
* Calculate the max total cache size, using the current limit but also constraining the result to
* the upper bound.
*/
public long getMaxTotalCacheSize(long currentlyUsed) {
long freeSpace = diskSpaceGetter.get() + currentlyUsed;
var totalCacheSize =
switch (settings.getTotalCacheLimit()) {
case TotalCacheLimit.Bytes bytes -> bytes.bytes();
case TotalCacheLimit.Percentage percentage -> {
yield (long) (percentage.percentage() * freeSpace);
}
};
long upperBound = (long) (freeSpace * MAX_PERCENTAGE);
return Long.min(upperBound, totalCacheSize);
}
private long getMaxTotalCacheSize() {
return cacheTestParameters.getMaxTotalCacheSizeOverrideTestOnly().orElse(maxTotalCacheSize);
/** For testing. */
public long getMaxTotalCacheSize() {
return getMaxTotalCacheSize(getTotalCacheSize());
}
public int getNumEntries() {
return cache.size();
}
public List<Long> getFileSizesTestOnly() {
/** Public for testing. */
public List<Long> getFileSizes() {
return new ArrayList<>(
cache.values().stream().map(CacheEntry::size).collect(Collectors.toList()));
}
private ZonedDateTime getNow() {
return cacheTestParameters.getNowOverrideTestOnly().orElse(ZonedDateTime.now());
}
/** Return a set of parameters that can be used to modify settings for testing purposes. */
public CacheTestParameters getCacheTestParameters() {
return cacheTestParameters;
/** Public for testing. */
public LRUCacheSettings getSettings() {
return settings;
}
private record CacheEntry<M>(File responseData, M metadata, long size, ZonedDateTime expiry) {}
@ -290,64 +341,4 @@ public class LRUCache<M> {
private final Comparator<Map.Entry<String, CacheEntry<M>>> cacheEntryLRUComparator =
Comparator.comparing(me -> lastUsed.get(me.getKey()));
/** A set of parameters that can be used to modify cache settings for testing purposes. */
public class CacheTestParameters {
/** This value is used for the current time when testing TTL expiration logic. */
private Optional<ZonedDateTime> nowOverrideTestOnly = Optional.empty();
/**
* Used for testing file and cache size limits. These cannot be set to values larger than the
* real limits.
*/
private Optional<Long> maxFileSizeOverrideTestOnly = Optional.empty();
private Optional<Long> maxTotalCacheSizeOverrideTestOnly = Optional.empty();
public Optional<ZonedDateTime> getNowOverrideTestOnly() {
return nowOverrideTestOnly;
}
public void setNowOverrideTestOnly(ZonedDateTime nowOverride) {
nowOverrideTestOnly = Optional.of(nowOverride);
}
public void clearNowOverrideTestOnly() {
nowOverrideTestOnly = Optional.empty();
}
public Optional<Long> getMaxFileSizeOverrideTestOnly() {
return maxFileSizeOverrideTestOnly;
}
public void setMaxFileSizeOverrideTestOnly(long maxFileSizeOverrideTestOnly_) {
if (maxFileSizeOverrideTestOnly_ > maxFileSize) {
throw new IllegalArgumentException(
"Cannot set the (test-only) maximum file size to more than the allowed limit of "
+ maxFileSize);
}
maxFileSizeOverrideTestOnly = Optional.of(maxFileSizeOverrideTestOnly_);
}
public void clearMaxFileSizeOverrideTestOnly() {
maxFileSizeOverrideTestOnly = Optional.empty();
}
public Optional<Long> getMaxTotalCacheSizeOverrideTestOnly() {
return maxTotalCacheSizeOverrideTestOnly;
}
public void setMaxTotalCacheSizeOverrideTestOnly(long maxTotalCacheSizeOverrideTestOnly_) {
if (maxTotalCacheSizeOverrideTestOnly_ > maxTotalCacheSize) {
throw new IllegalArgumentException(
"Cannot set the (test-only) total cache size to more than the allowed limit of "
+ maxTotalCacheSize);
}
maxTotalCacheSizeOverrideTestOnly = Optional.of(maxTotalCacheSizeOverrideTestOnly_);
}
public void clearMaxTotalCacheSizeOverrideTestOnly() {
maxTotalCacheSizeOverrideTestOnly = Optional.empty();
}
}
}

View File

@ -0,0 +1,99 @@
package org.enso.base.cache;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.enso.base.Environment_Utils;
/**
 * Holds the per-file and total cache size limits for an LRUCache, read from environment variables
 * (with defaults).
 *
 * <p>Env vars: ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB (single-file limit, in MB) and
 * ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT (total limit, in MB or as a percentage of free disk
 * space, e.g. "50%").
 */
public class LRUCacheSettings {
  private static final Logger logger = Logger.getLogger(LRUCacheSettings.class.getName());

  private static final String MAX_FILE_SIZE_ENV_VAR = "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB";
  private static final String TOTAL_CACHE_SIZE_ENV_VAR =
      "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT";

  /**
   * Default value for the largest file size allowed. Should be overridden with the
   * ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB environment variable.
   */
  private static final long DEFAULT_MAX_FILE_SIZE = 2L * 1024 * 1024 * 1024;

  /**
   * Default value for the percentage of free disk space to use as a limit on the total cache size.
   * Should be overridden with the ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT environment variable.
   */
  private static final double DEFAULT_TOTAL_CACHE_SIZE_FREE_SPACE_PERCENTAGE = 0.2;

  /**
   * Maximum size allowed for a single file. If a file larger than this is requested through this
   * cache, a ResponseTooLargeException is thrown.
   */
  private final long maxFileSize;

  /**
   * Limits the total size of all files in the cache.
   *
   * <p>This value can depend on free disk space, so it is not resolved to a maximum byte count at
   * initialization time, but recalculated during each file cleanup.
   */
  private final TotalCacheLimit.Limit totalCacheLimit;

  public LRUCacheSettings(long maxFileSize, TotalCacheLimit.Limit totalCacheLimit) {
    this.maxFileSize = maxFileSize;
    this.totalCacheLimit = totalCacheLimit;
  }

  /** Uses defaults if the vars are not set. */
  public static LRUCacheSettings getDefault() {
    return new LRUCacheSettings(parseMaxFileSizeEnvVar(), parseTotalCacheLimitEnvVar());
  }

  public long getMaxFileSize() {
    return maxFileSize;
  }

  public TotalCacheLimit.Limit getTotalCacheLimit() {
    return totalCacheLimit;
  }

  // Uses the environment variable if set and correctly formatted, otherwise
  // uses a default.
  private static long parseMaxFileSizeEnvVar() {
    String maxFileSizeSpec = Environment_Utils.get_environment_variable(MAX_FILE_SIZE_ENV_VAR);
    if (maxFileSizeSpec == null) {
      return DEFAULT_MAX_FILE_SIZE;
    }
    try {
      // The env var is specified in megabytes; convert to bytes.
      double maxFileSizeMegs = Double.parseDouble(maxFileSizeSpec);
      return (long) (maxFileSizeMegs * 1024 * 1024);
    } catch (NumberFormatException e) {
      // NOTE: java.util.logging does not substitute SLF4J-style "{}" placeholders, so the invalid
      // value is concatenated into the message directly; the exception supplies the stack trace.
      logger.log(
          Level.WARNING,
          "Unable to parse environment variable "
              + MAX_FILE_SIZE_ENV_VAR
              + "="
              + maxFileSizeSpec
              + "; falling back to default",
          e);
      return DEFAULT_MAX_FILE_SIZE;
    }
  }

  // Uses the environment variable if set and correctly formatted, otherwise
  // uses a default.
  private static TotalCacheLimit.Limit parseTotalCacheLimitEnvVar() {
    String totalCacheLimitSpec =
        Environment_Utils.get_environment_variable(TOTAL_CACHE_SIZE_ENV_VAR);
    if (totalCacheLimitSpec == null) {
      return new TotalCacheLimit.Percentage(DEFAULT_TOTAL_CACHE_SIZE_FREE_SPACE_PERCENTAGE);
    }
    try {
      return TotalCacheLimit.parse(totalCacheLimitSpec);
    } catch (IllegalArgumentException e) {
      // See note above about JUL placeholder semantics.
      logger.log(
          Level.WARNING,
          "Unable to parse environment variable "
              + TOTAL_CACHE_SIZE_ENV_VAR
              + "="
              + totalCacheLimitSpec
              + "; falling back to default",
          e);
      return new TotalCacheLimit.Percentage(DEFAULT_TOTAL_CACHE_SIZE_FREE_SPACE_PERCENTAGE);
    }
  }
}

View File

@ -0,0 +1,21 @@
package org.enso.base.cache;
import java.util.Optional;
/**
 * A lazily computed value that can be replaced ("mocked") with a fixed value for testing.
 *
 * @param <T> the type of the value supplied
 */
public abstract class Mockable<T> {
  // Holds the mock while one is active; empty means "use computeValue()".
  private Optional<T> override = Optional.empty();

  /** Computes the real (un-mocked) value. */
  public abstract T computeValue();

  /** Replaces the computed value with a fixed one until {@link #unmocked()} is called. */
  public void mocked(T t) {
    this.override = Optional.of(t);
  }

  /** Removes any mock, so {@link #get()} computes the real value again. */
  public void unmocked() {
    this.override = Optional.empty();
  }

  /**
   * Returns the mocked value if one is set, otherwise computes the real value.
   *
   * <p>Uses orElseGet so that computeValue() is not invoked at all while a mock is active; the
   * previous orElse(computeValue()) form evaluated the real value eagerly and then discarded it,
   * which defeats mocking when the real computation is expensive or unavailable under test.
   */
  public T get() {
    return override.orElseGet(this::computeValue);
  }
}

View File

@ -0,0 +1,19 @@
package org.enso.base.cache;
import java.time.ZonedDateTime;
/**
 * Supplies the current wall-clock time; mockable so tests can control time-dependent logic (such
 * as cache TTL expiration).
 */
public class NowGetter extends Mockable<ZonedDateTime> {
  @Override
  public ZonedDateTime computeValue() {
    return ZonedDateTime.now();
  }

  /**
   * This is necessary because a direct call to the superclass does not convert a polyglot Value to
   * ZonedDateTime.
   */
  @Override
  public void mocked(ZonedDateTime dt) {
    super.mocked(dt);
  }
}

View File

@ -0,0 +1,42 @@
package org.enso.base.cache;
import java.text.DecimalFormat;
import java.text.ParsePosition;
/**
* Represents a limit on the total size of an LRUCache, either as a fixed byte count or as a
* percentage of available disk space.
*/
public class TotalCacheLimit {
/** Parse the limit specification string into either a Bytes or Percentage value. */
public static Limit parse(String limitString)
throws IllegalArgumentException, NumberFormatException {
Number percentageNumber = tryPercentage(limitString);
if (percentageNumber != null) {
double percentage = percentageNumber.doubleValue();
if (percentage < 0.0 || percentage > 1.0) {
throw new IllegalArgumentException(
"LURCache free disk space percentage must be in the range 0..100% (inclusive): was "
+ limitString);
}
return new Percentage(percentage);
}
double megs = Double.parseDouble(limitString);
long bytes = (long) (megs * 1024 * 1024);
return new Bytes(bytes);
}
public sealed interface Limit permits Bytes, Percentage {}
// Specify the limit in bytes.
public record Bytes(long bytes) implements Limit {}
// Specify the limit as a percentage of total free, usable disk space.
public record Percentage(double percentage) implements Limit {}
private static Number tryPercentage(String limitString) {
DecimalFormat df = new DecimalFormat("0%");
ParsePosition pp = new ParsePosition(0);
return df.parse(limitString, pp);
}
}

View File

@ -3,7 +3,6 @@ package org.enso.base.enso_cloud;
import java.io.IOException;
import java.io.InputStream;
import java.net.http.HttpHeaders;
import java.util.List;
import java.util.Optional;
import org.enso.base.cache.LRUCache;
import org.enso.base.cache.ResponseTooLargeException;
@ -21,15 +20,15 @@ import org.enso.base.cache.ResponseTooLargeException;
* is no cache hit, the InputStream is connected directly to the remote server.
*/
public class EnsoHTTPResponseCache {
// Public for testing.
public EnsoHTTPResponseCache() {}
// 1 year.
private static final int DEFAULT_TTL_SECONDS = 31536000;
private static final long MAX_FILE_SIZE = 2L * 1024 * 1024 * 1024;
private static final long MAX_TOTAL_CACHE_SIZE = 20L * 1024 * 1024 * 1024;
private final int DEFAULT_TTL_SECONDS = 31536000;
private static final LRUCache<Metadata> lruCache =
new LRUCache<>(MAX_FILE_SIZE, MAX_TOTAL_CACHE_SIZE);
private LRUCache<Metadata> lruCache = new LRUCache<>();
public static EnsoHttpResponse makeRequest(RequestMaker requestMaker)
public EnsoHttpResponse makeRequest(RequestMaker requestMaker)
throws IOException, InterruptedException, ResponseTooLargeException {
var itemBuilder = new ItemBuilder(requestMaker);
@ -39,7 +38,7 @@ public class EnsoHTTPResponseCache {
cacheResult.inputStream(), cacheResult.metadata());
}
public static class ItemBuilder implements LRUCache.ItemBuilder<Metadata> {
public class ItemBuilder implements LRUCache.ItemBuilder<Metadata> {
private final RequestMaker requestMaker;
ItemBuilder(RequestMaker requestMaker) {
@ -74,7 +73,7 @@ public class EnsoHTTPResponseCache {
}
/** Get the size of the response data, if available. */
private static Optional<Long> getResponseDataSize(HttpHeaders headers) {
private Optional<Long> getResponseDataSize(HttpHeaders headers) {
return headers.firstValue("content-length").map(Long::parseLong);
}
@ -94,7 +93,7 @@ public class EnsoHTTPResponseCache {
* <p>If 'max-age' and 'Age' are both present, we set TTL = max-age - Age. If only 'max-age' is
* present, we set TTL = max-age. If neither are present, we use a default.
*/
private static int calculateTTL(HttpHeaders headers) {
private int calculateTTL(HttpHeaders headers) {
Integer maxAge = getMaxAge(headers);
if (maxAge == null) {
return DEFAULT_TTL_SECONDS;
@ -104,7 +103,7 @@ public class EnsoHTTPResponseCache {
}
}
private static Integer getMaxAge(HttpHeaders headers) {
private Integer getMaxAge(HttpHeaders headers) {
var cacheControlMaybe = headers.firstValue("cache-control");
Integer maxAge = null;
if (cacheControlMaybe.isPresent()) {
@ -123,21 +122,15 @@ public class EnsoHTTPResponseCache {
return maxAge;
}
public static void clear() {
lruCache.clear();
/** Public for testing. */
public LRUCache getLRUCache() {
return lruCache;
}
public static int getNumEntries() {
return lruCache.getNumEntries();
}
public static List<Long> getFileSizesTestOnly() {
return lruCache.getFileSizesTestOnly();
}
/** Return a set of parameters that can be used to modify settings for testing purposes. */
public static LRUCache.CacheTestParameters getCacheTestParameters() {
return lruCache.getCacheTestParameters();
/** Public for testing. */
public void setLRUCache(LRUCache<Metadata> lruCache) {
this.lruCache.clear();
this.lruCache = lruCache;
}
public interface RequestMaker {

View File

@ -22,6 +22,7 @@ import org.graalvm.collections.Pair;
/** Makes HTTP requests with secrets in either header or query string. */
public final class EnsoSecretHelper extends SecretValueResolver {
private static EnsoHTTPResponseCache cache;
/** Gets a JDBC connection resolving EnsoKeyValuePair into the properties. */
public static Connection getJDBCConnection(
@ -87,7 +88,7 @@ public final class EnsoSecretHelper extends SecretValueResolver {
if (!useCache) {
return requestMaker.makeRequest();
} else {
return EnsoHTTPResponseCache.makeRequest(requestMaker);
return getOrCreateCache().makeRequest(requestMaker);
}
}
@ -175,6 +176,17 @@ public final class EnsoSecretHelper extends SecretValueResolver {
}
}
/**
 * Lazily creates and returns the shared HTTP response cache.
 *
 * <p>Synchronized to fix a check-then-act race on the static field: two threads making their
 * first cached request concurrently could each construct a cache, leaving getCache() observing a
 * different instance than the one actually used. NOTE(review): for full visibility guarantees the
 * {@code cache} field should also be volatile or only read under this lock — confirm callers.
 */
private static synchronized EnsoHTTPResponseCache getOrCreateCache() {
  if (cache == null) {
    cache = new EnsoHTTPResponseCache();
  }
  return cache;
}
/** Returns the shared cache, or null if no cache-using request has been made yet. */
public static EnsoHTTPResponseCache getCache() {
  return cache;
}
private static final Comparator<Pair<String, String>> headerNameComparator =
Comparator.comparing((Pair<String, String> pair) -> pair.getLeft())
.thenComparing(Comparator.comparing(pair -> pair.getRight()));

View File

@ -14,14 +14,22 @@ from Standard.Table import all
import Standard.Table.Errors.Invalid_JSON_Format
from Standard.Test import all
import Standard.Test.Test_Environment
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
from enso_dev.Base_Tests.Network.Http.Http_Test_Setup import base_url_with_slash, pending_has_url
import project.Util
polyglot java import java.io.File as Java_File
polyglot java import java.lang.IllegalArgumentException
polyglot java import org.enso.base.enso_cloud.EnsoHTTPResponseCache
polyglot java import java.lang.NumberFormatException
polyglot java import org.enso.base.cache.DiskSpaceGetter
polyglot java import org.enso.base.cache.LRUCache
polyglot java import org.enso.base.cache.LRUCacheSettings
polyglot java import org.enso.base.cache.NowGetter
polyglot java import org.enso.base.cache.TotalCacheLimit
polyglot java import org.enso.base.enso_cloud.EnsoSecretHelper
main filter=Nothing =
suite = Test.build suite_builder->
@ -106,25 +114,59 @@ add_specs suite_builder =
r4.should_equal (Table.from_rows ["Column 1"] [["A,B"], ["1,x"], ["3,y"]])
suite_builder.group "Response caching" pending=pending_has_url group_builder->
with_temp_file file ~action =
deleter =
file.delete_if_exists
Panic.with_finalizer deleter <|
deleter
action
get_lru_cache =
EnsoSecretHelper.getCache.getLRUCache
get_num_response_cache_entries =
EnsoHTTPResponseCache.getNumEntries
get_lru_cache.getNumEntries
with_counts ~action =
before_count = get_num_response_cache_entries
action
after_count = get_num_response_cache_entries
[before_count, after_count]
reset_size_limits =
EnsoHTTPResponseCache.getCacheTestParameters.clearMaxFileSizeOverrideTestOnly
EnsoHTTPResponseCache.getCacheTestParameters.clearMaxTotalCacheSizeOverrideTestOnly
expect_counts expected_counts ~action =
counts = with_counts action
counts . should_equal expected_counts frames_to_skip=1
get_cache_file_sizes : Vector Integer
get_cache_file_sizes -> Vector Integer =
Vector.from_polyglot_array EnsoHTTPResponseCache.getFileSizesTestOnly . sort Sort_Direction.Ascending
Vector.from_polyglot_array EnsoSecretHelper.getCache.getLRUCache.getFileSizes . sort Sort_Direction.Ascending
# Creates a new cache each time, then resets it at the end
with_lru_cache lru_cache ~action =
reset = EnsoSecretHelper.getCache.setLRUCache LRUCache.new
Panic.with_finalizer reset <|
EnsoSecretHelper.getCache.setLRUCache lru_cache
action
# Creates a new cache each time, then resets it at the end
with_config max_file_size total_cache_size ~action =
now_getter = NowGetter.new
disk_space_getter = DiskSpaceGetter.new
lru_cache_settings = LRUCacheSettings.new max_file_size total_cache_size
lru_cache = LRUCache.new lru_cache_settings now_getter disk_space_getter
with_lru_cache lru_cache (action now_getter disk_space_getter)
# Creates a new cache each time, then resets it at the end
with_mocks ~action =
now_getter = NowGetter.new
disk_space_getter = DiskSpaceGetter.new
lru_cache = LRUCache.new LRUCacheSettings.getDefault now_getter disk_space_getter
with_lru_cache lru_cache (action now_getter disk_space_getter)
# Creates a new cache each time, then resets it at the end
with_default_cache ~action =
lru_cache = LRUCache.new
with_lru_cache lru_cache action
url0 = base_url_with_slash+'test_download?max-age=16&length=10'
url1 = base_url_with_slash+'test_download?max-age=16&length=20'
@ -138,98 +180,90 @@ add_specs suite_builder =
results.distinct.length . should_equal 1
group_builder.specify "Cache should return the same repsonse" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
with_default_cache <|
check_same_results <|
HTTP.fetch url0 . decode_as_text
get_num_response_cache_entries . should_equal 1
check_same_results <|
HTTP.fetch url1 . decode_as_text
get_num_response_cache_entries . should_equal 2
check_same_results <|
HTTP.fetch url0 . decode_as_text
get_num_response_cache_entries . should_equal 1
check_same_results <|
HTTP.fetch url1 . decode_as_text
get_num_response_cache_entries . should_equal 2
with_default_cache <|
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
url1_body_1 = HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text . should_equal url1_body_1
get_num_response_cache_entries . should_equal 2
HTTP.clear_response_cache
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
url1_body_1 = HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text . should_equal url1_body_1
get_num_response_cache_entries . should_equal 2
HTTP.clear_response_cache
url0_body_2 = HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url0_body_2
url1_body_2 = HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url1_body_2
get_num_response_cache_entries . should_equal 0
with_default_cache <|
url0_body_2 = HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url0_body_2
url1_body_2 = HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url1_body_2
get_num_response_cache_entries . should_equal 0
group_builder.specify "Cache should handle many entries" pending=pending_has_url <| Test.with_retries <|
count = 20
HTTP.clear_response_cache
check_same_results <|
0.up_to count . map i->
HTTP.fetch base_url_with_slash+"test_download?length="+i.to_text . decode_as_text
get_num_response_cache_entries . should_equal count
with_default_cache <|
check_same_results <|
0.up_to count . map i->
HTTP.fetch base_url_with_slash+"test_download?length="+i.to_text . decode_as_text
get_num_response_cache_entries . should_equal count
HTTP.clear_response_cache
check_same_results <|
0.up_to count . each i->
headers = [Header.new "A-Header" "a-header-value-"+i.to_text]
HTTP.fetch base_url_with_slash+"test_download?length=8" headers=headers . decode_as_text
get_num_response_cache_entries . should_equal count
with_default_cache <|
check_same_results <|
0.up_to count . each i->
headers = [Header.new "A-Header" "a-header-value-"+i.to_text]
HTTP.fetch base_url_with_slash+"test_download?length=8" headers=headers . decode_as_text
get_num_response_cache_entries . should_equal count
group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
expect_counts [0, 0] <|
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache
HTTP.clear_response_cache
expect_counts [0, 2] <|
HTTP.fetch url0
HTTP.fetch url1
with_default_cache <|
expect_counts [0, 0] <|
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache
with_default_cache <|
expect_counts [0, 2] <|
HTTP.fetch url0
HTTP.fetch url1
group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
expect_counts [0, 0] <|
Data.fetch url0 cache_policy=Cache_Policy.No_Cache
Data.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
Data.fetch url0 cache_policy=Cache_Policy.Use_Cache
Data.fetch url1 cache_policy=Cache_Policy.Use_Cache
HTTP.clear_response_cache
expect_counts [0, 2] <|
Data.fetch url0
Data.fetch url1
with_default_cache <|
expect_counts [0, 0] <|
Data.fetch url0 cache_policy=Cache_Policy.No_Cache
Data.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
Data.fetch url0 cache_policy=Cache_Policy.Use_Cache
Data.fetch url1 cache_policy=Cache_Policy.Use_Cache
with_default_cache <|
expect_counts [0, 2] <|
Data.fetch url0
Data.fetch url1
group_builder.specify "Should not cache Data.download" pending=pending_has_url <| Test.with_retries <|
target_file = enso_project.data / "transient" / "my_download0.txt"
HTTP.clear_response_cache
target_file.delete_if_exists
Data.download url0 target_file
get_num_response_cache_entries . should_equal 0
target_file.delete_if_exists
with_temp_file target_file <| with_default_cache <|
Data.download url0 target_file
get_num_response_cache_entries . should_equal 0
group_builder.specify "Data.download is not affected by caching limits" pending=pending_has_url <| Test.with_retries <|
target_file = enso_project.data / "transient" / "my_download0.txt"
Panic.with_finalizer reset_size_limits <|
EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 120
EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100
Data.download base_url_with_slash+"test_download?length=200" target_file
with_temp_file target_file <| with_config 100 (TotalCacheLimit.Bytes.new 120) _-> _->
url = base_url_with_slash+"test_download?length=200"
Data.download url target_file
target_file.read.length . should_equal 200
target_file.delete_if_exists
Data.fetch url . should_fail_with (Response_Too_Large.Error 100)
group_builder.specify "Should not cache for methods other than GET" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
expect_counts [0, 0] <|
Data.post url_post (Request_Body.Text "hello world")
with_default_cache <|
expect_counts [0, 0] <|
Data.post url_post (Request_Body.Text "hello world")
# Use_Cache is only meaningful for GET; other methods must reject it (only the
# No_Cache success path is visible in this chunk — the rejection assertion is in
# the elided hunk below).
group_builder.specify "HTTP request with a non-GET method should reject a cache_policy=Use_Cache argument" pending=pending_has_url <| Test.with_retries <|
request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty
# NOTE(review): raw diff hunk header left over from the rendered diff — not Enso code.
@ -240,32 +274,32 @@ add_specs suite_builder =
HTTP.new.request request cache_policy=Cache_Policy.No_Cache . should_succeed
# Clearing the cache empties it; re-entering with_default_cache starts fresh.
group_builder.specify "Should be able to clear caches" pending=pending_has_url <| Test.with_retries <|
# (old version of the spec body)
HTTP.clear_response_cache
Data.fetch url0
get_num_response_cache_entries . should_equal 1
HTTP.clear_response_cache
get_num_response_cache_entries . should_equal 0
# (new version: with_default_cache scopes a fresh cache per block)
with_default_cache <|
Data.fetch url0
get_num_response_cache_entries . should_equal 1
with_default_cache <|
get_num_response_cache_entries . should_equal 0
# Same URL with different header sets must be distinct cache entries:
# expect_counts [0, 2] — 0 before, 2 after, despite 6 fetches (old + new copies below).
group_builder.specify "Cache key should depend on the headers" pending=pending_has_url <| Test.with_retries <|
# (old version)
HTTP.clear_response_cache
expect_counts [0, 2] <|
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
# (new version)
with_default_cache <|
expect_counts [0, 2] <|
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
# Permutations of the same header multiset must hash to one cache entry and
# produce identical responses.
group_builder.specify "Cache key should not depend on header order" pending=pending_has_url <| Test.with_retries <|
# (old version)
HTTP.clear_response_cache
header0 = Header.new "Abc" "eef"
header1 = Header.new "Abc" "def"
header2 = Header.new "Ghi" "jkl"
orders = [[header0, header1, header2], [header1, header2, header0], [header2, header1, header0]]
responses = orders.map headers->
Data.fetch url0 headers=headers . decode_as_text
get_num_response_cache_entries . should_equal 1
responses.distinct.length . should_equal 1
# (new version)
with_default_cache <|
header0 = Header.new "Abc" "eef"
header1 = Header.new "Abc" "def"
header2 = Header.new "Ghi" "jkl"
orders = [[header0, header1, header2], [header1, header2, header0], [header2, header1, header0]]
responses = orders.map headers->
Data.fetch url0 headers=headers . decode_as_text
get_num_response_cache_entries . should_equal 1
responses.distinct.length . should_equal 1
## Fetching the trigger uri causes stale entries to be removed, since the
uri is always different and so the caching and cleanup logic is run.
# NOTE(review): raw diff hunk header — the `make_trigger_uri =` definition line
# it elides is not visible in this chunk.
@ -275,117 +309,108 @@ add_specs suite_builder =
serial = trigger_uri_serial.get
trigger_uri_serial.modify (_ + 1)
base_url_with_slash+'test_download?max-age=10000&length=50&abc='+serial.to_text
# NOTE(review): two definitions of set_time_and_get_count follow — the first is the
# pre-change form (global test-only now override), the second the post-change form
# (explicit now_mock argument). Only one belongs in the real file.
set_time_and_get_count advance_secs =
EnsoHTTPResponseCache.getCacheTestParameters.setNowOverrideTestOnly (fake_now + (Duration.new seconds=advance_secs))
set_time_and_get_count now_mock advance_secs =
now_mock.mocked (fake_now + (Duration.new seconds=advance_secs))
# Advance fake time, then fetch a unique trigger URI to force cleanup and
# return the resulting entry count.
trigger_uri = make_trigger_uri
Data.fetch trigger_uri
get_num_response_cache_entries
# (pre-change helper only: undoes the global now override)
fake_time_resetter =
EnsoHTTPResponseCache.getCacheTestParameters.clearNowOverrideTestOnly
# Entries expire per their max-age TTL as fake time advances. Each probe adds one
# trigger entry (+1) while expiry removes stale ones, giving the expected counts.
group_builder.specify "The cache should expire stale entries" pending=pending_has_url <| Test.with_retries <|
# (old setup)
HTTP.clear_response_cache
# (new setup: with_mocks provides the `now` mock)
with_mocks now-> _->
set_time_and_get_count now 0 # Initialize fake now.
set_time_and_get_count 0 # Initialize fake now.
Data.fetch base_url_with_slash+'test_download?max-age=100&length=50'
Data.fetch base_url_with_slash+'test_download?max-age=200&length=50'
Data.fetch base_url_with_slash+'test_download?max-age=200&length=51'
Data.fetch base_url_with_slash+'test_download?max-age=300&length=50'
Data.fetch base_url_with_slash+'test_download?max-age=100&length=50'
Data.fetch base_url_with_slash+'test_download?max-age=200&length=50'
Data.fetch base_url_with_slash+'test_download?max-age=200&length=51'
Data.fetch base_url_with_slash+'test_download?max-age=300&length=50'
# (old version wrapped assertions in a finalizer to clear the global now override)
Panic.with_finalizer fake_time_resetter <|
## The count will increase by 1 each time, but decrease by the
number of entries removed
set_time_and_get_count 0 . should_equal 6
set_time_and_get_count 90 . should_equal 7
set_time_and_get_count 110 . should_equal 7
set_time_and_get_count 190 . should_equal 8
set_time_and_get_count 202 . should_equal 7
set_time_and_get_count 292 . should_equal 8
set_time_and_get_count 301 . should_equal 8
# (new version: same expectations via the now mock)
set_time_and_get_count now 0 . should_equal 6
set_time_and_get_count now 90 . should_equal 7
set_time_and_get_count now 110 . should_equal 7
set_time_and_get_count now 190 . should_equal 8
set_time_and_get_count now 202 . should_equal 7
set_time_and_get_count now 292 . should_equal 8
set_time_and_get_count now 301 . should_equal 8
# Effective TTL = max-age minus the Age response header (RFC 7234 semantics);
# the per-fetch comments record the resulting TTLs.
group_builder.specify "The cache should use the Age response header" pending=pending_has_url <| Test.with_retries <|
# (old setup)
HTTP.clear_response_cache
# (new setup)
with_mocks now-> _->
set_time_and_get_count now 0 # Initialize fake now.
set_time_and_get_count 0 # Initialize fake now.
Data.fetch base_url_with_slash+'test_download?max-age=100&age=50&length=50' # ttl 50
Data.fetch base_url_with_slash+'test_download?max-age=100&age=30&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=120&age=50&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=70&&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=160&age=70&length=50' # ttl 90
Data.fetch base_url_with_slash+'test_download?max-age=100&age=50&length=50' # ttl 50
Data.fetch base_url_with_slash+'test_download?max-age=100&age=30&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=120&age=50&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=70&&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=160&age=70&length=50' # ttl 90
# (old assertion block)
Panic.with_finalizer fake_time_resetter <|
## The count will increase by 1 each time, but decrease by the
number of entries removed
set_time_and_get_count 0 . should_equal 7
set_time_and_get_count 40 . should_equal 8
set_time_and_get_count 51 . should_equal 8
set_time_and_get_count 68 . should_equal 9
set_time_and_get_count 72 . should_equal 7
set_time_and_get_count 88 . should_equal 8
set_time_and_get_count 93 . should_equal 8
# (new assertion block)
set_time_and_get_count now 0 . should_equal 7
set_time_and_get_count now 40 . should_equal 8
set_time_and_get_count now 51 . should_equal 8
set_time_and_get_count now 68 . should_equal 9
set_time_and_get_count now 72 . should_equal 7
set_time_and_get_count now 88 . should_equal 8
set_time_and_get_count now 93 . should_equal 8
# NOTE(review): `download` (header only, old name) and `fetch_n` (new name) are the
# same helper before/after the rename in this diff — fetches a body of `size` bytes.
download size =
fetch_n size =
Data.fetch base_url_with_slash+'test_download?length='+size.to_text
# LRU eviction by total size: with a 100-byte cap, adding an entry evicts the
# least-recently-used entries until the new one fits.
group_builder.specify "Will remove old cache files to keep the total cache size under the total cache size limit" pending=pending_has_url <| Test.with_retries <|
# (old version using global override setters)
Panic.with_finalizer reset_size_limits <|
reset_size_limits
EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100
download 30
download 50
download 10
# (new version: with_config max_file_size=1000, total limit 100 bytes)
with_config 1000 (TotalCacheLimit.Bytes.new 100) _-> _->
fetch_n 30
fetch_n 50
fetch_n 10
get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
download 20
# 30+50+10+20 > 100, so the oldest entry (30) is evicted.
fetch_n 20
get_cache_file_sizes . should_equal_ignoring_order [10, 20, 50]
download 40
fetch_n 40
get_cache_file_sizes . should_equal_ignoring_order [10, 20, 40]
download 35
fetch_n 35
get_cache_file_sizes . should_equal_ignoring_order [20, 35, 40]
# Eviction order is recency-of-use, not insertion: re-fetching 30 protects it.
group_builder.specify "Will remove old cache files based on how recently they were used" pending=pending_has_url <| Test.with_retries <|
# (old version)
Panic.with_finalizer reset_size_limits <|
reset_size_limits
EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100
download 30
download 50
download 10
# (new version)
with_config 1000 (TotalCacheLimit.Bytes.new 100) _-> _->
fetch_n 30
fetch_n 50
fetch_n 10
get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
# Use 30 again so it's considered more recently used.
download 30
fetch_n 30
get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
download 20
# 50 is now the least recently used and gets evicted first.
fetch_n 20
get_cache_file_sizes . should_equal_ignoring_order [10, 20, 30]
download 45
fetch_n 45
get_cache_file_sizes . should_equal_ignoring_order [20, 30, 45]
# A response whose declared content-length exceeds the per-file limit is rejected
# up front with Response_Too_Large.
group_builder.specify "Will not cache a file with a content length greater than the single file limit" pending=pending_has_url <| Test.with_retries <|
# (old version)
Panic.with_finalizer reset_size_limits <|
reset_size_limits
EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100
download 110 . should_fail_with (Response_Too_Large.Error 100)
# (new version: max_file_size=100, total limit 1000 bytes)
with_config 100 (TotalCacheLimit.Bytes.new 1000) _-> _->
fetch_n 110 . should_fail_with (Response_Too_Large.Error 100)
# Same limit is enforced while streaming when the server omits content-length.
group_builder.specify "Will not cache a file without a content length, but which is greater than the single file limit" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
Panic.with_finalizer reset_size_limits <|
reset_size_limits
EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100
with_config 100 (TotalCacheLimit.Bytes.new 1000) _-> _->
url = base_url_with_slash+'test_download?omit-content-length=1&length=110'
Data.fetch url . should_fail_with (Response_Too_Large.Error 100)
# NOTE(review): the lines from here to L1051 interleave two different specs from the
# diff: the old "Should not cache if the request fails" header/body and the new
# "Will make room for the largest possible file" spec. The new copy of the
# failed-request spec follows at L1052.
group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <|
HTTP.clear_response_cache
# Without a content-length, eviction must assume the worst case (max_file_size=50),
# so fetching the 2-byte body first evicts until 50 bytes would fit.
group_builder.specify "Will make room for the largest possible file, if the server does not provide a content-length" pending=pending_has_url <| Test.with_retries <|
with_config 50 (TotalCacheLimit.Bytes.new 100) _-> _->
fetch_n 20
fetch_n 40
fetch_n 10
fetch_n 15
url = base_url_with_slash+'test_download?omit-content-length=1&length=2'
Data.fetch url . should_succeed
get_cache_file_sizes . should_equal_ignoring_order [10, 15, 2]
# (old body of the failed-request spec)
HTTP.fetch url0
get_num_response_cache_entries . should_equal 1
HTTP.fetch base_url_with_slash+'crash'
get_num_response_cache_entries . should_equal 1
HTTP.fetch base_url_with_slash+'nonexistent_endpoint'
get_num_response_cache_entries . should_equal 1
# Failed requests (server crash, 404) must not be cached: the count stays at 1.
group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
HTTP.fetch url0
get_num_response_cache_entries . should_equal 1
HTTP.fetch base_url_with_slash+'crash'
get_num_response_cache_entries . should_equal 1
HTTP.fetch base_url_with_slash+'nonexistent_endpoint'
get_num_response_cache_entries . should_equal 1
cloud_setup = Cloud_Tests_Setup.prepare
# NOTE(review): raw diff hunk header — the spec header and secret creation lines it
# elides are outside this chunk.
@ -396,7 +421,7 @@ add_specs suite_builder =
cleanup =
secret1.delete
secret2.delete
# Old wrapper vs new wrapper (with_default_cache added around the finalizer).
Panic.with_finalizer cleanup <|
with_default_cache <| Panic.with_finalizer cleanup <|
# Requests differ only in secrets in URI.
url1 = URI.from 'https://httpbin.org/bytes/50'
. add_query_argument "arg1" secret1
@ -405,7 +430,6 @@ add_specs suite_builder =
. add_query_argument "arg1" secret2
. add_query_argument "arg2" "plain value"
# (removed by the diff: explicit clear no longer needed under with_default_cache)
HTTP.clear_response_cache
HTTP.fetch url1
get_num_response_cache_entries . should_equal 1
HTTP.fetch uri2
@ -418,20 +442,73 @@ add_specs suite_builder =
cleanup =
secret1.delete
secret2.delete
# Old wrapper vs new wrapper, as above.
Panic.with_finalizer cleanup <|
with_default_cache <| Panic.with_finalizer cleanup <|
# Requests differ only in secrets in headers.
uri = URI.from 'https://httpbin.org/bytes/50'
headers1 = [Header.new "A-Header" secret1]
headers2 = [Header.new "A-Header" secret2]
# (removed by the diff)
HTTP.clear_response_cache
# Distinct secret values in headers must produce distinct cache entries.
HTTP.fetch headers=headers1 uri
get_num_response_cache_entries . should_equal 1
HTTP.fetch headers=headers2 uri
get_num_response_cache_entries . should_equal 2
# Test-only overrides cannot exceed the hard limits (2 GiB per file, 20 GiB total);
# violating either panics with IllegalArgumentException.
group_builder.specify "Should not be able to set the cache limits higher than the real limits" pending=pending_has_url <| Test.with_retries <|
Test.expect_panic IllegalArgumentException <|
EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly (2 * 1024 * 1024 * 1024 + 1) . should_fail_with Illegal_Argument
Test.expect_panic IllegalArgumentException <|
EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly (20 * 1024 * 1024 * 1024 + 1) . should_fail_with Illegal_Argument
# Defaults: 2 GiB max file size, 20% of disk as the total cache limit.
group_builder.specify "Cache limits should have defaults" <|
lru_cache = LRUCache.new
lru_cache.getSettings.getMaxFileSize . should_equal (2 * 1024 * 1024 * 1024)
lru_cache.getSettings.getTotalCacheLimit.percentage . should_equal 0.2
# ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB / ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT
# env vars configure the limits; plain numbers are interpreted as megabytes.
group_builder.specify "Should be able to set the max file size and total cache size (in MB) via environment variable" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "8" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "30" <|
with_default_cache <|
EnsoSecretHelper.getCache.getLRUCache.getSettings.getMaxFileSize . should_equal (8 * 1024 * 1024)
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal (30 * 1024 * 1024)
# A "N%" total limit is resolved against (mocked) free disk space each time it is read.
group_builder.specify "Should be able to set the max file size and total cache size (as a percentage) via environment variable, and track changes to available disk space" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "8" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "10%" <|
with_mocks _-> disk_space->
EnsoSecretHelper.getCache.getLRUCache.getSettings.getMaxFileSize . should_equal (8 * 1024 * 1024)
disk_space.mocked 300
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 30
disk_space.mocked 400
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 40
# Percentage limit counts cached bytes as part of "available" space: 50% of
# (free + cached) stays 50 as entries accumulate and free space shrinks.
group_builder.specify "Includes the existing cache files in the total cache size calculation, for a percentage total cache limit" pending=pending_has_url <| Test.with_retries <|
with_config 1000 (TotalCacheLimit.Percentage.new 0.5) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 30
disk_space.mocked 70
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 20
disk_space.mocked 50
get_num_response_cache_entries . should_equal 2
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 10
disk_space.mocked 70
get_num_response_cache_entries . should_equal 2
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
# Whatever the configured limit, the effective total is capped at 90% of disk
# (200-byte request on a 100-byte disk -> 90; 95% -> 90).
group_builder.specify "Total cache size, specified in MB, should not go over the percentage hard limit" <|
with_config 1000 (TotalCacheLimit.Bytes.new 200) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 90
group_builder.specify "Total cache size, specified as a percentage, should not go over the percentage hard limit" <|
with_config 1000 (TotalCacheLimit.Percentage.new 0.95) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getCache.getLRUCache.getMaxTotalCacheSize . should_equal 90
# Malformed env-var values fall back to the defaults rather than failing.
group_builder.specify "Falls back to the default if an environment variable is incorrectly formatted" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "abcd" <|
LRUCache.new . getSettings . getMaxFileSize . should_equal (2 * 1024 * 1024 * 1024)
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "50q.%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
# Out-of-range percentages (<0% or >100%) likewise fall back to the 20% default.
group_builder.specify "Falls back to the default if the max total cache percentage is outside 0..100%" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "-10%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "101%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)