Optimize import/export resolution (#5700)

This change adds serialization and deserialization of library bindings.
To make use of it, one must first generate the IR and serialize the bindings
using the `--compiled <path-to-library>` command. The bindings are stored
alongside the library's caches in a file with a `.bindings` suffix.
Bindings are generated during the `buildEngineDistribution` task, so no extra
steps are required.

When resolving imports and exports, the compiler will first try to load the
module's bindings from the cache. If successful, it will not schedule the
module's imports and exports for immediate compilation, as it previously
always did, but will instead use the bindings info to infer the dependent
modules.
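
A minimal sketch of that decision, built around the `importExportBindings`
method added in this change; `discoverFromBindings` and
`scheduleForCompilation` are hypothetical helpers, for illustration only:

```scala
// Hedged sketch: prefer cached bindings over immediate compilation.
def resolveModule(module: Module): Unit =
  compiler.importExportBindings(module) match {
    case Some(bindings) =>
      // The cached bindings are enough to infer the dependent modules,
      // so nothing is scheduled for compilation at this point.
      discoverFromBindings(bindings) // hypothetical helper
    case None =>
      // Previous behaviour: parse the module and schedule its
      // imports/exports for immediate compilation.
      scheduleForCompilation(module) // hypothetical helper
  }
```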

The current change does not yet optimize the compilation of the modules
themselves. It only delays the actual compilation (or loading of IR from the
cache) so that it can be done in bulk. This opens the door to further
optimizations, such as parallel loading of caches or lazily inferring only the
necessary modules.
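
The bulk step already exists in this commit: modules discovered via cached
bindings are parsed later, optionally in parallel. The fragment below is taken
from `runImportsAndExportsResolution` as changed here:

```scala
val parsingTasks: List[CompletableFuture[Unit]] =
  modulesImportedWithCachedBindings.map { module =>
    if (config.parallelParsing) {
      CompletableFuture.supplyAsync(() => ensureParsed(module), pool)
    } else {
      CompletableFuture.completedFuture(ensureParsed(module))
    }
  }
joinAllFutures(parsingTasks).get()
```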

Part of the work on https://github.com/enso-org/enso/issues/5568.
Hubert Plociniczak 2023-03-01 09:53:29 +01:00 committed by GitHub
parent 0778c85094
commit 941512e0ba
30 changed files with 1441 additions and 670 deletions

View File

@ -597,6 +597,7 @@
- [Engine can now recover from serialization failures][5591]
- [Use sbt runEngineDistribution][5609]
- [Update to GraalVM 22.3.1][5602]
- [Cache library bindings to optimize import/export resolution][5700]
[3227]: https://github.com/enso-org/enso/pull/3227
[3248]: https://github.com/enso-org/enso/pull/3248
@ -693,6 +694,7 @@
[5591]: https://github.com/enso-org/enso/pull/5591
[5609]: https://github.com/enso-org/enso/pull/5609
[5602]: https://github.com/enso-org/enso/pull/5602
[5700]: https://github.com/enso-org/enso/pull/5700
# Enso 2.0.0-alpha.18 (2021-10-12)

View File

@ -1512,7 +1512,6 @@ lazy val runtime = (project in file("engine/runtime"))
.dependsOn(graph)
.dependsOn(pkg)
.dependsOn(`edition-updater`)
.dependsOn(`library-manager`)
.dependsOn(`connected-lock-manager`)
.dependsOn(syntax.jvm)
.dependsOn(`syntax-rust-definition`)

View File

@ -207,7 +207,7 @@ other than the fact that they may be seeing the actual files on disk.
Integrity Checking does not cover the situation when the cached module imports a
module whose cache has been invalidated. For example, module `A` uses a method
`foo` from module `B` and a successful compilation resulted in IR cache for both
`A` and `B`. Later, someone modifed module `B` by renaming method `foo` to
`A` and `B`. Later, someone modified module `B` by renaming method `foo` to
`bar`. If we only compared source hashes, `B`'s IR would be re-generated while
`A`'s would be loaded from cache, thus failing to notice method name change,
until a complete cache invalidation was forced.
@ -228,6 +228,21 @@ There are two main elements that need to be tested as part of this feature.
runtime options for debugging, but also constructing the `DistributionManager`
on context creation (removing `RuntimeDistributionManager`).
### Import/Export caching of bindings
Import and export resolution is one of the more expensive steps in the
initial pipeline. It is also a step whose result does not change for released
library components, as we do not expect users to modify them. During the
initial compilation stage we iteratively parse modules (or load their cached
IR), run import resolution on the module, follow with export resolution, and
repeat the process for any dependent modules discovered along the way.
Calculating such a transitive closure is an expensive and repetitive process.
By caching bindings per library we are able to skip that process completely and
discover all necessary modules of the library in a single pass, as sketched
below.
The bindings are serialized along with the library caches in a file with a
`.bindings` suffix.
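
Conceptually, one lookup of the library's cached bindings replaces the whole
iterative discovery. A hedged sketch against the
`PackageRepository.getLibraryBindings` entry point added in this change
(`registerDiscovered` is a hypothetical helper):

```scala
// Single pass: fetch bindings for the whole library, then walk the map.
packageRepository
  .getLibraryBindings(libraryName, serializationManager)
  .foreach { cached =>
    // `entries` maps every module of the library to its BindingsMap,
    // so all dependent modules are known without parsing or compiling.
    cached.bindings.entries.foreach { case (moduleName, bindings) =>
      registerDiscovered(moduleName, bindings) // hypothetical helper
    }
  }
```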
## Future Directions
Due to the less than ideal platform situation we're in, we're limited to using

View File

@ -37,7 +37,7 @@ class CompilerBasedDependencyExtractor(logLevel: LogLevel)
importedLibraries.toSet
}
val sourcesImports = pkg.listSources.toSet.flatMap(findImportedLibraries)
val sourcesImports = pkg.listSources().toSet.flatMap(findImportedLibraries)
val itself = pkg.libraryName
// Builtins need to be removed from the set of the dependencies, because

View File

@ -223,7 +223,7 @@ class LibraryPreinstallHandler(
publishedLibraryProvider = config.publishedLibraryCache,
edition = edition,
preferLocalLibraries = preferLocalLibraries,
versionResolver = LibraryResolver(config.localLibraryProvider),
libraryResolver = LibraryResolver(config.localLibraryProvider),
dependencyExtractor = config.installerConfig.dependencyExtractor
)
} yield Tools(installer, dependencyResolver)

View File

@ -609,9 +609,11 @@ class LibrariesTest extends BaseServerTest {
.loadPackage(cachedLibraryRoot.location.toFile)
.get
pkg.name shouldEqual "Bar"
pkg.listSources.map(
_.file.getName
) should contain theSameElementsAs Seq("Main.enso")
pkg
.listSources()
.map(
_.file.getName
) should contain theSameElementsAs Seq("Main.enso")
assert(
Files.exists(cachedLibraryRoot / LibraryManifest.filename),

View File

@ -0,0 +1,482 @@
package org.enso.compiler;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import org.bouncycastle.jcajce.provider.digest.SHA3;
import org.bouncycastle.util.encoders.Hex;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.logger.masking.MaskedPath;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.security.MessageDigest;
import java.util.Optional;
import java.util.logging.Level;
/**
* Cache encapsulates common functionality needed to serialize and de-serialize objects, while
* maintaining their integrity in the process.
*
* @param <T> type of the cached data
* @param <M> type of the metadata associated with the data
*/
public abstract class Cache<T, M extends Cache.Metadata> {
/** The default level of logging for this Cache. */
protected Level logLevel;
/**
* Saves data to a cache file.
*
* @param entry data to save
* @param context the language context in which saving is taking place
* @param useGlobalCacheLocations if true, will use global cache location, local one otherwise
* @return if non-empty, the location at which the cached data was successfully saved
*/
public Optional<TruffleFile> save(T entry, EnsoContext context, boolean useGlobalCacheLocations) {
TruffleLogger logger = context.getLogger(this.getClass());
return getCacheRoots(context)
.flatMap(
roots -> {
try {
if (useGlobalCacheLocations) {
if (saveCacheTo(roots.globalCacheRoot, entry, logger)) {
return Optional.of(roots.globalCacheRoot);
}
} else {
logger.log(
logLevel, "Skipping use of global cache locations for " + stringRepr + ".");
}
if (saveCacheTo(roots.localCacheRoot, entry, logger)) {
return Optional.of(roots.localCacheRoot);
}
logger.log(logLevel, "Unable to write cache data for " + stringRepr + ".");
return Optional.empty();
} catch (CacheException e) {
logger.log(
Level.SEVERE, "Failed to save cache for " + stringRepr + ": " + e.getMessage());
return Optional.empty();
} catch (IOException ioe) {
logger.log(Level.SEVERE, "Failed to save cache for " + stringRepr + ".", ioe);
return Optional.empty();
}
});
}
/**
* Attempts to save cache data at a specified location.
*
* @param cacheRoot parent directory where cache data should be stored
* @param entry cache data to save
* @param logger internal logger
* @return true, if successful, false otherwise
* @throws IOException IOException encountered while writing data to files
*/
private boolean saveCacheTo(TruffleFile cacheRoot, T entry, TruffleLogger logger)
throws IOException, CacheException {
if (ensureRoot(cacheRoot)) {
var byteStream = new ByteArrayOutputStream();
byte[] bytesToWrite;
try (ObjectOutputStream stream = new ObjectOutputStream(byteStream)) {
stream.writeObject(extractObjectToSerialize(entry));
bytesToWrite = byteStream.toByteArray();
}
String blobDigest = computeDigestFromBytes(bytesToWrite);
String sourceDigest =
computeDigest(entry, logger)
.orElseThrow(() -> new CacheException("unable to compute digest"));
byte[] metadataBytes = metadata(sourceDigest, blobDigest, entry);
TruffleFile cacheDataFile = getCacheDataPath(cacheRoot);
TruffleFile metadataFile = getCacheMetadataPath(cacheRoot);
TruffleFile parentPath = cacheDataFile.getParent();
if (writeBytesTo(cacheDataFile, bytesToWrite) && writeBytesTo(metadataFile, metadataBytes)) {
logger.log(
logLevel,
"Written cache data ["
+ stringRepr
+ "] to ["
+ toMaskedPath(parentPath).applyMasking()
+ "].");
return true;
} else {
// Clean up after ourselves if it fails.
cacheDataFile.delete();
}
}
return false;
}
private boolean ensureRoot(TruffleFile cacheRoot) {
try {
if (cacheRoot.exists() && cacheRoot.isDirectory()) {
return cacheRoot.isWritable();
} else {
cacheRoot.createDirectories();
return cacheRoot.isWritable();
}
} catch (Throwable e) {
return false;
}
}
/**
* Return serialized representation of data's metadata.
*
* @param sourceDigest digest of data's source
* @param blobDigest digest of serialized data
* @param entry data to serialize
* @return raw bytes representing serialized metadata
*/
protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry);
/**
* Loads cache for this data, if possible.
*
* @param context the language context in which loading is taking place
* @return the cached data if possible, and an empty {@code Optional} if it could not load a valid cache
*/
public Optional<T> load(EnsoContext context) {
synchronized (this) {
TruffleLogger logger = context.getLogger(this.getClass());
return getCacheRoots(context)
.flatMap(
roots -> {
try {
Optional<T> loadedCache;
// Load from the global root as a priority.
loadedCache = loadCacheFrom(roots.globalCacheRoot(), context, logger);
if (loadedCache.isPresent()) {
logger.log(
logLevel,
"Using cache for ["
+ stringRepr
+ " at location ["
+ toMaskedPath(roots.globalCacheRoot()).applyMasking()
+ "].");
return loadedCache;
}
loadedCache = loadCacheFrom(roots.localCacheRoot(), context, logger);
if (loadedCache.isPresent()) {
logger.log(
logLevel,
"Using cache for ["
+ stringRepr
+ " at location ["
+ toMaskedPath(roots.localCacheRoot()).applyMasking()
+ "].");
return loadedCache;
}
logger.log(logLevel, "Unable to load a cache for module [" + stringRepr + "]");
} catch (IOException e) {
logger.log(
Level.WARNING, "Unable to load a cache for module [" + stringRepr + "]", e);
}
return Optional.empty();
});
}
}
/**
* Loads the cache from the provided `cacheRoot`, invalidating the cache if the loading fails for
* any reason.
*
* @param cacheRoot the root at which to find the cache for this cache entry
* @param context the language context in which loading is taking place
* @param logger a logger
* @return the cached data if available, otherwise an empty {@code Optional}.
*/
private Optional<T> loadCacheFrom(
TruffleFile cacheRoot, EnsoContext context, TruffleLogger logger) throws IOException {
TruffleFile metadataPath = getCacheMetadataPath(cacheRoot);
TruffleFile dataPath = getCacheDataPath(cacheRoot);
Optional<M> optMeta = loadCacheMetadata(metadataPath);
if (optMeta.isPresent()) {
M meta = optMeta.get();
boolean sourceDigestValid =
computeDigestFromSource(context, logger)
.map(digest -> digest.equals(meta.sourceHash()))
.orElseGet(() -> false);
byte[] blobBytes = dataPath.readAllBytes();
boolean blobDigestValid = computeDigestFromBytes(blobBytes).equals(meta.blobHash());
if (sourceDigestValid && blobDigestValid) {
Object readObject;
try (ObjectInputStream stream =
new ObjectInputStream(new ByteArrayInputStream(blobBytes))) {
readObject = stream.readObject();
} catch (IOException ioe) {
logger.log(
logLevel,
"`" + stringRepr + "` failed to load (caused by: " + ioe.getMessage() + ").");
invalidateCache(cacheRoot, logger);
return Optional.empty();
} catch (ClassNotFoundException e) {
logger.log(Level.WARNING, stringRepr + " appears to be corrupted", e);
return Optional.empty();
}
T cachedObject = null;
try {
cachedObject = validateReadObject(readObject, meta, logger);
if (cachedObject != null) {
return Optional.of(cachedObject);
} else {
logger.log(logLevel, "`" + stringRepr + "` was corrupt on disk.");
invalidateCache(cacheRoot, logger);
return Optional.empty();
}
} catch (CacheException e) {
logger.log(logLevel, "`" + stringRepr + "` was corrupt on disk: " + e.getMessage());
return Optional.empty();
}
} else {
logger.log(
logLevel, "One or more digests did not match for the cache for [" + stringRepr + "].");
invalidateCache(cacheRoot, logger);
return Optional.empty();
}
} else {
logger.log(
logLevel,
"Could not load the cache metadata at ["
+ toMaskedPath(metadataPath).applyMasking()
+ "].");
invalidateCache(cacheRoot, logger);
return Optional.empty();
}
}
/**
* Validates the deserialized data by returning the expected cached entry, or {@code null}.
*
* @param obj deserialized object
* @param meta metadata corresponding to the `obj`
* @param logger Truffle's logger
* @return an `obj` transformed to a cached entry
* @throws CacheException exception thrown on unexpected deserialized data
*/
protected abstract T validateReadObject(Object obj, M meta, TruffleLogger logger)
throws CacheException;
/**
* Read metadata representation from the provided location
*
* @param path location of the serialized metadata
* @return deserialized metadata, or an empty {@code Optional} if invalid
*/
private Optional<M> loadCacheMetadata(TruffleFile path) throws IOException {
if (path.isReadable()) {
return metadataFromBytes(path.readAllBytes());
} else {
return Optional.empty();
}
}
/**
* De-serializes raw bytes to data's metadata.
*
* @param bytes raw bytes representing metadata
* @return non-empty metadata, if de-serialization was successful
*/
protected abstract Optional<M> metadataFromBytes(byte[] bytes);
/**
* Compute digest of cache's data
*
* @param entry data for which digest should be computed
* @param logger Truffle's logger
* @return non-empty digest, if successful
*/
protected abstract Optional<String> computeDigest(T entry, TruffleLogger logger);
/**
* Compute digest of data's source
*
* @param context the language context in which loading is taking place
* @param logger Truffle's logger
* @return non-empty digest, if successful
*/
protected abstract Optional<String> computeDigestFromSource(
EnsoContext context, TruffleLogger logger);
/**
* Computes digest from an array of bytes using a default hashing algorithm.
*
* @param bytes bytes for which hash will be computed
* @return string representation of bytes' hash
*/
protected String computeDigestFromBytes(byte[] bytes) {
return Hex.toHexString(messageDigest().digest(bytes));
}
/**
* Returns a default hashing algorithm used for Enso caches.
*
* @return digest used for computing hashes
*/
protected MessageDigest messageDigest() {
return new SHA3.Digest224();
}
/**
* Returns the locations where caches may be stored.
*
* @param context the language context in which loading is taking place
* @return non-empty if the locations have been inferred successfully, empty otherwise
*/
protected abstract Optional<Roots> getCacheRoots(EnsoContext context);
/** Returns the exact data to be serialized */
protected abstract Object extractObjectToSerialize(T entry);
/** Human-readable representation of the entry, used in log messages. */
protected String stringRepr;
/** Base name of the cache files for this entry. */
protected String entryName;
/** Suffix of the data file. */
protected String dataSuffix;
/** Suffix of the metadata file. */
protected String metadataSuffix;
/**
* Gets the path to the cache data within the `cacheRoot`.
*
* @param cacheRoot the root of the cache for this entry
* @return the name of the data file for this entry's cache
*/
private TruffleFile getCacheDataPath(TruffleFile cacheRoot) {
return cacheRoot.resolve(cacheFileName(dataSuffix));
}
private TruffleFile getCacheMetadataPath(TruffleFile cacheRoot) {
return cacheRoot.resolve(cacheFileName(metadataSuffix));
}
/**
* Computes the cache file name for a given extension.
*
* @param suffix the extension
* @return the cache file name with the provided `suffix`
*/
private String cacheFileName(String suffix) {
return entryName + suffix;
}
/**
* Deletes the cache for this data in the provided `cacheRoot`.
*
* @param cacheRoot the root of the cache to delete
* @param logger a logger
*/
private void invalidateCache(TruffleFile cacheRoot, TruffleLogger logger) {
TruffleFile metadataFile = getCacheMetadataPath(cacheRoot);
TruffleFile dataFile = getCacheDataPath(cacheRoot);
doDeleteAt(cacheRoot, metadataFile, logger);
doDeleteAt(cacheRoot, dataFile, logger);
}
private void doDeleteAt(TruffleFile cacheRoot, TruffleFile file, TruffleLogger logger) {
try {
if (file.exists()) {
if (file.isWritable()) {
file.delete();
logger.log(
logLevel, "Invalidated the cache at [" + toMaskedPath(file).applyMasking() + "].");
} else {
logger.log(
logLevel,
"Cannot invalidate the cache at ["
+ toMaskedPath(file).applyMasking()
+ "]. "
+ "Cache location not writable.");
}
}
} catch (NoSuchFileException nsfe) {
// If it doesn't exist, our work has already been done for us!
} catch (IOException | SecurityException e) {
logger.log(
logLevel,
"Unable to delete the cache at [" + toMaskedPath(cacheRoot).applyMasking() + "].");
}
}
/**
* Invalidates all caches associated with this entry.
*
* @param context the language context in which loading is taking place
*/
public void invalidate(EnsoContext context) {
synchronized (this) {
TruffleLogger logger = context.getLogger(this.getClass());
getCacheRoots(context)
.ifPresent(
roots -> {
invalidateCache(roots.globalCacheRoot, logger);
invalidateCache(roots.localCacheRoot, logger);
});
}
}
protected static final Charset metadataCharset = StandardCharsets.UTF_8;
/**
* Roots encapsulates two possible locations where caches can be stored.
*
* @param localCacheRoot project's local location of the cache
* @param globalCacheRoot system's global location of the cache
*/
record Roots(TruffleFile localCacheRoot, TruffleFile globalCacheRoot) {}
private static boolean writeBytesTo(TruffleFile file, byte[] bytes) {
try (OutputStream stream =
file.newOutputStream(
StandardOpenOption.WRITE,
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING)) {
stream.write(bytes);
} catch (IOException ioe) {
return false;
} catch (SecurityException se) {
return false;
}
return true;
}
private static MaskedPath toMaskedPath(TruffleFile truffleFile) {
return new MaskedPath(Path.of(truffleFile.getPath()));
}
interface Metadata {
String sourceHash();
String blobHash();
}
public static class CacheException extends Exception {
public CacheException(String errorMessage) {
super(errorMessage);
}
}
}
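
A hedged usage sketch of this API, using the `ModuleCache` subclass that
appears later in this commit (`cachedModule` stands for a previously built
`ModuleCache.CachedModule`):

```scala
val cache = new ModuleCache(module)
// Try the global cache root first, falling back to the local root.
val savedTo: Optional[TruffleFile] =
  cache.save(cachedModule, context, /* useGlobalCacheLocations */ true)
// Load back; source and blob digests are verified along the way.
val reloaded: Optional[ModuleCache.CachedModule] = cache.load(context)
// Drop both cache roots, e.g. after detecting a stale dependency.
cache.invalidate(context)
```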

View File

@ -0,0 +1,167 @@
package org.enso.compiler;
import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import org.bouncycastle.util.encoders.Hex;
import org.enso.compiler.data.BindingsMap;
import org.enso.editions.LibraryName;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.pkg.QualifiedName;
import org.enso.pkg.SourceFile;
import scala.collection.immutable.Map;
import scala.jdk.CollectionConverters;
import java.io.IOException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;
public class ImportExportCache extends Cache<ImportExportCache.CachedBindings, ImportExportCache.Metadata> {
private final LibraryName libraryName;
public ImportExportCache(LibraryName libraryName) {
this.libraryName = libraryName;
this.logLevel = Level.FINEST;
this.stringRepr = libraryName.toString();
this.entryName = libraryName.name();
this.dataSuffix = bindingsCacheDataExtension;
this.metadataSuffix = bindingsCacheMetadataExtension;
}
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
var mapper = new ObjectMapper();
try {
return mapper.writeValueAsString(new Metadata(sourceDigest, blobDigest)).getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@Override
protected CachedBindings validateReadObject(Object obj, Metadata meta, TruffleLogger logger) throws CacheException {
if (obj instanceof MapToBindings bindings) {
return new CachedBindings(libraryName, bindings, Optional.empty());
} else {
throw new CacheException("Expected ImportExportCache.FileToBindings, got " + obj.getClass());
}
}
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(mapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
return Optional.empty();
}
}
@Override
protected Optional<String> computeDigest(CachedBindings entry, TruffleLogger logger) {
return entry.sources().map(sources -> computeDigestOfLibrarySources(sources, logger));
}
@Override
@SuppressWarnings("unchecked")
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
return context
.getPackageRepository()
.getPackageForLibraryJava(libraryName)
.map(pkg -> computeDigestOfLibrarySources(pkg.listSourcesJava(), logger));
}
private String computeDigestOfLibrarySources(List<SourceFile<TruffleFile>> pkgSources, TruffleLogger logger) {
pkgSources.sort(Comparator.comparing(o -> o.qualifiedName().toString()));
var digest = messageDigest();
pkgSources.forEach(source ->
{
try {
digest.update(source.file().readAllBytes());
} catch (IOException e) {
logger.log(logLevel, "failed to compute digest for " + source.qualifiedName().toString(), e);
}
}
);
return Hex.toHexString(digest.digest());
}
@Override
@SuppressWarnings("unchecked")
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
return context.getPackageRepository().getPackageForLibraryJava(libraryName).map(pkg -> {
var bindingsCacheRoot =
pkg.getBindingsCacheRootForPackage(Info.ensoVersion());
var localCacheRoot = bindingsCacheRoot.resolve(libraryName.namespace());
var distribution = context.getDistributionManager();
var pathSegments = CollectionConverters.ListHasAsScala(Arrays.asList(
pkg.namespace(),
pkg.name(),
pkg.config().version(),
Info.ensoVersion(),
libraryName.namespace()
)).asScala();
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(pathSegments.mkString("/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
}
@Override
protected Object extractObjectToSerialize(CachedBindings entry) {
return entry.bindings();
}
static class MapToBindings implements Serializable {
private final Map<QualifiedName, BindingsMap> _entries;
public MapToBindings(Map<QualifiedName, BindingsMap> entries) {
this._entries = entries;
}
Map<QualifiedName, BindingsMap> entries() {
return _entries;
}
}
// CachedBindings is not a record **on purpose**. There appears to be a Frgaal bug leading to an invalid compilation error.
static class CachedBindings {
private final LibraryName _libraryName;
private final MapToBindings _bindings;
private final Optional<List<SourceFile<TruffleFile>>> _sources;
public CachedBindings(LibraryName libraryName, MapToBindings bindings, Optional<List<SourceFile<TruffleFile>>> sources) {
this._libraryName = libraryName;
this._bindings = bindings;
this._sources = sources;
}
LibraryName libraryName() {
return _libraryName;
}
MapToBindings bindings() {
return _bindings;
}
Optional<List<SourceFile<TruffleFile>>> sources() {
return _sources;
}
}
public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash) implements Cache.Metadata {}
private static final String bindingsCacheDataExtension = ".bindings";
private static final String bindingsCacheMetadataExtension = ".bindings.meta";
}
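
For reference, the `.bindings.meta` sidecar written by this class is plain
JSON derived from the `Metadata` record above. A hedged sketch of the
round-trip (the digest values are made up):

```scala
val mapper = new com.fasterxml.jackson.databind.ObjectMapper()
val meta = new ImportExportCache.Metadata("9c1a...", "55f0...")
// Produces: {"source_hash":"9c1a...","blob_hash":"55f0..."}
val json = mapper.writeValueAsString(meta)
val parsed = mapper.readValue(json, classOf[ImportExportCache.Metadata])
```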

View File

@ -0,0 +1,176 @@
package org.enso.compiler;
import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleLogger;
import com.oracle.truffle.api.source.Source;
import org.enso.compiler.core.IR;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.Module;
import scala.jdk.CollectionConverters;
import org.enso.interpreter.runtime.builtin.Builtins;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
import java.util.logging.Level;
public class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCache.Metadata> {
private final Module module;
public ModuleCache(Module module) {
this.module = module;
this.logLevel = Level.FINEST;
this.stringRepr = module.getName().toString();
this.entryName = module.getName().item();
this.dataSuffix = irCacheDataExtension;
this.metadataSuffix = irCacheMetadataExtension;
}
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
var mapper = new ObjectMapper();
try {
return mapper.writeValueAsBytes(new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@Override
protected CachedModule validateReadObject(Object obj, Metadata meta, TruffleLogger logger) throws CacheException {
if (obj instanceof IR.Module ir) {
try {
return new CachedModule(ir, Module.CompilationStage.valueOf(meta.compilationStage()), module.getSource());
} catch (IOException ioe) {
throw new CacheException(ioe.getMessage());
}
} else {
throw new CacheException("Expected IR.Module, got " + obj.getClass());
}
}
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(mapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
return Optional.empty();
}
}
private Optional<String> computeDigestOfModuleSources(Source source) {
if (source != null) {
byte[] sourceBytes;
if (source.hasBytes()) {
sourceBytes = source.getBytes().toByteArray();
} else {
sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
}
return Optional.of(computeDigestFromBytes(sourceBytes));
} else {
return Optional.empty();
}
}
@Override
protected Optional<String> computeDigest(CachedModule entry, TruffleLogger logger) {
return computeDigestOfModuleSources(entry.source());
}
@Override
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
try {
return computeDigestOfModuleSources(module.getSource());
} catch (IOException e) {
logger.log(logLevel, "failed to retrieve the source of " + module.getName(), e);
return Optional.empty();
}
}
@Override
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
if (module != context.getBuiltins().getModule()) {
return context.getPackageOf(module.getSourceFile()).map(pkg -> {
var irCacheRoot = pkg.getIrCacheRootForPackage(Info.ensoVersion());
var qualName = module.getName();
var localCacheRoot = irCacheRoot.resolve(qualName.path().mkString("/"));
var distribution = context.getDistributionManager();
var pathSegmentsJava = new ArrayList<String>();
pathSegmentsJava.addAll(Arrays.asList(
pkg.namespace(),
pkg.name(),
pkg.config().version(),
Info.ensoVersion()
));
pathSegmentsJava.addAll(qualName.pathAsJava());
var pathSegments = CollectionConverters.ListHasAsScala(pathSegmentsJava).asScala();
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(pathSegments.mkString("/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
} else {
var distribution = context.getDistributionManager();
var pathSegmentsJava = new ArrayList<String>();
pathSegmentsJava.addAll(Arrays.asList(
Builtins.NAMESPACE,
Builtins.PACKAGE_NAME,
Info.ensoVersion(),
Info.ensoVersion()
));
pathSegmentsJava.addAll(module.getName().pathAsJava());
var pathSegments = CollectionConverters.ListHasAsScala(pathSegmentsJava).asScala();
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(pathSegments.mkString("/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return Optional.of(new Cache.Roots(globalCacheRoot, globalCacheRoot));
}
}
@Override
protected Object extractObjectToSerialize(CachedModule entry) {
return entry.moduleIR();
}
// CachedModule is not a record **on purpose**. There appears to be a Frgaal bug leading to an invalid compilation error.
static class CachedModule {
private final IR.Module _moduleIR;
private final Module.CompilationStage _compilationStage;
private final Source _source;
public CachedModule(IR.Module moduleIR, Module.CompilationStage compilationStage, Source source) {
this._moduleIR = moduleIR;
this._compilationStage = compilationStage;
this._source = source;
}
IR.Module moduleIR() {
return _moduleIR;
}
Module.CompilationStage compilationStage() {
return _compilationStage;
}
Source source() {
return _source;
}
}
public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash,
@JsonProperty("compilation_stage") String compilationStage) implements Cache.Metadata {}
private final static String irCacheDataExtension = ".ir";
private final static String irCacheMetadataExtension = ".meta";
}
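
A hedged sketch of serializing one module through this class, mirroring what
`SerializationManager.serializeModule` does later in this commit:

```scala
// The getters used here (getIr, getCompilationStage, getSource) all appear
// elsewhere in this change; the flow itself is illustrative.
val moduleCache = new ModuleCache(module)
val cached = new ModuleCache.CachedModule(
  module.getIr,
  module.getCompilationStage,
  module.getSource
)
// For a module named e.g. `My.Lib.Main` this writes `Main.ir` and `Main.meta`.
moduleCache.save(cached, context, /* useGlobalCacheLocations */ true)
```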

View File

@ -757,11 +757,12 @@ public abstract class EqualsNode extends Node {
return equalsNode.execute(selfFuncStrRepr, otherFuncStrRepr);
}
@Specialization(guards = "fallbackGuard(left, right, interop)")
@Specialization(guards = "fallbackGuard(left, right, interop, warningsLib)")
@TruffleBoundary
boolean equalsGeneric(Object left, Object right,
@CachedLibrary(limit = "10") InteropLibrary interop,
@CachedLibrary(limit = "10") TypesLibrary typesLib) {
@CachedLibrary(limit = "10") TypesLibrary typesLib,
@CachedLibrary(limit = "10") WarningsLibrary warningsLib) {
return left == right
|| interop.isIdentical(left, right, interop)
|| left.equals(right)
@ -771,7 +772,7 @@ public abstract class EqualsNode extends Node {
// We have to manually specify negation of guards of other specializations, because
// we cannot use @Fallback here. Note that this guard is not precisely the negation of
// all the other guards on purpose.
boolean fallbackGuard(Object left, Object right, InteropLibrary interop) {
boolean fallbackGuard(Object left, Object right, InteropLibrary interop, WarningsLibrary warnings) {
if (isPrimitive(left) && isPrimitive(right)) {
return false;
}
@ -817,6 +818,9 @@ public abstract class EqualsNode extends Node {
if (interop.isDuration(left) && interop.isDuration(right)) {
return false;
}
if (warnings.hasWarnings(left) || warnings.hasWarnings(right)) {
return false;
}
// For all other cases, fall through to the generic specialization
return true;
}

View File

@ -436,7 +436,7 @@ public class EnsoContext {
}
/**
* @param name human readable name of the pool
* @param name human-readable name of the pool
* @param systemThreads use system threads or polyglot threads
* @return new execution service for this context
*/
@ -446,7 +446,7 @@ public class EnsoContext {
/**
* @param parallel amount of parallelism for the pool
* @param name human readable name of the pool
* @param name human-readable name of the pool
* @param systemThreads use system threads or polyglot threads
* @return new execution service for this context
*/

View File

@ -6,7 +6,8 @@ import org.enso.compiler.codegen.{AstToIr, IrToTruffle, RuntimeStubsGenerator}
import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
import org.enso.compiler.core.IR
import org.enso.compiler.core.IR.Expression
import org.enso.compiler.data.CompilerConfig
import org.enso.compiler.data.{BindingsMap, CompilerConfig}
import org.enso.compiler.exception.{CompilationAbortedException, CompilerError}
import org.enso.compiler.pass.PassManager
import org.enso.compiler.pass.analyse._
@ -27,6 +28,13 @@ import org.enso.syntax.text.{AST, Parser}
import org.enso.syntax2.Tree
import java.io.{PrintStream, StringReader}
import java.util.concurrent.{
CompletableFuture,
ExecutorService,
LinkedBlockingDeque,
ThreadPoolExecutor,
TimeUnit
}
import java.util.logging.Level
import scala.jdk.OptionConverters._
@ -61,6 +69,20 @@ class Compiler(
else context.getOut
private lazy val ensoCompiler: EnsoCompiler = new EnsoCompiler()
/** The thread pool that handles parsing of modules. */
private val pool: ExecutorService = if (config.parallelParsing) {
new ThreadPoolExecutor(
Compiler.startingThreadCount,
Compiler.maximumThreadCount,
Compiler.threadKeepalive,
TimeUnit.SECONDS,
new LinkedBlockingDeque[Runnable](),
(runnable: Runnable) => {
context.getEnvironment.createThread(runnable)
}
)
} else null
/** Duplicates this compiler with a different config.
* @param newConfig Configuration to be used in the duplicated Compiler.
*/
@ -107,7 +129,7 @@ class Compiler(
}
if (irCachingEnabled && !builtins.getModule.wasLoadedFromCache()) {
serializationManager.serialize(
serializationManager.serializeModule(
builtins.getModule,
useGlobalCacheLocations = true // Builtins can't have a local cache.
)
@ -115,20 +137,6 @@ class Compiler(
}
}
/** Runs the import resolver on the given module.
*
* @param module the entry module for import resolution
* @return the list of modules imported by `module`
*/
def runImportsResolution(module: Module): List[Module] = {
initialize()
try {
importResolver.mapImports(module)
} catch {
case e: ImportResolver.HiddenNamesConflict => reportExportConflicts(e)
}
}
/** Processes the provided language sources, registering any bindings in the
* given scope.
*
@ -187,6 +195,10 @@ class Compiler(
generateCode = false,
shouldCompileDependencies
)
serializationManager.serializeLibraryBindings(
pkg.libraryName,
useGlobalCacheLocations = true
)
}
}
}
@ -241,10 +253,11 @@ class Compiler(
modules.foreach(m => parseModule(m))
var requiredModules = modules.flatMap { module =>
val modules = runImportsAndExportsResolution(module)
val modules = runImportsAndExportsResolution(module, generateCode)
if (
module
.wasLoadedFromCache() && modules.exists(!_.wasLoadedFromCache())
.wasLoadedFromCache() && modules
.exists(m => !m.wasLoadedFromCache() && !m.isSynthetic)
) {
logger.log(
Compiler.defaultLogLevel,
@ -252,7 +265,7 @@ class Compiler(
)
module.getCache.invalidate(context)
parseModule(module)
runImportsAndExportsResolution(module)
runImportsAndExportsResolution(module, generateCode)
} else {
modules
}
@ -290,7 +303,8 @@ class Compiler(
s"export resolution."
)
requiredModules = modules.flatMap(runImportsAndExportsResolution)
requiredModules =
modules.flatMap(runImportsAndExportsResolution(_, generateCode))
}
requiredModules.foreach { module =>
@ -368,7 +382,10 @@ class Compiler(
val shouldStoreCache =
irCachingEnabled && !module.wasLoadedFromCache()
if (shouldStoreCache && !hasErrors(module) && !module.isInteractive) {
serializationManager.serialize(module, useGlobalCacheLocations)
serializationManager.serializeModule(
module,
useGlobalCacheLocations
)
}
} else {
logger.log(
@ -389,10 +406,13 @@ class Compiler(
} else false
}
private def runImportsAndExportsResolution(module: Module): List[Module] = {
val importedModules =
private def runImportsAndExportsResolution(
module: Module,
bindingsCachingEnabled: Boolean
): List[Module] = {
val (importedModules, modulesImportedWithCachedBindings) =
try {
importResolver.mapImports(module)
importResolver.mapImports(module, bindingsCachingEnabled)
} catch {
case e: ImportResolver.HiddenNamesConflict => reportExportConflicts(e)
}
@ -401,7 +421,30 @@ class Compiler(
try { new ExportsResolution().run(importedModules) }
catch { case e: ExportCycleException => reportCycle(e) }
requiredModules
val parsingTasks: List[CompletableFuture[Unit]] =
modulesImportedWithCachedBindings.map { module =>
if (config.parallelParsing) {
CompletableFuture.supplyAsync(() => ensureParsed(module), pool)
} else {
CompletableFuture.completedFuture(ensureParsed(module))
}
}
joinAllFutures(parsingTasks).get()
// ** Order matters for codegen **
// Consider a case when an exported symbol is referenced but the module that defines the symbol
// has not yet registered the method in its scope. This would result in a No_Such_Method error at runtime;
// the symbol brought into scope has not been properly resolved yet.
val sortedCachedModules =
new ExportsResolution().runSort(modulesImportedWithCachedBindings)
sortedCachedModules ++ requiredModules
}
private def joinAllFutures[T](
futures: List[CompletableFuture[T]]
): CompletableFuture[List[T]] = {
CompletableFuture.allOf(futures: _*).thenApply(_ => futures.map(_.join()))
}
/** Runs the initial passes of the compiler to gather the import statements,
@ -451,6 +494,20 @@ class Compiler(
uncachedParseModule(module, isGenDocs)
}
/** Retrieve module bindings from cache, if available.
*
* @param module the module being considered
* @return module's bindings, if available in libraries' bindings cache
*/
def importExportBindings(module: Module): Option[BindingsMap] = {
if (irCachingEnabled && !module.isInteractive) {
val libraryName = Option(module.getPackage).map(_.libraryName)
libraryName
.flatMap(packageRepository.getLibraryBindings(_, serializationManager))
.flatMap(_.bindings.entries.get(module.getName))
} else None
}
private def uncachedParseModule(module: Module, isGenDocs: Boolean): Unit = {
logger.log(
Compiler.defaultLogLevel,
@ -1009,6 +1066,28 @@ class Compiler(
*/
def shutdown(waitForPendingJobCompletion: Boolean): Unit = {
serializationManager.shutdown(waitForPendingJobCompletion)
shutdownParsingPool(waitForPendingJobCompletion)
}
private def shutdownParsingPool(waitForPendingCompilation: Boolean): Unit = {
if (pool != null) {
if (waitForPendingCompilation) {
pool.shutdown()
// Bound the waiting loop
val maxCount = 10
var counter = 0
while (!pool.isTerminated && counter < maxCount) {
counter += 1
pool.awaitTermination((50 * counter).toLong, TimeUnit.MILLISECONDS)
}
pool.shutdownNow()
Thread.sleep(100)
} else {
pool.shutdownNow()
}
}
}
/** Updates the metadata in a copy of the IR when updating that metadata
@ -1034,4 +1113,13 @@ object Compiler {
/** The default logging level for the compiler. */
private val defaultLogLevel: Level = Level.FINE
/** The maximum number of parsing threads allowed. */
val maximumThreadCount: Integer = 10
/** The number of threads at compiler start. */
val startingThreadCount: Integer = 2
/** The thread keep-alive time in seconds. */
val threadKeepalive: Long = 2
}

View File

@ -1,540 +0,0 @@
package org.enso.compiler
import java.io._
import java.nio.charset.{Charset, StandardCharsets}
import java.nio.file._
import java.util.logging.Level
import buildinfo.Info
import com.oracle.truffle.api.source.Source
import com.oracle.truffle.api.{TruffleFile, TruffleLogger}
import io.circe.generic.JsonCodec
import io.circe.parser._
import io.circe.syntax._
import org.bouncycastle.jcajce.provider.digest.SHA3
import org.bouncycastle.util.encoders.Hex
import org.enso.compiler.ModuleCache.{logLevel, ToMaskedPath}
import org.enso.compiler.core.IR
import org.enso.interpreter.runtime.builtin.Builtins
import org.enso.interpreter.runtime.{EnsoContext, Module}
import org.enso.logger.masking.MaskedPath
import scala.jdk.OptionConverters._
import scala.util.{Failure, Success, Using}
// TODO Once #1971 is fixed, the logging statements should go back to using our
// normal templating syntax.
/** Responsible for the management of caches associated with a given module.
*
* @param module the module for which the caches are managed.
*/
class ModuleCache(private val module: Module) {
// === Interface ============================================================
/** Saves the provided IR into the cache for this module.
*
* @param module the module representation to be saved
* @param context the language context in which saving is taking place
* @param useGlobalCacheLocations if true, will use global caches' location, local one otherwise
* @return returns the location of the cache if successful, and [[None]] if
* it was unable to save
*/
def save(
module: ModuleCache.CachedModule,
context: EnsoContext,
useGlobalCacheLocations: Boolean
): Option[TruffleFile] = this.synchronized {
implicit val logger: TruffleLogger = context.getLogger(this.getClass)
getIrCacheRoots(context) match {
case Some(roots) =>
if (useGlobalCacheLocations) {
if (saveCacheTo(roots.globalCacheRoot, module)) {
return Some(roots.globalCacheRoot)
}
} else {
logger.log(
logLevel,
s"Skipping use of global cache locations for module " +
s"${this.module.getName}."
)
}
if (saveCacheTo(roots.localCacheRoot, module)) {
return Some(roots.localCacheRoot)
}
logger.log(
ModuleCache.logLevel,
s"Unable to write cache data for module [${this.module.getName.toString}]."
)
None
case None => None
}
}
/** Loads the cache for this module where possible.
*
* @param context the langage context in which loading is taking place
* @return the cached module data if possible, and [[None]] if it could not
* load a valid cache
*/
//noinspection DuplicatedCode
def load(context: EnsoContext): Option[ModuleCache.CachedModule] =
this.synchronized {
implicit val logger: TruffleLogger = context.getLogger(this.getClass)
getIrCacheRoots(context) match {
case Some(roots) =>
// Load from the global root as a priority.
loadCacheFrom(roots.globalCacheRoot) match {
case cache @ Some(_) =>
logger.log(
ModuleCache.logLevel,
s"Using cache for module " +
s"[${module.getName.toString}] at location " +
s"[${roots.globalCacheRoot.toMaskedPath.applyMasking()}]."
)
return cache
case None =>
}
// Load from the local root if the global root fails.
loadCacheFrom(roots.localCacheRoot) match {
case cache @ Some(_) =>
logger.log(
ModuleCache.logLevel,
s"Using cache for module " +
s"[${module.getName.toString}] at location " +
s"[${roots.localCacheRoot.toMaskedPath.applyMasking()}]."
)
return cache
case None =>
}
logger.log(
ModuleCache.logLevel,
s"Unable to load a cache for module [${module.getName.toString}]"
)
None
case None => None
}
}
/** Invalidates all caches associated with the module.
*
* @param context the langage context in which loading is taking place
*/
def invalidate(context: EnsoContext): Unit = {
this.synchronized {
implicit val logger: TruffleLogger = context.getLogger(this.getClass)
getIrCacheRoots(context).foreach { roots =>
invalidateCache(roots.globalCacheRoot)
invalidateCache(roots.localCacheRoot)
}
}
}
// === Internals ============================================================
/** Saves the cache into the provided `cacheRoot`.
*
* @param cacheRoot the root into which the cache should be saved
* @param module the data that should be cached for this module
* @param logger a logger
* @return `true` if the cache was written successfully, `false` otherwise
*/
private def saveCacheTo(
cacheRoot: TruffleFile,
module: ModuleCache.CachedModule
)(implicit logger: TruffleLogger): Boolean = {
if (ensureRoot(cacheRoot)) {
val byteStream: ByteArrayOutputStream = new ByteArrayOutputStream()
val bytesToWrite =
Using.resource(new ObjectOutputStream(byteStream)) { stream =>
stream.writeObject(module.module)
byteStream.toByteArray
}
val blobDigest = computeDigest(bytesToWrite)
val sourceDigest = computeSourceDigest(module.source)
val compilationStage = module.compilationStage.toString
val metadata =
ModuleCache.Metadata(sourceDigest, blobDigest, compilationStage)
val metadataBytes =
metadata.asJson.toString().getBytes(ModuleCache.metadataCharset)
val cacheDataFile = getCacheDataPath(cacheRoot)
val metadataFile = getCacheMetadataPath(cacheRoot)
val parentPath = cacheDataFile.getParent
if (writeBytesTo(cacheDataFile, bytesToWrite)) {
if (writeBytesTo(metadataFile, metadataBytes)) {
logger.log(
ModuleCache.logLevel,
s"Written cache data for module " +
s"[${this.module.getName.toString}] " +
s"to [${parentPath.toMaskedPath.applyMasking()}]."
)
return true
} else {
// Clean up after ourselves if it fails.
cacheDataFile.delete()
}
}
return true
}
false
}
/** Write the provided `bytes` to the provided `file`.
*
* @param file the file to write the bytes to
* @param bytes the bytes to write into `file`
* @return `true` if writing completed successfully, `false` otherwise
*/
private def writeBytesTo(
file: TruffleFile,
bytes: Array[Byte]
): Boolean = {
try {
Using.resource(
file.newOutputStream(
StandardOpenOption.WRITE,
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING
)
) { stream =>
stream.write(bytes)
}
true
} catch {
case _: IOException | _: SecurityException => false
}
}
/** Loads the cache from the provided `cacheRoot`, invalidating the cache if
* the loading fails for any reason.
*
* @param cacheRoot the root at which to find the cache for this module
* @param logger a logger
* @return the cached module representation if available, otherwise [[None]].
*/
private def loadCacheFrom(
cacheRoot: TruffleFile
)(implicit logger: TruffleLogger): Option[ModuleCache.CachedModule] = {
val metadataPath = getCacheMetadataPath(cacheRoot)
val dataPath = getCacheDataPath(cacheRoot)
loadCacheMetadata(metadataPath) match {
case Some(meta) =>
val sourceDigestValid =
computeSourceDigest(module.getSource) == meta.sourceHash
val blobBytes = dataPath.readAllBytes()
val blobDigestValid = computeDigest(blobBytes) == meta.blobHash
val compilationStage =
Module.CompilationStage.valueOf(meta.compilationStage)
if (sourceDigestValid && blobDigestValid) {
val readObject =
Using(new ObjectInputStream(new ByteArrayInputStream(blobBytes))) {
_.readObject()
}
readObject match {
case Success(mod: IR.Module) =>
Some(
ModuleCache.CachedModule(
mod,
compilationStage,
module.getSource
)
)
case Success(_) =>
logger.log(
ModuleCache.logLevel,
s"Module `${module.getName.toString}` was corrupt on disk."
)
None
case Failure(ex) =>
logger.log(
ModuleCache.logLevel,
s"Module `${module.getName.toString}` failed to load " +
s"(caused by: ${ex.getMessage})."
)
None
}
} else {
logger.log(
ModuleCache.logLevel,
s"One or more digests did not match for the cache for " +
s"module [${module.getName.toString}]."
)
invalidateCache(cacheRoot)
None
}
case None =>
logger.log(
ModuleCache.logLevel,
s"Could not load the cache metadata " +
s"at [${metadataPath.toMaskedPath.applyMasking()}]"
)
invalidateCache(cacheRoot)
None
}
}
/** Loads the cache metadata into an object from the provided path.
*
* It provides _no_ verification of the provided path, and will
* unconditionally try and load it.
*
* @param path the path to the metadata file
* @return an object representation of the metadata at `path`, where possible
*/
private def loadCacheMetadata(
path: TruffleFile
): Option[ModuleCache.Metadata] = {
if (path.isReadable) {
val bytes = path.readAllBytes()
val maybeJsonString = new String(bytes, ModuleCache.metadataCharset)
val decoded =
decode[ModuleCache.Metadata](maybeJsonString).toOption
decoded
} else {
None
}
}
/** Deletes the cache for this module in the provided `cacheRoot`.
*
* @param cacheRoot the root of the cache to delete
* @param logger a logger
*/
private def invalidateCache(
cacheRoot: TruffleFile
)(implicit logger: TruffleLogger): Unit = {
val metadataFile = getCacheMetadataPath(cacheRoot)
val dataFile = getCacheDataPath(cacheRoot)
def doDeleteAt(file: TruffleFile): Unit = {
try {
if (file.exists()) {
if (file.isWritable) {
file.delete()
logger.log(
ModuleCache.logLevel,
s"Invalidated the cache at [${file.toMaskedPath.applyMasking()}]."
)
} else {
logger.log(
ModuleCache.logLevel,
s"Cannot invalidate the cache at " +
s"[${file.toMaskedPath.applyMasking()}]. " +
s"Cache location not writable."
)
}
}
} catch {
case _: NoSuchFileException =>
// If it doesn't exist, our work has already been done for us!
case _: DirectoryNotEmptyException | _: IOException |
_: SecurityException =>
logger.log(
ModuleCache.logLevel,
s"Unable to delete the cache at " +
s"[${cacheRoot.toMaskedPath.applyMasking()}]."
)
}
}
doDeleteAt(metadataFile)
doDeleteAt(dataFile)
}
/** Gets the potential cache roots for this module.
*
* @param context the language context in which the cache is being run
* @return a representation of the local and global cache roots for this
* module
*/
private def getIrCacheRoots(
context: EnsoContext
): Option[ModuleCache.Roots] = {
if (module != context.getBuiltins.getModule) {
context.getPackageOf(module.getSourceFile).toScala.map { pkg =>
val irCacheRoot = pkg.getIrCacheRootForPackage(Info.ensoVersion)
val qualName = module.getName
val localCacheRoot = irCacheRoot.resolve(qualName.path.mkString("/"))
val distribution = context.getDistributionManager
val pathSegments = List(
pkg.namespace,
pkg.name,
pkg.config.version,
Info.ensoVersion
) ++ qualName.path
val path = distribution.LocallyInstalledDirectories.irCacheDirectory
.resolve(pathSegments.mkString("/"))
val globalCacheRoot = context.getTruffleFile(path.toFile)
ModuleCache.Roots(localCacheRoot, globalCacheRoot)
}
} else {
val distribution = context.getDistributionManager
val pathSegments = List(
Builtins.NAMESPACE,
Builtins.PACKAGE_NAME,
Info.ensoVersion,
Info.ensoVersion
) ++ module.getName.path
val path = distribution.LocallyInstalledDirectories.irCacheDirectory
.resolve(pathSegments.mkString("/"))
val globalCacheRoot = context.getTruffleFile(path.toFile)
Some(ModuleCache.Roots(globalCacheRoot, globalCacheRoot))
}
}
// === Utilities ============================================================
/** Ensures that the provided cache root exists and is writable.
*
* @param cacheRoot the cache root
* @return `true` if the root exists and is writable, `false` otherwise
*/
private def ensureRoot(cacheRoot: TruffleFile): Boolean = {
try {
if (cacheRoot.exists() && cacheRoot.isDirectory()) {
return cacheRoot.isWritable
} else {
cacheRoot.createDirectories()
return cacheRoot.isWritable
}
} catch {
case _: IOException | _: UnsupportedOperationException |
_: SecurityException | _: FileAlreadyExistsException =>
}
false
}
/** Computes the SHA3-224 digest of the source file that underlies this
* module.
*
* @return the hex string representing the digest
*/
private def computeSourceDigest(source: Source): String = {
val sourceBytes = if (source.hasBytes) {
source.getBytes.toByteArray
} else {
source.getCharacters.toString.getBytes(StandardCharsets.UTF_8)
}
computeDigest(sourceBytes)
}
/** Computes the SHA3-224 digest of the provided byte array.
*
* @param bytes the bytes to compute the digest of
* @return the hex string representing the digest of `bytes`
*/
private def computeDigest(bytes: Array[Byte]): String = {
Hex.toHexString(new SHA3.Digest224().digest(bytes))
}
/** @return the name of the data file for the cache
*/
private def irCacheDataName: String = {
cacheFileName(ModuleCache.irCacheDataExtension)
}
/** @return the name of the metadata file for the cache
*/
private def irCacheMetadataName: String = {
cacheFileName(ModuleCache.irCacheMetadataExtension)
}
/** Computes the cache file name for a given extension.
*
* @param ext the extension
* @return the cache file name with the provided `ext`
*/
private def cacheFileName(ext: String): String = {
s"${module.getName.item}$ext"
}
/** Gets the path to the cache metadata within the `cacheRoot`.
*
* @param cacheRoot the root of the cache for this module
* @return the name of the metadata for this module's cache
*/
private def getCacheMetadataPath(cacheRoot: TruffleFile): TruffleFile =
cacheRoot.resolve(irCacheMetadataName)
/** Gets the path to the cache data within the `cacheRoot`.
*
* @param cacheRoot the root of the cache for this module
* @return the name of the data file for this module's cache
*/
private def getCacheDataPath(cacheRoot: TruffleFile): TruffleFile = {
cacheRoot.resolve(irCacheDataName)
}
}
object ModuleCache {
val irCacheDataExtension: String = ".ir"
val irCacheMetadataExtension: String = ".meta"
val metadataCharset: Charset = StandardCharsets.UTF_8
/** The default logging level. */
private val logLevel = Level.FINEST
/** A representation of the cache roots for the module. */
case class Roots(localCacheRoot: TruffleFile, globalCacheRoot: TruffleFile)
/** A representation of a module for the cache.
*
* @param module the module
* @param compilationStage the compilation stage of the module at the point
* at which it was provided for serialisation
* @param source the source of the module
*/
case class CachedModule(
module: IR.Module,
compilationStage: Module.CompilationStage,
source: Source
)
/** Internal storage for the cache metadata to enable easy serialisation and
* deserialisation.
*
* @param sourceHash the hex-encoded SHA3-224 hash of the source file for
* which the cache was created
* @param blobHash the hex-encoded SHA3-224 hash of the IR blob as it was
* written to disk
* @param compilationStage the compilation stage at which the IR blob was
* serialised
*/
@JsonCodec
case class Metadata(
sourceHash: String,
blobHash: String,
compilationStage: String
)
/** Convert a [[TruffleFile]] to a [[MaskedPath]] for the purposes of logging.
*
* @param truffleFile the file to convert.
*/
implicit class ToMaskedPath(truffleFile: TruffleFile) {
def toMaskedPath: MaskedPath = {
MaskedPath(Path.of(truffleFile.getPath))
}
}
}

View File

@ -10,6 +10,7 @@ import org.enso.interpreter.instrument.NotificationHandler
import org.enso.interpreter.runtime.builtin.Builtins
import org.enso.interpreter.runtime.util.TruffleFileSystem
import org.enso.interpreter.runtime.{EnsoContext, Module}
import org.enso.librarymanager.published.repository.LibraryManifest
import org.enso.librarymanager.resolved.LibraryRoot
import org.enso.librarymanager.{
DefaultLibraryProvider,
@ -30,7 +31,8 @@ import org.enso.text.buffer.Rope
import java.nio.file.Path
import scala.collection.immutable.ListSet
import scala.util.Try
import scala.jdk.OptionConverters.RichOption
import scala.util.{Failure, Try, Using}
/** Manages loaded packages and modules. */
trait PackageRepository {
@ -113,6 +115,25 @@ trait PackageRepository {
/** Checks if any library with a given namespace has been registered */
def isNamespaceRegistered(namespace: String): Boolean
/** Returns a package directory corresponding to the requested library */
def getPackageForLibrary(lib: LibraryName): Option[Package[TruffleFile]]
/** Returns a package directory corresponding to the requested library */
def getPackageForLibraryJava(
libraryName: LibraryName
): java.util.Optional[Package[TruffleFile]] =
getPackageForLibrary(libraryName).toJava
/** Returns all loaded modules of the requested library */
def getModulesForLibrary(libraryName: LibraryName): List[Module]
/** Returns a deserialized bindings map for the whole library, if available */
def getLibraryBindings(
libraryName: LibraryName,
serializationManager: SerializationManager
): Option[ImportExportCache.CachedBindings]
}
object PackageRepository {
@ -213,6 +234,13 @@ object PackageRepository {
collection.mutable.LinkedHashMap(builtinsName -> ComponentGroups.empty)
}
/** The mapping between the library and its cached bindings, if already loaded. */
private val loadedLibraryBindings: collection.mutable.Map[
LibraryName,
ImportExportCache.CachedBindings
] =
collection.mutable.LinkedHashMap()
private def getComponentModules: ListSet[Module] = {
val modules = for {
componentGroups <- loadedComponents.values
@ -293,7 +321,8 @@ object PackageRepository {
val extensions = pkg.listPolyglotExtensions("java")
extensions.foreach(context.getEnvironment.addToHostClassPath)
val (regularModules, syntheticModulesMetadata) = pkg.listSources
val (regularModules, syntheticModulesMetadata) = pkg
.listSources()
.map(srcFile =>
(
new Module(srcFile.qualifiedName, pkg, srcFile.file),
@ -641,6 +670,51 @@ object PackageRepository {
override def isNamespaceRegistered(namespace: String): Boolean =
loadedPackages.keySet.exists(_.namespace == namespace)
override def getPackageForLibrary(
libraryName: LibraryName
): Option[Package[TruffleFile]] =
loadedPackages.get(libraryName).flatten
override def getModulesForLibrary(libraryName: LibraryName): List[Module] =
getPackageForLibrary(libraryName)
.map(pkg => loadedModules.values.filter(_.getPackage == pkg).toList)
.getOrElse(Nil)
override def getLibraryBindings(
libraryName: LibraryName,
serializationManager: SerializationManager
): Option[ImportExportCache.CachedBindings] = {
ensurePackageIsLoaded(libraryName).toOption.flatMap { _ =>
if (!loadedLibraryBindings.contains(libraryName)) {
loadedPackages.get(libraryName).flatten.foreach(loadDependencies(_))
serializationManager
.deserializeLibraryBindings(libraryName)
.foreach(cache =>
loadedLibraryBindings.addOne((libraryName, cache))
)
}
loadedLibraryBindings.get(libraryName)
}
}
private def loadDependencies(pkg: Package[TruffleFile]): Unit = {
val manifestFile = fs.getChild(pkg.root, LibraryManifest.filename)
readManifest(manifestFile)
.flatMap(LibraryManifest.fromYaml(_))
.foreach(
_.dependencies.foreach(ensurePackageIsLoaded)
)
}
private def readManifest(file: TruffleFile): Try[String] = {
import scala.jdk.CollectionConverters._
if (file.exists())
Using(file.newBufferedReader) { reader =>
reader.lines().iterator().asScala.mkString("\n")
}
else Failure(PackageManager.PackageNotFound())
}
}
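For context, `loadDependencies` keys off the `dependencies` field of the library's manifest; a hypothetical manifest might look like:

  // manifest.yaml (illustrative content)
  //   archives:
  //     - Main.tgz
  //   dependencies:
  //     - Standard.Base

Each listed dependency is passed through `ensurePackageIsLoaded`, so its bindings can be resolved without scheduling a full compilation.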
/** Creates a [[PackageRepository]] for the run.

View File

@ -3,6 +3,8 @@ package org.enso.compiler
import com.oracle.truffle.api.TruffleLogger
import com.oracle.truffle.api.source.Source
import org.enso.compiler.core.IR
import org.enso.compiler.pass.analyse.BindingAnalysis
import org.enso.editions.LibraryName
import org.enso.interpreter.runtime.Module
import org.enso.pkg.QualifiedName
@ -18,8 +20,10 @@ import java.util.concurrent.{
}
import java.util.logging.Level
import scala.collection.mutable
import scala.jdk.OptionConverters.RichOptional
class SerializationManager(compiler: Compiler) {
import SerializationManager._
/** The debug logging level. */
private val debugLogLevel = Level.FINE
@ -86,7 +90,7 @@ class SerializationManager(compiler: Compiler) {
* @return Future referencing the serialization task. On completion the future will return
* `true` if `module` has been successfully serialized, `false` otherwise
*/
def serialize(
def serializeModule(
module: Module,
useGlobalCacheLocations: Boolean
): Future[Boolean] = {
@ -100,7 +104,7 @@ class SerializationManager(compiler: Compiler) {
)
duplicatedIr.preorder.foreach(_.passData.prepareForSerialization(compiler))
val task = doSerialize(
val task = doSerializeModule(
module.getCache,
duplicatedIr,
module.getCompilationStage,
@ -129,6 +133,119 @@ class SerializationManager(compiler: Compiler) {
}
}
def serializeLibraryBindings(
libraryName: LibraryName,
useGlobalCacheLocations: Boolean
): Future[Boolean] = {
logger.log(
Level.INFO,
s"Requesting serialization for library [$libraryName] bindings."
)
val task: Callable[Boolean] =
doSerializeLibrary(libraryName, useGlobalCacheLocations)
if (compiler.context.getEnvironment.isCreateThreadAllowed) {
isWaitingForSerialization.synchronized {
val future = pool.submit(task)
isWaitingForSerialization.put(libraryName.toQualifiedName, future)
future
}
} else {
try {
CompletableFuture.completedFuture(task.call())
} catch {
case e: Throwable =>
logger.log(
debugLogLevel,
s"Serialization task failed for library [${libraryName}].",
e
)
CompletableFuture.completedFuture(false)
}
}
}
def doSerializeLibrary(
libraryName: LibraryName,
useGlobalCacheLocations: Boolean
): Callable[Boolean] = () => {
logger.log(
debugLogLevel,
s"Running serialization for bindings [$libraryName]."
)
startSerializing(libraryName.toQualifiedName)
val bindingsCache = new ImportExportCache.CachedBindings(
libraryName,
new ImportExportCache.MapToBindings(
compiler.packageRepository
.getModulesForLibrary(libraryName)
.map { module =>
val ir = module.getIr
val bindings = ir.unsafeGetMetadata(
BindingAnalysis,
"Non-parsed module used in bindings serialization"
)
val abstractBindings = bindings.prepareForSerialization(compiler)
(module.getName, abstractBindings)
}
.toMap
),
compiler.packageRepository
.getPackageForLibraryJava(libraryName)
.map(_.listSourcesJava())
)
try {
new ImportExportCache(libraryName)
.save(bindingsCache, compiler.context, useGlobalCacheLocations)
.isPresent()
} catch {
case e: NotSerializableException =>
logger.log(
Level.SEVERE,
s"Could not serialize bindings [$libraryName].",
e
)
throw e
case e: Throwable =>
logger.log(
Level.SEVERE,
s"Serialization of bindings `$libraryName` failed: ${e.getMessage}`",
e
)
throw e
} finally {
finishSerializing(libraryName.toQualifiedName)
}
}
def deserializeLibraryBindings(
libraryName: LibraryName
): Option[ImportExportCache.CachedBindings] = {
if (isWaitingForSerialization(libraryName)) {
abort(libraryName)
None
} else {
while (isSerializingLibrary(libraryName)) {
Thread.sleep(100)
}
new ImportExportCache(libraryName).load(compiler.context).toScala match {
case result @ Some(_: ImportExportCache.CachedBindings) =>
logger.log(
Level.FINE,
s"Restored bindings for library [$libraryName]."
)
result
case _ =>
logger.log(
Level.FINEST,
s"Unable to load bindings for library [${libraryName}]."
)
None
}
}
}
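Taken together, the two entry points form a round trip; a hypothetical sketch of the pairing (names assumed to be in scope):

  val future = serializationManager.serializeLibraryBindings(libraryName, useGlobalCacheLocations = true)
  if (future.get()) {
    // Possibly in a later run: restore the whole library's bindings at once.
    val restored: Option[ImportExportCache.CachedBindings] =
      serializationManager.deserializeLibraryBindings(libraryName)
  }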
/** Deserializes the requested module from the cache if possible.
*
* If the requested module is currently being serialized it will wait for
@ -146,20 +263,23 @@ class SerializationManager(compiler: Compiler) {
abort(module)
None
} else {
while (isSerializing(module)) {
while (isSerializingModule(module)) {
Thread.sleep(100)
}
module.getCache.load(compiler.context) match {
case Some(ModuleCache.CachedModule(ir, stage, _)) =>
module.getCache.load(compiler.context).toScala match {
case Some(loadedCache) =>
val relinkedIrChecks =
ir.preorder.map(_.passData.restoreFromSerialization(this.compiler))
module.unsafeSetIr(ir)
module.unsafeSetCompilationStage(stage)
loadedCache
.moduleIR()
.preorder
.map(_.passData.restoreFromSerialization(this.compiler))
module.unsafeSetIr(loadedCache.moduleIR())
module.unsafeSetCompilationStage(loadedCache.compilationStage())
module.setLoadedFromCache(true)
logger.log(
debugLogLevel,
s"Restored IR from cache for module [${module.getName}] at stage [$stage]."
s"Restored IR from cache for module [${module.getName}] at stage [${loadedCache.compilationStage()}]."
)
if (!relinkedIrChecks.contains(false)) {
@ -193,18 +313,46 @@ class SerializationManager(compiler: Compiler) {
* @return `true` if `module` is currently being serialized, `false`
* otherwise
*/
def isSerializing(module: Module): Boolean = {
def isSerializingModule(module: Module): Boolean = {
isSerializing.contains(module.getName)
}
def isSerializingLibrary(library: LibraryName): Boolean = {
isSerializing.contains(library.toQualifiedName)
}
private def isWaitingForSerialization(name: QualifiedName): Boolean = {
isWaitingForSerialization.synchronized {
isWaitingForSerialization.contains(name)
}
}
/** Checks if the provided module is waiting for serialization.
*
* @param module the module to check
* @return `true` if `module` is waiting for serialization, `false` otherwise
*/
def isWaitingForSerialization(module: Module): Boolean = {
isWaitingForSerialization(module.getName)
}
/** Checks if the provided library's bindings are waiting for serialization.
*
* @param library the library to check
* @return `true` if `library` is waiting for serialization, `false` otherwise
*/
def isWaitingForSerialization(library: LibraryName): Boolean = {
isWaitingForSerialization(library.toQualifiedName)
}
def abort(name: QualifiedName): Boolean = {
isWaitingForSerialization.synchronized {
isWaitingForSerialization.contains(module.getName)
if (isWaitingForSerialization(name)) {
isWaitingForSerialization
.remove(name)
.map(_.cancel(false))
.getOrElse(false)
} else false
}
}
@ -218,14 +366,20 @@ class SerializationManager(compiler: Compiler) {
* otherwise
*/
def abort(module: Module): Boolean = {
isWaitingForSerialization.synchronized {
if (isWaitingForSerialization(module)) {
isWaitingForSerialization
.remove(module.getName)
.map(_.cancel(false))
.getOrElse(false)
} else false
}
abort(module.getName)
}
/** Requests that serialization of library's bindings be aborted.
*
* If the library is already in the process of serialization it will not be
* aborted.
*
* @param library the library for which to abort serialization
* @return `true` if serialization for `library` was aborted, `false`
* otherwise
*/
def abort(library: LibraryName): Boolean = {
abort(library.toQualifiedName)
}
/** Performs shutdown actions for the serialization manager.
@ -291,7 +445,7 @@ class SerializationManager(compiler: Compiler) {
* @param useGlobalCacheLocations if true, will use global caches location, local one otherwise
* @return the task that serializes the provided `ir`
*/
private def doSerialize(
private def doSerializeModule(
cache: ModuleCache,
ir: IR.Module,
stage: Module.CompilationStage,
@ -311,12 +465,12 @@ class SerializationManager(compiler: Compiler) {
} else stage
cache
.save(
ModuleCache.CachedModule(ir, fixedStage, source),
new ModuleCache.CachedModule(ir, fixedStage, source),
compiler.context,
useGlobalCacheLocations
)
.map(_ => true)
.getOrElse(false)
.orElse(false)
} catch {
case e: NotSerializableException =>
logger.log(
@ -381,4 +535,11 @@ object SerializationManager {
/** The thread keep-alive time in seconds. */
val threadKeepalive: Long = 3
implicit private class LibraryOps(val libraryName: LibraryName)
extends AnyVal {
def toQualifiedName: QualifiedName =
QualifiedName(List(libraryName.namespace), libraryName.name)
}
}
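The `toQualifiedName` bridge lets library-level serialization reuse the module-keyed bookkeeping (`isSerializing`, `isWaitingForSerialization`) instead of introducing parallel maps. Illustrative values, not from this diff:

  // LibraryName("Standard", "Base").toQualifiedName
  //   == QualifiedName(List("Standard"), "Base"), rendered as `Standard.Base`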

View File

@ -818,6 +818,10 @@ class IrToTruffle(
val fun = actualModule.getScope.getMethods
.get(actualModule.getScope.getAssociatedType)
.get(method.name)
assert(
fun != null,
s"exported symbol `${method.name}` needs to be registered first in the module "
)
moduleScope.registerMethod(
moduleScope.getAssociatedType,
name,

View File

@ -21,6 +21,10 @@ import scala.annotation.unused
* @param moduleMethods the methods defined with current module as `this`
* @param currentModule the module holding these bindings
*/
@SerialVersionUID(
5568L // stable serialization of bindings
)
case class BindingsMap(
definedEntities: List[DefinedEntity],
currentModule: ModuleReference

View File

@ -15,4 +15,6 @@ case class CompilerConfig(
warningsEnabled: Boolean = true,
isStrictErrors: Boolean = false,
outputRedirect: Option[PrintStream] = None
)
) {
def parallelParsing: Boolean = false
}

View File

@ -17,7 +17,7 @@ object MethodCalls extends IRPass {
override type Config = IRPass.Configuration.Default
override val precursorPasses: Seq[IRPass] =
Seq(BindingAnalysis)
Seq(BindingAnalysis, GlobalNames)
override val invalidatedPasses: Seq[IRPass] = Seq()
override def updateMetadataInDuplicate[T <: IR](sourceIr: T, copyOfIr: T): T =
@ -36,9 +36,7 @@ object MethodCalls extends IRPass {
ir: IR.Module,
moduleContext: ModuleContext
): IR.Module = {
ir.mapExpressions(
doExpression(ir.unsafeGetMetadata(BindingAnalysis, ""), _)
)
ir.mapExpressions(doExpression)
}
/** Executes the pass on the provided `ir`, and returns a possibly transformed
@ -54,18 +52,14 @@ object MethodCalls extends IRPass {
ir: IR.Expression,
inlineContext: InlineContext
): IR.Expression = {
doExpression(
inlineContext.module.getIr.unsafeGetMetadata(BindingAnalysis, ""),
ir
)
doExpression(ir)
}
private def doExpression(
bindingsMap: BindingsMap,
expr: IR.Expression
): IR.Expression = {
expr.transformExpressions { case app: IR.Application.Prefix =>
def fallback = app.mapExpressions(doExpression(bindingsMap, _))
def fallback = app.mapExpressions(doExpression(_))
app.function match {
case name: IR.Name if name.isMethod =>
app.arguments match {
@ -91,7 +85,7 @@ object MethodCalls extends IRPass {
name.updateMetadata(this -->> Resolution(resolution))
val newArgs =
app.arguments.map(
_.mapExpressions(doExpression(bindingsMap, _))
_.mapExpressions(doExpression(_))
)
app.copy(function = newName, arguments = newArgs)
case _ => fallback

View File

@ -228,4 +228,14 @@ class ExportsResolution {
// Take _last_ occurrence of each module
topModules.map(_.module.unsafeAsModule()).reverse.distinct.reverse
}
/** A fast version of [[run]] that sorts the modules but neither
* performs cycle checks nor resolves exports.
*/
def runSort(modules: List[Module]): List[Module] = {
val graph = buildGraph(modules)
val tops = topsort(graph)
val topModules = tops.map(_.module)
topModules.map(_.module.unsafeAsModule()).reverse.distinct.reverse
}
}
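Presumably the cycle check can be skipped here because the input modules come from caches produced by an earlier, fully validated resolution. A sketch of the intended call shape (context assumed):

  val ordered: List[Module] = new ExportsResolution().runSort(cachedModules)
  // `ordered` respects export dependency order, so cached IR can be loaded in bulk.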

View File

@ -33,67 +33,103 @@ class ImportResolver(compiler: Compiler) {
/** Runs the import mapping logic.
*
* @param module the entry-point module.
* @return a list of all modules that need to be compiled in order to run
* the program.
* @return a tuple containing a list of all modules that need to go through the full compilation pipeline and
*         a list of all modules that were inferred from the bindings cache and could potentially be compiled lazily
*/
def mapImports(module: Module): List[Module] = {
def mapImports(
module: Module,
bindingsCachingEnabled: Boolean
): (List[Module], List[Module]) = {
def analyzeModule(current: Module): List[Module] = {
val ir = current.getIr
val currentLocal = ir.unsafeGetMetadata(
BindingAnalysis,
"Non-parsed module used in ImportResolver"
)
// put the list of resolved imports in the module metadata
if (
current.getCompilationStage
.isBefore(
Module.CompilationStage.AFTER_IMPORT_RESOLUTION
) || !current.hasCrossModuleLinks
) {
val importedModules: List[
(IR.Module.Scope.Import, Option[BindingsMap.ResolvedImport])
] =
ir.imports.map {
case imp: IR.Module.Scope.Import.Module =>
tryResolveImport(ir, imp)
case other => (other, None)
}
currentLocal.resolvedImports = importedModules.flatMap(_._2)
val newIr = ir.copy(imports = importedModules.map(_._1))
current.unsafeSetIr(newIr)
if (!current.wasLoadedFromCache()) {
current.unsafeSetCompilationStage(
Module.CompilationStage.AFTER_IMPORT_RESOLUTION
)
}
}
currentLocal.resolvedImports
.map(_.target.module.unsafeAsModule())
.distinct
}
@scala.annotation.tailrec
def go(
stack: mutable.Stack[Module],
seen: mutable.Set[Module]
): List[Module] = {
seen: mutable.Set[Module],
required: mutable.Set[Module]
): (List[Module], List[Module]) = {
if (stack.isEmpty) {
seen.toList
(required.toList, seen.toList diff required.toList)
} else {
val current = stack.pop()
if (seen.contains(current)) {
go(stack, seen)
go(stack, seen, required)
} else {
// get module metadata
compiler.ensureParsed(current)
val ir = current.getIr
val currentLocal = ir.unsafeGetMetadata(
BindingAnalysis,
"Non-parsed module used in ImportResolver"
)
// put the list of resolved imports in the module metadata
if (
current.getCompilationStage
.isBefore(
Module.CompilationStage.AFTER_IMPORT_RESOLUTION
) || !current.hasCrossModuleLinks
) {
val importedModules: List[
(IR.Module.Scope.Import, Option[BindingsMap.ResolvedImport])
] =
ir.imports.map {
case imp: IR.Module.Scope.Import.Module =>
tryResolveImport(ir, imp)
case other => (other, None)
}
currentLocal.resolvedImports = importedModules.flatMap(_._2)
val newIr = ir.copy(imports = importedModules.map(_._1))
current.unsafeSetIr(newIr)
if (!current.wasLoadedFromCache()) {
current.unsafeSetCompilationStage(
Module.CompilationStage.AFTER_IMPORT_RESOLUTION
)
val (next, isRequired) = if (bindingsCachingEnabled) {
// Do we have bindings available for this module's library?
// - yes - extract the resolved imports but don't add them to the import/export resolution
// - no - ensure they are parsed (load them from cache) and add them to the import/export resolution
compiler.importExportBindings(current) match {
case Some(bindings) =>
val converted = bindings
.toConcrete(compiler.packageRepository.getModuleMap)
.map { concreteBindings =>
concreteBindings
}
(
converted
.map(
_.resolvedImports
.map(_.target.module.unsafeAsModule())
.distinct
)
.getOrElse(Nil),
false
)
case None =>
compiler.ensureParsed(current)
(analyzeModule(current), true)
}
} else {
compiler.ensureParsed(current)
(analyzeModule(current), true)
}
// continue with updated stack
go(
stack.pushAll(
currentLocal.resolvedImports
.map(_.target.module.unsafeAsModule())
.distinct
),
seen += current
stack.pushAll(next),
seen += current,
if (isRequired) { required += current }
else required
)
}
}
}
go(mutable.Stack(module), mutable.Set())
go(mutable.Stack(module), mutable.Set(), mutable.Set())
}
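For illustration, a caller now receives the two groups separately (a sketch; names assumed):

  val (required, inferredFromBindings) =
    importResolver.mapImports(entryModule, bindingsCachingEnabled = true)
  // `required` must go through the full compilation pipeline, while
  // `inferredFromBindings` only needs its IR loaded from cache, so that
  // work can be batched or deferred.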
private def tryResolveAsType(

View File

@ -56,7 +56,7 @@ public class SerializerTest {
var result = compiler.run(module);
assertEquals(result.compiledModules().exists(m -> m == module), true);
var serializationManager = new SerializationManager(ensoContext.getCompiler());
var future = serializationManager.serialize(module, true);
var future = serializationManager.serializeModule(module, true);
var serialized = future.get(5, TimeUnit.SECONDS);
assertEquals(serialized, true);
var deserialized = serializationManager.deserialize(module);

View File

@ -14,10 +14,10 @@ class CacheInvalidationTest extends ModifiedTest {
val outLines2 = consumeOut
outLines2(0) shouldEqual "hmm..."
the[InterpreterException] thrownBy (evalTestProjectIteration(
the[InterpreterException] thrownBy evalTestProjectIteration(
"Test_Caching_Invalidation",
iteration = 3
)) should have message "Compilation aborted due to errors."
) should have message "Compilation aborted due to errors."
val outLines3 = consumeOut
outLines3(2) should endWith("The name `foo` could not be found.")
}

View File

@ -44,7 +44,7 @@ class LibraryDownloadTest
val pkg =
PackageManager.Default.loadPackage(libPath.location.toFile).get
pkg.name shouldEqual "Bar"
val sources = pkg.listSources
val sources = pkg.listSources()
sources should have size 1
sources.head.file.getName shouldEqual "Main.enso"
assert(

View File

@ -84,7 +84,7 @@ class LibraryUploadTest
.loadPackage(installedRoot.location.toFile)
.get
pkg.name shouldEqual libraryName.name
val sources = pkg.listSources
val sources = pkg.listSources()
sources should have size 1
sources.head.file.getName shouldEqual "Main.enso"
}

View File

@ -20,7 +20,7 @@ class DependencyResolver(
publishedLibraryProvider: PublishedLibraryCache,
edition: Editions.ResolvedEdition,
preferLocalLibraries: Boolean,
versionResolver: LibraryResolver,
libraryResolver: LibraryResolver,
dependencyExtractor: DependencyExtractor[File]
) {
@ -50,7 +50,7 @@ class DependencyResolver(
if (parents.contains(libraryName)) {
Set.empty
} else {
val version = versionResolver
val version = libraryResolver
.resolveLibraryVersion(libraryName, edition, preferLocalLibraries)
.toTry
.get

View File

@ -1,9 +1,12 @@
package org.enso.librarymanager.published.repository
import io.circe.syntax.EncoderOps
import io.circe.{Decoder, Encoder, Json}
import io.circe.syntax.EncoderOps
import io.circe.yaml
import org.enso.editions.LibraryName
import scala.util.Try
/** The manifest file containing metadata related to a published library.
*
* @param archives sequence of sub-archives that the library package is
@ -69,6 +72,11 @@ object LibraryManifest {
Json.obj(allFields: _*)
}
/** Parses the provided string and returns a LibraryManifest, if valid. */
def fromYaml(yamlString: String): Try[LibraryManifest] = {
yaml.parser.parse(yamlString).flatMap(_.as[LibraryManifest]).toTry
}
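A small sketch of the parser in action; the YAML body is hypothetical but follows the fields documented above:

  val parsed: Try[LibraryManifest] =
    LibraryManifest.fromYaml(
      """archives:
        |  - Main.tgz
        |""".stripMargin
    )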
/** The name of the manifest file as included in the directory associated with
* a given library in the library repository.
*/

View File

@ -40,6 +40,9 @@ case class Package[F](
val irCacheDirectory: F = internalDirectory
.getChild(Package.cacheDirName)
.getChild(Package.irCacheDirName)
val bindingsCacheDirectory: F = internalDirectory
.getChild(Package.cacheDirName)
.getChild(Package.bindingsCacheDirName)
/** Sets the package name.
*
@ -79,6 +82,10 @@ case class Package[F](
irCacheDirectory.getChild(ensoVersion)
}
def getBindingsCacheRootForPackage(ensoVersion: String): F = {
bindingsCacheDirectory.getChild(ensoVersion)
}
/** Changes the package name.
*
* @param newName the new package name
@ -155,13 +162,19 @@ case class Package[F](
*
* @return the list of all source files in this package, together with their qualified names.
*/
def listSources: List[SourceFile[F]] = {
val sources = sourceDir.walk
def listSources(): List[SourceFile[F]] = {
listSourcesJava().asScala.toList
}
/** Lists the source files in this package.
*
* @return the list of all source files in this package, together with their qualified names.
*/
def listSourcesJava(): java.util.List[SourceFile[F]] = {
sourceDir.walk
.filter(f => f.isRegularFile && f.getName.endsWith(".enso"))
.iterator
.asScala
.toList
sources.map { path => SourceFile(moduleNameForFile(path), path) }
.map(path => SourceFile(moduleNameForFile(path), path))
.collect(java.util.stream.Collectors.toList[SourceFile[F]])
}
/** Lists contents of the polyglot extensions directory for a given language.
@ -481,4 +494,5 @@ object Package {
val thumbFileName = "thumb.png"
val cacheDirName = "cache"
val irCacheDirName = "ir"
val bindingsCacheDirName = "bindings"
}

View File

@ -180,6 +180,51 @@ object DistributionPackage {
graalVersion = graalVersion,
javaVersion = javaVersion
)
indexStdLib(
stdLibVersion = targetStdlibVersion,
ensoVersion = ensoVersion,
stdLibRoot = distributionRoot / "lib",
ensoExecutable =
distributionRoot / "bin" / "enso",
cacheFactory = cacheFactory.sub("stdlib"),
log = log
)
}
def indexStdLib(
stdLibVersion: String,
ensoVersion: String,
stdLibRoot: File,
ensoExecutable: File,
cacheFactory: CacheStoreFactory,
log: Logger
): Unit = {
for {
libMajor <- stdLibRoot.listFiles()
libName <- (stdLibRoot / libMajor.getName).listFiles()
} yield {
val cache = cacheFactory.make(s"$libName.$ensoVersion")
val path = (libName / ensoVersion)
Tracked.diffInputs(cache, FileInfo.lastModified)(path.globRecursive("*.enso").get().toSet) { diff =>
if (diff.modified.nonEmpty) {
println(s"Generating index for ${libName} ")
val command = Seq(
Platform.executableFileName(ensoExecutable.toString),
"--no-compile-dependencies",
"--compile",
path.toString
)
log.info(command.mkString(" "))
val exitCode = command.!
if (exitCode != 0) {
throw new RuntimeException(s"Cannot compile $libMajor.$libName.")
}
} else {
println(s"No modified files. Not generating index for ${libName} ")
}
}
}
}
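On a Unix-like host the command assembled above resolves to something like the following (illustrative paths; on Windows `Platform.executableFileName` substitutes a `.bat` invocation):

  // <distributionRoot>/bin/enso --no-compile-dependencies --compile lib/Standard/Base/<ensoVersion>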
def runEnginePackage(

View File

@ -14,4 +14,28 @@ object Platform {
*/
def isMacOS: Boolean =
sys.props("os.name").toLowerCase().contains("mac")
/** Returns the dynamic library file name on the current platform.
*
* @param libraryName the library name
* @return the file name of provided library on the current platform
*/
def dynamicLibraryFileName(libraryName: String): String = {
if (isMacOS) s"lib$libraryName.dylib"
else if (isWindows) s"$libraryName.dll"
else if (isLinux) s"lib$libraryName.so"
else {
throw new RuntimeException(s"Unknown platform [${sys.props("os.name")}].")
}
}
/** Returns the executable file name on the current platform.
*
* @param name the executable name
* @return the file name of provided executable on the current platform
*/
def executableFileName(name: String): String = {
if (isWindows) s".\\$name.bat" else name
}
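For reference, the two helpers yield the following (derived from the branches above):

  // dynamicLibraryFileName("enso_parser")
  //   Linux   -> "libenso_parser.so"
  //   macOS   -> "libenso_parser.dylib"
  //   Windows -> "enso_parser.dll"
  // executableFileName("enso")
  //   Windows -> ".\enso.bat", elsewhere -> "enso"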
}