Mirror of https://github.com/enso-org/enso.git (synced 2024-12-22 15:21:43 +03:00)
Update java formatter sbt plugin (#8543)
Add a local clone of the javaFormatter plugin. The upstream is no longer maintained, and we need to update the plugin to use the newest Google Java Format, because the old version we use cannot format sources with Java 8+ syntax.

# Important Notes

Update to Google Java Format 1.18.1 - https://github.com/google/google-java-format/releases/tag/v1.18.1
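For context, here is a minimal, hypothetical sketch of driving the new formatter through its public API; it is not the plugin's actual wiring. It assumes the com.google.googlejavaformat.java.Formatter entry point shipped with google-java-format 1.18.1, and the class name FormatCheck is invented for illustration. On current JDKs the call only succeeds when the JVM is started with the --add-exports=jdk.compiler/... flags that this commit adds to .jvmopts, because the formatter reaches into javac internals (com.sun.tools.javac.*).

import com.google.googlejavaformat.java.Formatter;
import com.google.googlejavaformat.java.FormatterException;

/** Hypothetical smoke test: reformat a snippet with Google Java Format 1.18.1. */
public final class FormatCheck {
  public static void main(String[] args) throws FormatterException {
    // A record declaration: an example of syntax the previously used formatter could not parse.
    String input = "public record Point(int x,int y) {}";
    // formatSource rewrites a whole compilation unit in Google Java Style.
    String formatted = new Formatter().formatSource(input);
    System.out.print(formatted);
  }
}

Running this with the .jvmopts flags from this commit (for example via sbt, whose launcher picks up .jvmopts) prints the snippet reformatted to Google Java Style.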
This commit is contained in:
parent 56cc9561b1
commit c1098865f2

.github/workflows/formatting.yml (vendored), 4 changed lines
@@ -8,9 +8,9 @@ on:

 env:
   # Please ensure that this is in sync with graalVersion in build.sbt
-  javaVersion: 17.0.7
+  javaVersion: 21.0.1
   # Please ensure that this is in sync with project/build.properties
-  sbtVersion: 1.9.0
+  sbtVersion: 1.9.7

 jobs:
   test_formatting:
.jvmopts, 6 changed lines

@@ -1,3 +1,9 @@
 -Xss16M
 -Xmx4G
 -XX:+UseCompressedOops
+--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
+--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
+--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
+--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
+--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
+--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
@ -1,17 +1,20 @@
|
||||
package org.enso.interpreter.dsl.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.nodes.DirectCallNode;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
import org.junit.Test;
|
||||
import org.enso.interpreter.node.InlineableNode;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
import org.junit.Test;
|
||||
|
||||
public class InliningBuiltinsTest {
|
||||
|
||||
/** @see InliningBuiltinsInNode#execute(long, long) */
|
||||
/**
|
||||
* @see InliningBuiltinsInNode#execute(long, long)
|
||||
*/
|
||||
@Test
|
||||
public void executeWithoutVirtualFrame() {
|
||||
var fn = InliningBuiltinsInMethodGen.makeFunction(null);
|
||||
@ -19,18 +22,27 @@ public class InliningBuiltinsTest {
|
||||
var call = root.createInlineableNode();
|
||||
var clazz = call.getClass();
|
||||
assertEquals("InlineableNode", clazz.getSuperclass().getSimpleName());
|
||||
assertEquals("org.enso.interpreter.node.InlineableNode$Root", clazz.getEnclosingClass().getInterfaces()[0].getName());
|
||||
assertEquals(
|
||||
"org.enso.interpreter.node.InlineableNode$Root",
|
||||
clazz.getEnclosingClass().getInterfaces()[0].getName());
|
||||
|
||||
var res = WithFrame.invoke((frame) -> {
|
||||
return call.call(frame, Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 5L, 7L }));
|
||||
});
|
||||
var res =
|
||||
WithFrame.invoke(
|
||||
(frame) -> {
|
||||
return call.call(
|
||||
frame,
|
||||
Function.ArgumentsHelper.buildArguments(
|
||||
null, null, new Object[] {null, 5L, 7L}));
|
||||
});
|
||||
assertEquals(12L, res);
|
||||
} else {
|
||||
fail("It is inlineable: " + fn.getCallTarget().getRootNode());
|
||||
}
|
||||
}
|
||||
|
||||
/** @see InliningBuiltinsOutNode#execute(com.oracle.truffle.api.frame.VirtualFrame, long, long) */
|
||||
/**
|
||||
* @see InliningBuiltinsOutNode#execute(com.oracle.truffle.api.frame.VirtualFrame, long, long)
|
||||
*/
|
||||
@Test
|
||||
public void executeWithVirtualFrame() {
|
||||
var fn = InliningBuiltinsOutMethodGen.makeFunction(null);
|
||||
@ -41,14 +53,20 @@ public class InliningBuiltinsTest {
|
||||
var clazz = call.getClass().getSuperclass();
|
||||
assertEquals("com.oracle.truffle.api.nodes.DirectCallNode", clazz.getName());
|
||||
|
||||
var res = WithFrame.invoke((frame) -> {
|
||||
return call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 3L, 9L }));
|
||||
});
|
||||
var res =
|
||||
WithFrame.invoke(
|
||||
(frame) -> {
|
||||
return call.call(
|
||||
Function.ArgumentsHelper.buildArguments(
|
||||
null, null, new Object[] {null, 3L, 9L}));
|
||||
});
|
||||
assertEquals(12L, res);
|
||||
}
|
||||
}
|
||||
|
||||
/** @see InliningBuiltinsNeedsNode#execute(long, long) */
|
||||
/**
|
||||
* @see InliningBuiltinsNeedsNode#execute(long, long)
|
||||
*/
|
||||
@Test
|
||||
public void executeWhenNeedsVirtualFrame() {
|
||||
var fn = InliningBuiltinsNeedsMethodGen.makeFunction(null);
|
||||
@ -59,14 +77,20 @@ public class InliningBuiltinsTest {
|
||||
var clazz = call.getClass().getSuperclass();
|
||||
assertEquals("com.oracle.truffle.api.nodes.DirectCallNode", clazz.getName());
|
||||
|
||||
var res = WithFrame.invoke((frame) -> {
|
||||
return call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 3L, 9L }));
|
||||
});
|
||||
var res =
|
||||
WithFrame.invoke(
|
||||
(frame) -> {
|
||||
return call.call(
|
||||
Function.ArgumentsHelper.buildArguments(
|
||||
null, null, new Object[] {null, 3L, 9L}));
|
||||
});
|
||||
assertEquals(12L, res);
|
||||
}
|
||||
}
|
||||
|
||||
/** @see InliningBuiltinsNeedNotNode#execute(com.oracle.truffle.api.frame.VirtualFrame, long, long) */
|
||||
/**
|
||||
* @see InliningBuiltinsNeedNotNode#execute(com.oracle.truffle.api.frame.VirtualFrame, long, long)
|
||||
*/
|
||||
@Test
|
||||
public void executeWhenNeedNotVirtualFrame() {
|
||||
var fn = InliningBuiltinsNeedNotMethodGen.makeFunction(null);
|
||||
@ -74,11 +98,18 @@ public class InliningBuiltinsTest {
|
||||
var call = root.createInlineableNode();
|
||||
var clazz = call.getClass();
|
||||
assertEquals("InlineableNode", clazz.getSuperclass().getSimpleName());
|
||||
assertEquals("org.enso.interpreter.node.InlineableNode$Root", clazz.getEnclosingClass().getInterfaces()[0].getName());
|
||||
assertEquals(
|
||||
"org.enso.interpreter.node.InlineableNode$Root",
|
||||
clazz.getEnclosingClass().getInterfaces()[0].getName());
|
||||
|
||||
var res = WithFrame.invoke((frame) -> {
|
||||
return call.call(frame, Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 5L, 7L }));
|
||||
});
|
||||
var res =
|
||||
WithFrame.invoke(
|
||||
(frame) -> {
|
||||
return call.call(
|
||||
frame,
|
||||
Function.ArgumentsHelper.buildArguments(
|
||||
null, null, new Object[] {null, 5L, 7L}));
|
||||
});
|
||||
assertEquals(12L, res);
|
||||
} else {
|
||||
fail("It is inlineable: " + fn.getCallTarget().getRootNode());
|
||||
|
@ -10,13 +10,12 @@ public class ThrowBuiltinNode extends Node {
|
||||
public Object execute(Text type, long exceptionIdx) {
|
||||
switch (type.toString()) {
|
||||
case "exception" -> {
|
||||
Supplier<RuntimeException> exceptionSupplier =
|
||||
ThrowableCatchTest.exceptionSuppliers.get((int) exceptionIdx);
|
||||
Supplier<RuntimeException> exceptionSupplier =
|
||||
ThrowableCatchTest.exceptionSuppliers.get((int) exceptionIdx);
|
||||
throw exceptionSupplier.get();
|
||||
}
|
||||
case "error" -> {
|
||||
Supplier<Error> errorSupplier =
|
||||
ThrowableCatchTest.errorSuppliers.get((int) exceptionIdx);
|
||||
Supplier<Error> errorSupplier = ThrowableCatchTest.errorSuppliers.get((int) exceptionIdx);
|
||||
throw errorSupplier.get();
|
||||
}
|
||||
default -> throw new AssertionError("Unknown type: " + type);
|
||||
|
@ -4,12 +4,11 @@ import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import com.oracle.truffle.api.dsl.UnsupportedSpecializationException;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
@ -24,7 +23,6 @@ import org.graalvm.polyglot.io.IOAccess;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
|
||||
/**
|
||||
* Most of the exceptions thrown by the builtin methods, generated by {@link
|
||||
@ -72,10 +70,7 @@ public class ThrowableCatchTest {
|
||||
.allowAllAccess(true)
|
||||
.logHandler(System.err)
|
||||
.option(RuntimeOptions.STRICT_ERRORS, "true")
|
||||
.option(
|
||||
RuntimeOptions.LOG_LEVEL,
|
||||
Level.WARNING.getName()
|
||||
)
|
||||
.option(RuntimeOptions.LOG_LEVEL, Level.WARNING.getName())
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath())
|
||||
|
@@ -5,7 +5,9 @@ import java.util.concurrent.CompletableFuture;
 /** A component that should be initialized. */
 public interface InitializationComponent {

-  /** @return `true` if the component is initialized */
+  /**
+   * @return `true` if the component is initialized
+   */
   boolean isInitialized();

   /** Initialize the component. */
@@ -163,7 +163,8 @@ public class RepoInitialization implements InitializationComponent {
           return CompletableFuture.completedFuture(null);
         } else if (error instanceof FileSystemException) {
           logger.error(
-              "Failed to delete the database file. Attempt #{}. The file will be removed during the shutdown.",
+              "Failed to delete the database file. Attempt #{}. The file will be removed during the"
+                  + " shutdown.",
               retries + 1,
               error);
           Runtime.getRuntime()
@@ -8,7 +8,6 @@ import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
 import java.util.logging.XMLFormatter;
-
 import org.enso.languageserver.runtime.RuntimeConnector;
 import org.enso.polyglot.runtime.Runtime;
 import org.enso.polyglot.runtime.Runtime$Api$Request;
@@ -55,9 +54,7 @@ public final class RuntimeEventsMonitor implements EventsMonitor {
     this(out, Clock.systemUTC());
   }

-  /**
-   * Direction of the message.
-   */
+  /** Direction of the message. */
   private enum Direction {
     REQUEST,
     RESPONSE
@@ -11,13 +11,19 @@ public interface IdExecutionService {

   public abstract class Info {

-    /** @return UUID of the node, never {@code null}. */
+    /**
+     * @return UUID of the node, never {@code null}.
+     */
     public abstract UUID getId();

-    /** @return associated result or {@code null} if there is no associated result. */
+    /**
+     * @return associated result or {@code null} if there is no associated result.
+     */
     public abstract Object getResult();

-    /** @return {@code true} when the result is panic, {@code false} otherwise. */
+    /**
+     * @return {@code true} when the result is panic, {@code false} otherwise.
+     */
     public abstract boolean isPanic();

     /**
@@ -3,5 +3,4 @@ package org.enso.compiler.context;
 /**
  * A representation of a pointer into a stack frame at a given number of levels above the current.
  */
-public record FramePointer(int parentLevel, int frameSlotIdx) {
-}
+public record FramePointer(int parentLevel, int frameSlotIdx) {}
@@ -10,9 +10,4 @@ import org.enso.text.editing.model.TextEdit;
  * @param edit the editor change
  * @param newIr the new literal
  */
-public record SimpleUpdate(
-    Literal ir,
-    TextEdit edit,
-    Literal newIr
-) {
-}
+public record SimpleUpdate(Literal ir, TextEdit edit, Literal newIr) {}
@ -38,10 +38,7 @@ public final class ExportSymbolAnalysis implements IRPass {
|
||||
@Override
|
||||
public Seq<IRPass> precursorPasses() {
|
||||
if (precursorPasses == null) {
|
||||
List<IRPass> passes = List.of(
|
||||
BindingAnalysis$.MODULE$,
|
||||
ImportSymbolAnalysis$.MODULE$
|
||||
);
|
||||
List<IRPass> passes = List.of(BindingAnalysis$.MODULE$, ImportSymbolAnalysis$.MODULE$);
|
||||
precursorPasses = CollectionConverters.asScala(passes).toList();
|
||||
}
|
||||
return precursorPasses;
|
||||
@ -59,46 +56,52 @@ public final class ExportSymbolAnalysis implements IRPass {
|
||||
List<Export> exportErrors = new ArrayList<>();
|
||||
var bindingsMap = (BindingsMap) moduleIr.passData().get(BindingAnalysis$.MODULE$).get();
|
||||
|
||||
moduleIr.exports().foreach(export -> switch (export) {
|
||||
case Export.Module exportMod -> {
|
||||
var exportNameParts = exportMod.name().parts();
|
||||
var symbolName = exportMod.name().parts().last();
|
||||
assert exportNameParts.size() > 1;
|
||||
var moduleOrTypeName = exportNameParts.apply(exportNameParts.size() - 2);
|
||||
var foundResolvedExp = findResolvedExportForIr(export, bindingsMap);
|
||||
if (foundResolvedExp == null) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
symbolName,
|
||||
new ImportExport.SymbolDoesNotExist(symbolName.name(), moduleOrTypeName.name()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
)
|
||||
);
|
||||
} else {
|
||||
if (exportMod.onlyNames().isDefined()) {
|
||||
assert exportMod.onlyNames().isDefined();
|
||||
var exportedSymbols = exportMod.onlyNames().get();
|
||||
exportedSymbols.foreach(exportedSymbol -> {
|
||||
var foundSymbols = foundResolvedExp.target().findExportedSymbolsFor(exportedSymbol.name());
|
||||
if (foundSymbols.isEmpty()) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
exportedSymbol,
|
||||
new ImportExport.SymbolDoesNotExist(exportedSymbol.name(), moduleOrTypeName.name()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
)
|
||||
);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
}
|
||||
yield null;
|
||||
}
|
||||
default -> export;
|
||||
});
|
||||
moduleIr
|
||||
.exports()
|
||||
.foreach(
|
||||
export ->
|
||||
switch (export) {
|
||||
case Export.Module exportMod -> {
|
||||
var exportNameParts = exportMod.name().parts();
|
||||
var symbolName = exportMod.name().parts().last();
|
||||
assert exportNameParts.size() > 1;
|
||||
var moduleOrTypeName = exportNameParts.apply(exportNameParts.size() - 2);
|
||||
var foundResolvedExp = findResolvedExportForIr(export, bindingsMap);
|
||||
if (foundResolvedExp == null) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
symbolName,
|
||||
new ImportExport.SymbolDoesNotExist(
|
||||
symbolName.name(), moduleOrTypeName.name()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
} else {
|
||||
if (exportMod.onlyNames().isDefined()) {
|
||||
assert exportMod.onlyNames().isDefined();
|
||||
var exportedSymbols = exportMod.onlyNames().get();
|
||||
exportedSymbols.foreach(
|
||||
exportedSymbol -> {
|
||||
var foundSymbols =
|
||||
foundResolvedExp
|
||||
.target()
|
||||
.findExportedSymbolsFor(exportedSymbol.name());
|
||||
if (foundSymbols.isEmpty()) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
exportedSymbol,
|
||||
new ImportExport.SymbolDoesNotExist(
|
||||
exportedSymbol.name(), moduleOrTypeName.name()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
}
|
||||
yield null;
|
||||
}
|
||||
default -> export;
|
||||
});
|
||||
|
||||
if (exportErrors.isEmpty()) {
|
||||
return moduleIr;
|
||||
@ -110,8 +113,7 @@ public final class ExportSymbolAnalysis implements IRPass {
|
||||
moduleIr.location(),
|
||||
moduleIr.passData(),
|
||||
moduleIr.diagnostics(),
|
||||
moduleIr.id()
|
||||
);
|
||||
moduleIr.id());
|
||||
}
|
||||
}
|
||||
|
||||
@ -122,18 +124,24 @@ public final class ExportSymbolAnalysis implements IRPass {
|
||||
|
||||
/**
|
||||
* Finds a resolved export that corresponds to the export IR.
|
||||
*
|
||||
* @param exportIr Export IR that is being resolved
|
||||
* @param bindingsMap Bindings map of the module that contains the export IR
|
||||
* @return null if no resolved export was found, otherwise the resolved export
|
||||
*/
|
||||
private BindingsMap.ExportedModule findResolvedExportForIr(Export exportIr, BindingsMap bindingsMap) {
|
||||
private BindingsMap.ExportedModule findResolvedExportForIr(
|
||||
Export exportIr, BindingsMap bindingsMap) {
|
||||
switch (exportIr) {
|
||||
case Export.Module exportedModIr -> {
|
||||
var exportedModName = exportedModIr.name().name();
|
||||
var foundResolvedExp = bindingsMap.resolvedExports().find(resolvedExport -> {
|
||||
var resolvedExportName = resolvedExport.target().qualifiedName();
|
||||
return resolvedExportName.toString().equals(exportedModName);
|
||||
});
|
||||
var foundResolvedExp =
|
||||
bindingsMap
|
||||
.resolvedExports()
|
||||
.find(
|
||||
resolvedExport -> {
|
||||
var resolvedExportName = resolvedExport.target().qualifiedName();
|
||||
return resolvedExportName.toString().equals(exportedModName);
|
||||
});
|
||||
return foundResolvedExp.isEmpty() ? null : foundResolvedExp.get();
|
||||
}
|
||||
default -> throw new IllegalStateException("Unexpected value: " + exportIr);
|
||||
|
@ -20,11 +20,13 @@ import scala.jdk.javaapi.CollectionConverters;
|
||||
|
||||
/**
|
||||
* Iterates through all the imports and exports of non-synthetic modules and ensures that:
|
||||
*
|
||||
* <ul>
|
||||
* <li>No private module is exported</li>
|
||||
* <li>No private module from a different project is imported</li>
|
||||
* <li>Hierarchy of modules and submodules does not mix private and public modules</li>
|
||||
* <li>No private module is exported
|
||||
* <li>No private module from a different project is imported
|
||||
* <li>Hierarchy of modules and submodules does not mix private and public modules
|
||||
* </ul>
|
||||
*
|
||||
* Inserts errors into imports/exports IRs if the above conditions are violated.
|
||||
*/
|
||||
public final class PrivateModuleAnalysis implements IRPass {
|
||||
@ -45,10 +47,7 @@ public final class PrivateModuleAnalysis implements IRPass {
|
||||
|
||||
@Override
|
||||
public Seq<IRPass> precursorPasses() {
|
||||
List<IRPass> passes = List.of(
|
||||
BindingAnalysis$.MODULE$,
|
||||
ImportSymbolAnalysis$.MODULE$
|
||||
);
|
||||
List<IRPass> passes = List.of(BindingAnalysis$.MODULE$, ImportSymbolAnalysis$.MODULE$);
|
||||
return CollectionConverters.asScala(passes).toList();
|
||||
}
|
||||
|
||||
@ -68,78 +67,80 @@ public final class PrivateModuleAnalysis implements IRPass {
|
||||
var isCurrentModulePrivate = moduleIr.isPrivate();
|
||||
|
||||
// Ensure that imported modules from a different project are not private.
|
||||
bindingsMap.resolvedImports().foreach(resolvedImp -> {
|
||||
var importedModule = resolvedImp.target().module().unsafeAsModule("should succeed");
|
||||
var importedModuleName = importedModule.getName().toString();
|
||||
var importedModulePackage = importedModule.getPackage();
|
||||
if (currentPackage != null
|
||||
&& !currentPackage.equals(importedModulePackage)
|
||||
&& importedModule.isPrivate()) {
|
||||
importErrors.add(ImportExport.apply(
|
||||
resolvedImp.importDef(),
|
||||
new ImportExport.ImportPrivateModule(importedModuleName),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
));
|
||||
}
|
||||
return null;
|
||||
});
|
||||
bindingsMap
|
||||
.resolvedImports()
|
||||
.foreach(
|
||||
resolvedImp -> {
|
||||
var importedModule = resolvedImp.target().module().unsafeAsModule("should succeed");
|
||||
var importedModuleName = importedModule.getName().toString();
|
||||
var importedModulePackage = importedModule.getPackage();
|
||||
if (currentPackage != null
|
||||
&& !currentPackage.equals(importedModulePackage)
|
||||
&& importedModule.isPrivate()) {
|
||||
importErrors.add(
|
||||
ImportExport.apply(
|
||||
resolvedImp.importDef(),
|
||||
new ImportExport.ImportPrivateModule(importedModuleName),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
// Ensure that no symbols are exported from a private module.
|
||||
if (isCurrentModulePrivate && containsExport(moduleIr)) {
|
||||
exportErrors.add(ImportExport.apply(
|
||||
moduleIr.exports().apply(0),
|
||||
new ImportExport.ExportSymbolsFromPrivateModule(moduleContext.getName().toString()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
));
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
moduleIr.exports().apply(0),
|
||||
new ImportExport.ExportSymbolsFromPrivateModule(moduleContext.getName().toString()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
}
|
||||
|
||||
|
||||
// Ensure that private modules are not exported and that the hierarchy of submodules
|
||||
// does not mix public and private modules.
|
||||
bindingsMap
|
||||
.getDirectlyExportedModules()
|
||||
.foreach(expModule -> {
|
||||
var expModuleRef = expModule.target().module().unsafeAsModule("should succeed");
|
||||
if (expModuleRef.isPrivate()) {
|
||||
var associatedExportIR = findExportIRByName(moduleIr, expModuleRef.getName());
|
||||
assert associatedExportIR.isDefined();
|
||||
if (isSubmoduleName(moduleContext.getName(), expModuleRef.getName())) {
|
||||
var haveSameVisibility = isCurrentModulePrivate == expModuleRef.isPrivate();
|
||||
if (!haveSameVisibility) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
associatedExportIR.get(),
|
||||
new ImportExport.SubmoduleVisibilityMismatch(
|
||||
moduleContext.getName().toString(),
|
||||
expModuleRef.getName().toString(),
|
||||
isCurrentModulePrivate ? "private" : "public",
|
||||
expModuleRef.isPrivate() ? "private" : "public"
|
||||
),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
)
|
||||
);
|
||||
.foreach(
|
||||
expModule -> {
|
||||
var expModuleRef = expModule.target().module().unsafeAsModule("should succeed");
|
||||
if (expModuleRef.isPrivate()) {
|
||||
var associatedExportIR = findExportIRByName(moduleIr, expModuleRef.getName());
|
||||
assert associatedExportIR.isDefined();
|
||||
if (isSubmoduleName(moduleContext.getName(), expModuleRef.getName())) {
|
||||
var haveSameVisibility = isCurrentModulePrivate == expModuleRef.isPrivate();
|
||||
if (!haveSameVisibility) {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
associatedExportIR.get(),
|
||||
new ImportExport.SubmoduleVisibilityMismatch(
|
||||
moduleContext.getName().toString(),
|
||||
expModuleRef.getName().toString(),
|
||||
isCurrentModulePrivate ? "private" : "public",
|
||||
expModuleRef.isPrivate() ? "private" : "public"),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
}
|
||||
} else {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
associatedExportIR.get(),
|
||||
new ImportExport.ExportPrivateModule(expModuleRef.getName().toString()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
exportErrors.add(
|
||||
ImportExport.apply(
|
||||
associatedExportIR.get(),
|
||||
new ImportExport.ExportPrivateModule(expModuleRef.getName().toString()),
|
||||
ImportExport.apply$default$3(),
|
||||
ImportExport.apply$default$4()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
});
|
||||
return null;
|
||||
});
|
||||
|
||||
scala.collection.immutable.List<Import> convertedImports =
|
||||
importErrors.isEmpty() ? moduleIr.imports() : CollectionConverters.asScala(importErrors).toList();
|
||||
importErrors.isEmpty()
|
||||
? moduleIr.imports()
|
||||
: CollectionConverters.asScala(importErrors).toList();
|
||||
scala.collection.immutable.List<Export> convertedExports =
|
||||
exportErrors.isEmpty() ? moduleIr.exports() : CollectionConverters.asScala(exportErrors).toList();
|
||||
exportErrors.isEmpty()
|
||||
? moduleIr.exports()
|
||||
: CollectionConverters.asScala(exportErrors).toList();
|
||||
|
||||
return moduleIr.copy(
|
||||
convertedImports,
|
||||
@ -148,15 +149,12 @@ public final class PrivateModuleAnalysis implements IRPass {
|
||||
moduleIr.location(),
|
||||
moduleIr.passData(),
|
||||
moduleIr.diagnostics(),
|
||||
moduleIr.id()
|
||||
);
|
||||
moduleIr.id());
|
||||
}
|
||||
|
||||
private boolean isSubmoduleName(QualifiedName parentModName, QualifiedName subModName) {
|
||||
if (subModName.getParent().isDefined()) {
|
||||
return parentModName.item().equals(
|
||||
subModName.getParent().get().item()
|
||||
);
|
||||
return parentModName.item().equals(subModName.getParent().get().item());
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
@ -167,30 +165,35 @@ public final class PrivateModuleAnalysis implements IRPass {
|
||||
return ir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true iff the given Module's IR contains an export that is not synthetic.
|
||||
*/
|
||||
/** Returns true iff the given Module's IR contains an export that is not synthetic. */
|
||||
private static boolean containsExport(Module moduleIr) {
|
||||
return !moduleIr.exports().isEmpty() && moduleIr.exports().exists(exp -> {
|
||||
if (exp instanceof Export.Module moduleExport) {
|
||||
return !moduleExport.isSynthetic();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
return !moduleIr.exports().isEmpty()
|
||||
&& moduleIr
|
||||
.exports()
|
||||
.exists(
|
||||
exp -> {
|
||||
if (exp instanceof Export.Module moduleExport) {
|
||||
return !moduleExport.isSynthetic();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private static Option<Export> findExportIRByName(Module moduleIr, QualifiedName fqn) {
|
||||
return moduleIr.exports().find(exp -> {
|
||||
if (exp instanceof Export.Module expMod) {
|
||||
if (expMod.name().parts().last().name().equals(fqn.item())) {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
throw new IllegalStateException("unknown exp: " + exp);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
return moduleIr
|
||||
.exports()
|
||||
.find(
|
||||
exp -> {
|
||||
if (exp instanceof Export.Module expMod) {
|
||||
if (expMod.name().parts().last().name().equals(fqn.item())) {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
throw new IllegalStateException("unknown exp: " + exp);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -22,7 +22,9 @@ public final class MethodCallsCache {
|
||||
callsExecuted.add(call);
|
||||
}
|
||||
|
||||
/** @return the set of executed calls. */
|
||||
/**
|
||||
* @return the set of executed calls.
|
||||
*/
|
||||
public Set<UUID> getCallsExecuted() {
|
||||
return callsExecuted;
|
||||
}
|
||||
|
@ -45,7 +45,9 @@ public final class RuntimeCache {
|
||||
return ref == null ? null : ref.get();
|
||||
}
|
||||
|
||||
/** @return all cache keys. */
|
||||
/**
|
||||
* @return all cache keys.
|
||||
*/
|
||||
public Set<UUID> getKeys() {
|
||||
return cache.keySet();
|
||||
}
|
||||
@ -65,7 +67,9 @@ public final class RuntimeCache {
|
||||
return types.put(key, typeName);
|
||||
}
|
||||
|
||||
/** @return the cached type of the expression */
|
||||
/**
|
||||
* @return the cached type of the expression
|
||||
*/
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
public String getType(UUID key) {
|
||||
return types.get(key);
|
||||
@ -87,13 +91,17 @@ public final class RuntimeCache {
|
||||
return calls.put(key, call);
|
||||
}
|
||||
|
||||
/** @return the cached function call associated with the expression. */
|
||||
/**
|
||||
* @return the cached function call associated with the expression.
|
||||
*/
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
public ExecutionService.FunctionCallInfo getCall(UUID key) {
|
||||
return calls.get(key);
|
||||
}
|
||||
|
||||
/** @return the cached method calls. */
|
||||
/**
|
||||
* @return the cached method calls.
|
||||
*/
|
||||
public Set<UUID> getCalls() {
|
||||
return calls.keySet();
|
||||
}
|
||||
@ -122,7 +130,9 @@ public final class RuntimeCache {
|
||||
types.clear();
|
||||
}
|
||||
|
||||
/** @return the weights of this cache. */
|
||||
/**
|
||||
* @return the weights of this cache.
|
||||
*/
|
||||
public Map<UUID, Double> getWeights() {
|
||||
return weights;
|
||||
}
|
||||
|
@ -22,7 +22,9 @@ public abstract class BackgroundJob<A> extends Job<A> {
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
/** @return the job priority. */
|
||||
/**
|
||||
* @return the job priority.
|
||||
*/
|
||||
public int getPriority() {
|
||||
return priority;
|
||||
}
|
||||
|
@ -8,8 +8,7 @@ import org.enso.interpreter.runtime.data.text.Text;
|
||||
import org.enso.interpreter.runtime.error.WithWarnings;
|
||||
|
||||
public final class VisualizationResult {
|
||||
private VisualizationResult() {
|
||||
}
|
||||
private VisualizationResult() {}
|
||||
|
||||
/**
|
||||
* Extracts a string representation for a polyglot exception.
|
||||
|
@ -8,7 +8,9 @@ public class ExecutionTime implements ProfilingInfo {
|
||||
this.nanoTimeElapsed = nanoTimeElapsed;
|
||||
}
|
||||
|
||||
/** @return the time elapsed while executing the expression */
|
||||
/**
|
||||
* @return the time elapsed while executing the expression
|
||||
*/
|
||||
public long getNanoTimeElapsed() {
|
||||
return nanoTimeElapsed;
|
||||
}
|
||||
@ -18,7 +20,9 @@ public class ExecutionTime implements ProfilingInfo {
|
||||
return "ExecutionTime{nanoTimeElapsed=" + nanoTimeElapsed + "}";
|
||||
}
|
||||
|
||||
/** @return an execution time representing that no time has passed */
|
||||
/**
|
||||
* @return an execution time representing that no time has passed
|
||||
*/
|
||||
public static ExecutionTime empty() {
|
||||
return new ExecutionTime(0);
|
||||
}
|
||||
|
@ -1,5 +1,19 @@
|
||||
package org.enso.interpreter.service;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.TruffleLogger;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.instrumentation.EventBinding;
|
||||
import com.oracle.truffle.api.instrumentation.ExecutionEventNodeFactory;
|
||||
import com.oracle.truffle.api.interop.ArityException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedTypeException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import com.oracle.truffle.api.source.SourceSection;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
@ -8,7 +22,6 @@ import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.compiler.context.SimpleUpdate;
|
||||
import org.enso.interpreter.instrument.Endpoint;
|
||||
import org.enso.interpreter.instrument.MethodCallsCache;
|
||||
@ -47,21 +60,6 @@ import org.enso.polyglot.debugger.IdExecutionService;
|
||||
import org.enso.text.editing.JavaEditorAdapter;
|
||||
import org.enso.text.editing.model;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.TruffleLogger;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.instrumentation.EventBinding;
|
||||
import com.oracle.truffle.api.instrumentation.ExecutionEventNodeFactory;
|
||||
import com.oracle.truffle.api.interop.ArityException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedTypeException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import com.oracle.truffle.api.source.SourceSection;
|
||||
|
||||
/**
|
||||
* A service allowing externally-triggered code execution, registered by an instance of the
|
||||
* language.
|
||||
@ -72,7 +70,8 @@ public final class ExecutionService {
|
||||
private final EnsoContext context;
|
||||
private final Optional<IdExecutionService> idExecutionInstrument;
|
||||
private final NotificationHandler.Forwarder notificationForwarder;
|
||||
private final TruffleLogger logger = TruffleLogger.getLogger(LanguageInfo.ID, ExecutionService.class);
|
||||
private final TruffleLogger logger =
|
||||
TruffleLogger.getLogger(LanguageInfo.ID, ExecutionService.class);
|
||||
private final ConnectedLockManager connectedLockManager;
|
||||
private final ExecuteRootNode execute = new ExecuteRootNode();
|
||||
private final CallRootNode call = new CallRootNode();
|
||||
@ -103,12 +102,16 @@ public final class ExecutionService {
|
||||
this.timer = timer;
|
||||
}
|
||||
|
||||
/** @return the language context. */
|
||||
/**
|
||||
* @return the language context.
|
||||
*/
|
||||
public EnsoContext getContext() {
|
||||
return context;
|
||||
}
|
||||
|
||||
/** @return the execution service logger. */
|
||||
/**
|
||||
* @return the execution service logger.
|
||||
*/
|
||||
public TruffleLogger getLogger() {
|
||||
return logger;
|
||||
}
|
||||
@ -138,8 +141,8 @@ public final class ExecutionService {
|
||||
connectedLockManager.connect(endpoint);
|
||||
} else {
|
||||
logger.warning(
|
||||
"ConnectedLockManager was not initialized, even though a Language Server connection has been established. "
|
||||
+ "This may result in synchronization errors.");
|
||||
"ConnectedLockManager was not initialized, even though a Language Server connection has"
|
||||
+ " been established. This may result in synchronization errors.");
|
||||
}
|
||||
}
|
||||
|
||||
@ -190,8 +193,7 @@ public final class ExecutionService {
|
||||
Optional<EventBinding<ExecutionEventNodeFactory>> eventNodeFactory =
|
||||
idExecutionInstrument.map(
|
||||
service ->
|
||||
service.bind(
|
||||
module, call.getFunction().getCallTarget(), callbacks, this.timer));
|
||||
service.bind(module, call.getFunction().getCallTarget(), callbacks, this.timer));
|
||||
Object p = context.getThreadManager().enter();
|
||||
try {
|
||||
execute.getCallTarget().call(call);
|
||||
@ -402,7 +404,13 @@ public final class ExecutionService {
|
||||
module.getName(), edits, failure, module.getLiteralSource());
|
||||
},
|
||||
rope -> {
|
||||
logger.log(Level.FINE, "Applied edits. Source has {} lines, last line has {} characters", new Object[]{rope.lines().length(), rope.lines().drop(rope.lines().length() - 1).characters().length()});
|
||||
logger.log(
|
||||
Level.FINE,
|
||||
"Applied edits. Source has {} lines, last line has {} characters",
|
||||
new Object[] {
|
||||
rope.lines().length(),
|
||||
rope.lines().drop(rope.lines().length() - 1).characters().length()
|
||||
});
|
||||
module.setLiteralSource(rope, simpleUpdate);
|
||||
return new Object();
|
||||
});
|
||||
@ -459,7 +467,8 @@ public final class ExecutionService {
|
||||
var iop = InteropLibrary.getUncached();
|
||||
var p = context.getThreadManager().enter();
|
||||
try {
|
||||
// Invoking a member on an Atom that does not have a method `to_display_text` will not contrary to what is
|
||||
// Invoking a member on an Atom that does not have a method `to_display_text` will not
|
||||
// contrary to what is
|
||||
// expected from the documentation, throw an `UnsupportedMessageException`.
|
||||
// Instead it will crash with some internal assertion deep inside runtime. Hence the check.
|
||||
if (iop.isMemberInvocable(panic.getPayload(), "to_display_text")) {
|
||||
@ -487,8 +496,7 @@ public final class ExecutionService {
|
||||
}
|
||||
|
||||
private static final class ExecuteRootNode extends RootNode {
|
||||
@Node.Child
|
||||
private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
@Node.Child private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
|
||||
ExecuteRootNode() {
|
||||
super(null);
|
||||
@ -508,8 +516,7 @@ public final class ExecutionService {
|
||||
}
|
||||
|
||||
private static final class CallRootNode extends RootNode {
|
||||
@Node.Child
|
||||
private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
@Node.Child private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
|
||||
CallRootNode() {
|
||||
super(null);
|
||||
@ -528,8 +535,7 @@ public final class ExecutionService {
|
||||
}
|
||||
|
||||
private static final class InvokeMemberRootNode extends RootNode {
|
||||
@Node.Child
|
||||
private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
@Node.Child private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(5);
|
||||
|
||||
InvokeMemberRootNode() {
|
||||
super(null);
|
||||
@ -567,12 +573,16 @@ public final class ExecutionService {
|
||||
this.call = call;
|
||||
}
|
||||
|
||||
/** @return the id of the node performing the function call. */
|
||||
/**
|
||||
* @return the id of the node performing the function call.
|
||||
*/
|
||||
public UUID getExpressionId() {
|
||||
return expressionId;
|
||||
}
|
||||
|
||||
/** @return the function call metadata. */
|
||||
/**
|
||||
* @return the function call metadata.
|
||||
*/
|
||||
public FunctionCallInstrumentationNode.FunctionCall getCall() {
|
||||
return call;
|
||||
}
|
||||
@ -645,52 +655,72 @@ public final class ExecutionService {
|
||||
+ '}';
|
||||
}
|
||||
|
||||
/** @return the id of the expression computed. */
|
||||
/**
|
||||
* @return the id of the expression computed.
|
||||
*/
|
||||
public UUID getExpressionId() {
|
||||
return expressionId;
|
||||
}
|
||||
|
||||
/** @return the type of the returned value. */
|
||||
/**
|
||||
* @return the type of the returned value.
|
||||
*/
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
/** @return the cached type of the value. */
|
||||
/**
|
||||
* @return the cached type of the value.
|
||||
*/
|
||||
public String getCachedType() {
|
||||
return cachedType;
|
||||
}
|
||||
|
||||
/** @return the computed value of the expression. */
|
||||
/**
|
||||
* @return the computed value of the expression.
|
||||
*/
|
||||
public Object getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
/** @return the function call data. */
|
||||
/**
|
||||
* @return the function call data.
|
||||
*/
|
||||
public FunctionCallInfo getCallInfo() {
|
||||
return callInfo;
|
||||
}
|
||||
|
||||
/** @return the function call data previously associated with the expression. */
|
||||
/**
|
||||
* @return the function call data previously associated with the expression.
|
||||
*/
|
||||
public FunctionCallInfo getCachedCallInfo() {
|
||||
return cachedCallInfo;
|
||||
}
|
||||
|
||||
/** @return the profiling information associated with this expression */
|
||||
/**
|
||||
* @return the profiling information associated with this expression
|
||||
*/
|
||||
public ProfilingInfo[] getProfilingInfo() {
|
||||
return profilingInfo;
|
||||
}
|
||||
|
||||
/** @return whether or not the expression result was obtained from the cache */
|
||||
/**
|
||||
* @return whether or not the expression result was obtained from the cache
|
||||
*/
|
||||
public boolean wasCached() {
|
||||
return wasCached;
|
||||
}
|
||||
|
||||
/** @return {@code true} when the type differs from the cached value. */
|
||||
/**
|
||||
* @return {@code true} when the type differs from the cached value.
|
||||
*/
|
||||
public boolean isTypeChanged() {
|
||||
return !Objects.equals(type, cachedType);
|
||||
}
|
||||
|
||||
/** @return {@code true} when the function call differs from the cached value. */
|
||||
/**
|
||||
* @return {@code true} when the function call differs from the cached value.
|
||||
*/
|
||||
public boolean isFunctionCallChanged() {
|
||||
return !Objects.equals(callInfo, cachedCallInfo);
|
||||
}
|
||||
|
@ -102,9 +102,7 @@ public class IdExecutionInstrument extends TruffleInstrument implements IdExecut
|
||||
* @param materializedFrame the execution frame
|
||||
* @param node the entered node
|
||||
*/
|
||||
public NodeInfo(
|
||||
MaterializedFrame materializedFrame,
|
||||
Node node) {
|
||||
public NodeInfo(MaterializedFrame materializedFrame, Node node) {
|
||||
super();
|
||||
|
||||
this.nodeId = getNodeId(node);
|
||||
@ -243,7 +241,11 @@ public class IdExecutionInstrument extends TruffleInstrument implements IdExecut
|
||||
} else if (node instanceof ExpressionNode expressionNode) {
|
||||
Info info =
|
||||
new NodeInfo(
|
||||
expressionNode.getId(), result, nanoTimeElapsed, frame == null ? null : frame.materialize(), node);
|
||||
expressionNode.getId(),
|
||||
result,
|
||||
nanoTimeElapsed,
|
||||
frame == null ? null : frame.materialize(),
|
||||
node);
|
||||
callbacks.updateCachedResult(info);
|
||||
|
||||
if (info.isPanic()) {
|
||||
|
@ -1,26 +1,22 @@
|
||||
package org.enso.interpreter.instrument;
|
||||
|
||||
import org.enso.polyglot.debugger.IdExecutionService;
|
||||
|
||||
import com.oracle.truffle.api.TruffleContext;
|
||||
import com.oracle.truffle.api.instrumentation.ContextsListener;
|
||||
import com.oracle.truffle.api.instrumentation.EventBinding;
|
||||
import com.oracle.truffle.api.instrumentation.TruffleInstrument;
|
||||
import com.oracle.truffle.api.nodes.LanguageInfo;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.enso.distribution.locking.LockManager;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.service.ExecutionService;
|
||||
import org.enso.lockmanager.client.ConnectedLockManager;
|
||||
import org.enso.polyglot.RuntimeServerInfo;
|
||||
import org.enso.polyglot.debugger.IdExecutionService;
|
||||
import org.graalvm.options.OptionDescriptor;
|
||||
import org.graalvm.options.OptionDescriptors;
|
||||
import org.graalvm.options.OptionKey;
|
||||
import org.graalvm.polyglot.io.MessageEndpoint;
|
||||
import org.graalvm.polyglot.io.MessageTransport;
|
||||
|
||||
@ -76,7 +72,11 @@ public class RuntimeServerInstrument extends TruffleInstrument {
|
||||
var timer = instrument.env.lookup(language, Timer.class);
|
||||
var notificationHandler =
|
||||
instrument.env.lookup(language, NotificationHandler.Forwarder.class);
|
||||
var connectedLockManager = instrument.env.lookup(language, LockManager.class) instanceof ConnectedLockManager connected ? connected : null;
|
||||
var connectedLockManager =
|
||||
instrument.env.lookup(language, LockManager.class)
|
||||
instanceof ConnectedLockManager connected
|
||||
? connected
|
||||
: null;
|
||||
service =
|
||||
new ExecutionService(
|
||||
ctx, idExecutionInstrument, notificationHandler, connectedLockManager, timer);
|
||||
@ -141,7 +141,8 @@ public class RuntimeServerInstrument extends TruffleInstrument {
|
||||
protected OptionDescriptors getOptionDescriptors() {
|
||||
return OptionDescriptors.create(
|
||||
Arrays.asList(
|
||||
OptionDescriptor.newBuilder(RuntimeServerInfo.ENABLE_OPTION_KEY, RuntimeServerInfo.ENABLE_OPTION)
|
||||
OptionDescriptor.newBuilder(
|
||||
RuntimeServerInfo.ENABLE_OPTION_KEY, RuntimeServerInfo.ENABLE_OPTION)
|
||||
.build()));
|
||||
}
|
||||
}
|
||||
|
@ -63,7 +63,9 @@ final class EpbContext {
|
||||
return REFERENCE.get(node);
|
||||
}
|
||||
|
||||
/** @return the language environment associated with this context. */
|
||||
/**
|
||||
* @return the language environment associated with this context.
|
||||
*/
|
||||
public TruffleLanguage.Env getEnv() {
|
||||
return env;
|
||||
}
|
||||
|
@ -1,11 +1,5 @@
|
||||
package org.enso.interpreter.epb;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.frame.FrameDescriptor;
|
||||
@ -13,6 +7,11 @@ import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropException;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import com.oracle.truffle.api.source.Source;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
final class ForeignEvalNode extends RootNode {
|
||||
private final Source langAndCode;
|
||||
@ -58,20 +57,25 @@ final class ForeignEvalNode extends RootNode {
|
||||
var id = truffleId(langAndCode);
|
||||
var context = EpbContext.get(this);
|
||||
var installedLanguages = context.getEnv().getInternalLanguages();
|
||||
var node = switch (installedLanguages.containsKey(id) ? 1 : 0) {
|
||||
case 0 -> {
|
||||
var ex = new ForeignParsingException(id, installedLanguages.keySet(), this);
|
||||
yield new ExceptionForeignNode(ex);
|
||||
}
|
||||
default -> {
|
||||
context.log(Level.FINE, "Parsing foreign script {1} - language {0}", id, langAndCode.getName());
|
||||
yield switch (id) {
|
||||
case "js" -> parseJs();
|
||||
case "python" -> parseGeneric("python", PyForeignNode::new);
|
||||
default -> parseGeneric(id, GenericForeignNode::new);
|
||||
var node =
|
||||
switch (installedLanguages.containsKey(id) ? 1 : 0) {
|
||||
case 0 -> {
|
||||
var ex = new ForeignParsingException(id, installedLanguages.keySet(), this);
|
||||
yield new ExceptionForeignNode(ex);
|
||||
}
|
||||
default -> {
|
||||
context.log(
|
||||
Level.FINE,
|
||||
"Parsing foreign script {1} - language {0}",
|
||||
id,
|
||||
langAndCode.getName());
|
||||
yield switch (id) {
|
||||
case "js" -> parseJs();
|
||||
case "python" -> parseGeneric("python", PyForeignNode::new);
|
||||
default -> parseGeneric(id, GenericForeignNode::new);
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
foreign = insert(node);
|
||||
}
|
||||
try {
|
||||
@ -87,20 +91,17 @@ final class ForeignEvalNode extends RootNode {
|
||||
var inner = context.getInnerContext();
|
||||
var code = foreignSource(langAndCode);
|
||||
var args = Arrays.stream(argNames).skip(1).collect(Collectors.joining(","));
|
||||
var wrappedSrc
|
||||
= "var poly_enso_eval=function("
|
||||
+ args
|
||||
+ "){\n"
|
||||
+ code
|
||||
+ "\n};poly_enso_eval";
|
||||
var wrappedSrc = "var poly_enso_eval=function(" + args + "){\n" + code + "\n};poly_enso_eval";
|
||||
Source source = Source.newBuilder("js", wrappedSrc, "").build();
|
||||
var fn = inner.evalPublic(this, source);
|
||||
return JsForeignNode.build(fn);
|
||||
}
|
||||
|
||||
private ForeignFunctionCallNode parseGeneric(String language, Function<CallTarget,ForeignFunctionCallNode> nodeFactory) {
|
||||
private ForeignFunctionCallNode parseGeneric(
|
||||
String language, Function<CallTarget, ForeignFunctionCallNode> nodeFactory) {
|
||||
var ctx = EpbContext.get(this);
|
||||
Source source = Source.newBuilder(language, foreignSource(langAndCode), langAndCode.getName()).build();
|
||||
Source source =
|
||||
Source.newBuilder(language, foreignSource(langAndCode), langAndCode.getName()).build();
|
||||
CallTarget ct = ctx.getEnv().parsePublic(source, argNames);
|
||||
return nodeFactory.apply(ct);
|
||||
}
|
||||
|
@ -1,9 +1,5 @@
|
||||
package org.enso.interpreter.epb;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalTime;
|
||||
import java.time.ZoneId;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.interop.ArityException;
|
||||
@ -17,28 +13,21 @@ import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.library.ExportLibrary;
|
||||
import com.oracle.truffle.api.library.ExportMessage;
|
||||
import com.oracle.truffle.api.source.Source;
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalTime;
|
||||
import java.time.ZoneId;
|
||||
|
||||
final class PyForeignNode extends GenericForeignNode {
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private Object fnPythonDate;
|
||||
@Child
|
||||
private InteropLibrary nodePythonDate;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private Object fnPythonTime;
|
||||
@Child
|
||||
private InteropLibrary nodePythonTime;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private Object fnPythonZone;
|
||||
@Child
|
||||
private InteropLibrary nodePythonZone;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private Object fnPythonCombine;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private Object none;
|
||||
@Child
|
||||
private InteropLibrary nodePythonCombine;
|
||||
@Child
|
||||
private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(3);
|
||||
@CompilerDirectives.CompilationFinal private Object fnPythonDate;
|
||||
@Child private InteropLibrary nodePythonDate;
|
||||
@CompilerDirectives.CompilationFinal private Object fnPythonTime;
|
||||
@Child private InteropLibrary nodePythonTime;
|
||||
@CompilerDirectives.CompilationFinal private Object fnPythonZone;
|
||||
@Child private InteropLibrary nodePythonZone;
|
||||
@CompilerDirectives.CompilationFinal private Object fnPythonCombine;
|
||||
@CompilerDirectives.CompilationFinal private Object none;
|
||||
@Child private InteropLibrary nodePythonCombine;
|
||||
@Child private InteropLibrary iop = InteropLibrary.getFactory().createDispatched(3);
|
||||
|
||||
PyForeignNode(CallTarget ct) {
|
||||
super(ct);
|
||||
@ -54,7 +43,10 @@ final class PyForeignNode extends GenericForeignNode {
|
||||
var time = javaTime != null ? wrapPythonTime(javaTime) : null;
|
||||
var javaDate = iop.isDate(arguments[i]) ? iop.asDate(arguments[i]) : null;
|
||||
var date = javaDate != null ? wrapPythonDate(javaDate) : null;
|
||||
var zone = iop.isTimeZone(arguments[i]) ? wrapPythonZone(iop.asTimeZone(arguments[i]), javaTime, javaDate) : null;
|
||||
var zone =
|
||||
iop.isTimeZone(arguments[i])
|
||||
? wrapPythonZone(iop.asTimeZone(arguments[i]), javaTime, javaDate)
|
||||
: null;
|
||||
if (date != null && time != null) {
|
||||
arguments[i] = combinePythonDateTimeZone(date, time, zone);
|
||||
} else if (date != null) {
|
||||
@ -82,16 +74,22 @@ final class PyForeignNode extends GenericForeignNode {
|
||||
return res;
|
||||
}
|
||||
|
||||
private Object none() throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
private Object none()
|
||||
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
if (none == null) {
|
||||
CompilerDirectives.transferToInterpreterAndInvalidate();
|
||||
var ctx = EpbContext.get(this);
|
||||
var src = Source.newBuilder("python", """
|
||||
var src =
|
||||
Source.newBuilder(
|
||||
"python",
|
||||
"""
|
||||
import site
|
||||
def nothing():
|
||||
return None
|
||||
nothing
|
||||
""", "nothing.py").build();
|
||||
""",
|
||||
"nothing.py")
|
||||
.build();
|
||||
var nothingFn = ctx.getEnv().parsePublic(src).call();
|
||||
assert InteropLibrary.getUncached().isExecutable(nothingFn);
|
||||
none = InteropLibrary.getUncached().execute(nothingFn);
|
||||
@ -100,41 +98,58 @@ final class PyForeignNode extends GenericForeignNode {
|
||||
return none;
|
||||
}
|
||||
|
||||
private Object wrapPythonDate(LocalDate date) throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
private Object wrapPythonDate(LocalDate date)
|
||||
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
if (nodePythonDate == null) {
|
||||
CompilerDirectives.transferToInterpreterAndInvalidate();
|
||||
var ctx = EpbContext.get(this);
|
||||
var src = Source.newBuilder("python", """
|
||||
var src =
|
||||
Source.newBuilder(
|
||||
"python",
|
||||
"""
|
||||
from datetime import date
|
||||
date
|
||||
""", "convert_date.py").build();
|
||||
""",
|
||||
"convert_date.py")
|
||||
.build();
|
||||
|
||||
fnPythonDate = ctx.getEnv().parsePublic(src).call();
|
||||
nodePythonDate = insert(InteropLibrary.getFactory().create(fnPythonDate));
|
||||
}
|
||||
return nodePythonDate.execute(fnPythonDate, date.getYear(), date.getMonthValue(), date.getDayOfMonth());
|
||||
return nodePythonDate.execute(
|
||||
fnPythonDate, date.getYear(), date.getMonthValue(), date.getDayOfMonth());
|
||||
}
|
||||
|
||||
private Object wrapPythonTime(LocalTime time) throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
private Object wrapPythonTime(LocalTime time)
|
||||
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
|
||||
if (nodePythonTime == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
var ctx = EpbContext.get(this);
var src = Source.newBuilder("python", """
var src =
Source.newBuilder(
"python",
"""
from datetime import time
time
""", "convert_time.py").build();
""",
"convert_time.py")
.build();
fnPythonTime = ctx.getEnv().parsePublic(src).call();
nodePythonTime = insert(InteropLibrary.getFactory().create(fnPythonTime));
}
return nodePythonTime.execute(fnPythonTime, time.getHour(), time.getMinute(), time.getSecond(), time.getNano() / 1000);
return nodePythonTime.execute(
fnPythonTime, time.getHour(), time.getMinute(), time.getSecond(), time.getNano() / 1000);
}

private Object wrapPythonZone(ZoneId zone, LocalTime time, LocalDate date)
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
var ctx = EpbContext.get(this);
if (nodePythonZone == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
var src = Source.newBuilder("python", """
var src =
Source.newBuilder(
"python",
"""
from datetime import timezone, timedelta, tzinfo

class EnsoTzInfo(tzinfo):
@ -156,7 +171,9 @@ final class PyForeignNode extends GenericForeignNode {
return EnsoTzInfo(rules)

conv
""", "convert_time_zone.py").build();
""",
"convert_time_zone.py")
.build();

fnPythonZone = ctx.getEnv().parsePublic(src).call();
nodePythonZone = insert(InteropLibrary.getFactory().create(fnPythonZone));
@ -165,14 +182,19 @@ final class PyForeignNode extends GenericForeignNode {
}

private Object combinePythonDateTimeZone(Object date, Object time, Object zone)
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
throws UnsupportedTypeException, ArityException, UnsupportedMessageException {
if (nodePythonCombine == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
var ctx = EpbContext.get(this);
var src = Source.newBuilder("python", """
var src =
Source.newBuilder(
"python",
"""
from datetime import datetime
datetime.combine
""", "convert_combine.py").build();
""",
"convert_combine.py")
.build();

fnPythonCombine = ctx.getEnv().parsePublic(src).call();
nodePythonCombine = insert(InteropLibrary.getFactory().create(fnPythonCombine));
@ -202,10 +224,8 @@ final class PyForeignNode extends GenericForeignNode {
@ExportMessage
boolean isMemberInvocable(String member) {
return switch (member) {
case "dst", "name", "offset" ->
true;
default ->
false;
case "dst", "name", "offset" -> true;
default -> false;
};
}

@ -216,10 +236,8 @@ final class PyForeignNode extends GenericForeignNode {

@ExportMessage
@CompilerDirectives.TruffleBoundary
Object invokeMember(
String name, Object[] args,
@CachedLibrary(limit = "3") InteropLibrary iop
) throws UnknownIdentifierException, UnsupportedMessageException {
Object invokeMember(String name, Object[] args, @CachedLibrary(limit = "3") InteropLibrary iop)
throws UnknownIdentifierException, UnsupportedMessageException {
var date = iop.asDate(args[0]);
var time = iop.asTime(args[0]);
var when = date.atTime(time);
@ -231,10 +249,8 @@ final class PyForeignNode extends GenericForeignNode {
yield now.getTotalSeconds() - std.getTotalSeconds();
}
case "name" -> zone.getId();
case "offset" ->
zone.getRules().getOffset(when).getTotalSeconds();
default ->
throw UnknownIdentifierException.create(name);
case "offset" -> zone.getRules().getOffset(when).getTotalSeconds();
default -> throw UnknownIdentifierException.create(name);
};
}
}

@ -1,16 +1,14 @@
package org.enso.interpreter.epb;

import java.util.Collections;

import org.junit.Test;
import static org.junit.Assert.*;

import com.oracle.truffle.api.source.Source;
import java.util.Collections;
import org.junit.Test;

public class ForeignEvalNodeTest {

public ForeignEvalNodeTest() {
}
public ForeignEvalNodeTest() {}

@Test
public void sourceWithoutHash() throws Exception {

@ -5,41 +5,44 @@ import scala.Option;

public record IdentifiedLocation(Location location, UUID uuid) {
public IdentifiedLocation(Location location) {
this(location, (UUID)null);
this(location, (UUID) null);
}

/**
* Creates new location from an optional UUID.
*/
/** Creates new location from an optional UUID. */
public static IdentifiedLocation create(Location location, Option<UUID> uuid) {
return new IdentifiedLocation(location, uuid.isEmpty() ? null : uuid.get());
}

/** @return the character index of the start of this source location.
*/
/**
* @return the character index of the start of this source location.
*/
public int start() {
return location().start();
}

/** @return the character index of the end of this source location.
*/
/**
* @return the character index of the end of this source location.
*/
public int end() {
return location().end();
}

/** @return the length in characters of this location.
*/
/**
* @return the length in characters of this location.
*/
public int length() {
return location().length();
}

/** @return option with/out UUID */
/**
* @return option with/out UUID
*/
public Option<UUID> id() {
return Option.apply(uuid());
}

@Override
public String toString() {
return "IdentifiedLocation[location=" + this.location() + ", uuid="+ id() + "]";
return "IdentifiedLocation[location=" + this.location() + ", uuid=" + id() + "]";
}
}

@ -7,11 +7,9 @@ import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import org.enso.persist.Persistance;
import org.enso.persist.Persistance.Reference;


final class IrLazyMap<K, V> extends AbstractMap<K, V> {
private final Map<K, Entry<K, V>> delegate;

@ -2,7 +2,7 @@ package org.enso.compiler.core.ir;

import org.enso.persist.Persistable;

@Persistable(clazz=Location.class, id=1)
@Persistable(clazz = Location.class, id = 1)
public record Location(int start, int end) {
public int length() {
return end - start;

@ -10,13 +10,10 @@ import java.util.Objects;
import java.util.TreeMap;
import java.util.function.BiFunction;
import java.util.stream.Collectors;

import org.enso.compiler.core.CompilerStub;

import scala.Option;

/** Stores metadata for the various passes.
*/
/** Stores metadata for the various passes. */
public final class MetadataStorage {
private Map<ProcessingPass, ProcessingPass.Metadata> metadata;

@ -28,37 +25,40 @@ public final class MetadataStorage {
this.metadata = init;
}

/** Adds a new metadata entity to the pass metadata, or updates it if it
* already exists for a given pass.
*
* @param pass the pass to add the metadata for
* @param newMeta the metadata to add for `pass`
* @tparam K the concrete type of `pass`
*/
/**
* Adds a new metadata entity to the pass metadata, or updates it if it already exists for a given
* pass.
*
* @param pass the pass to add the metadata for
* @param newMeta the metadata to add for `pass`
* @tparam K the concrete type of `pass`
*/
public void update(ProcessingPass pass, ProcessingPass.Metadata newMeta) {
var copy = copyMetaMap();
copy.put(pass, newMeta);
metadata = copy;
}

/** Adds a metadata pair to the node metadata.
*
* This will overwrite any entry whose key matches [[MetadataPair#pass]].
*
* @param <K> the concrete type of the pass
* @param metadataPair the pair to add to the storage
*/
/**
* Adds a metadata pair to the node metadata.
*
* <p>This will overwrite any entry whose key matches [[MetadataPair#pass]].
*
* @param <K> the concrete type of the pass
* @param metadataPair the pair to add to the storage
*/
public <K extends ProcessingPass> void update(MetadataPair<K> metadataPair) {
update(metadataPair.pass(), metadataPair.metadata());
}

/** Removes the metadata for the specified pass from the list.
*
* @param pass the pass to remove metadata for
* @tparam K the concrete type of `pass`
* @return the removed metadata for that pass, if it exists
*/
public Option<ProcessingPass.Metadata> remove(ProcessingPass pass) {
/**
* Removes the metadata for the specified pass from the list.
*
* @param pass the pass to remove metadata for
* @tparam K the concrete type of `pass`
* @return the removed metadata for that pass, if it exists
*/
public Option<ProcessingPass.Metadata> remove(ProcessingPass pass) {
var prev = metadata.get(pass);
if (prev == null) {
return Option.empty();
@ -68,23 +68,25 @@ public final class MetadataStorage {
metadata = copy;
return Option.apply(prev);
}
}
}

/** Gets the metadata for the specified pass.
*
* @param pass the pass to get the metadata for
* @tparam K the concrete type of `pass`
* @return the metadata for `pass`, if it exists
*/
/**
* Gets the metadata for the specified pass.
*
* @param pass the pass to get the metadata for
* @tparam K the concrete type of `pass`
* @return the metadata for `pass`, if it exists
*/
public Option<ProcessingPass.Metadata> get(ProcessingPass pass) {
var prev = (ProcessingPass.Metadata) metadata.get(pass);
return Option.apply(prev);
}

/** Creates a deep copy of `this`.
*
* @return a deep copy of `this`
*/
/**
* Creates a deep copy of `this`.
*
* @return a deep copy of `this`
*/
public MetadataStorage duplicate() {
var map = new HashMap<ProcessingPass, ProcessingPass.Metadata>();
for (var entry : this.metadata.entrySet()) {
@ -99,65 +101,72 @@ public final class MetadataStorage {
return res;
}


/** Maps across the stored metadata, transforming it to an output list.
*
* @param <R> the resulting element of the list
* @param fn the function to apply over the metadata
* @return a list containing the results of transforming the metadata storage
*/
/**
* Maps across the stored metadata, transforming it to an output list.
*
* @param <R> the resulting element of the list
* @param fn the function to apply over the metadata
* @return a list containing the results of transforming the metadata storage
*/
public <R> List<R> map(BiFunction<ProcessingPass, ProcessingPass.Metadata, R> fn) {
return metadata.entrySet().stream().map(
(en) -> fn.apply(en.getKey(), en.getValue())
).toList();
return metadata.entrySet().stream().map((en) -> fn.apply(en.getKey(), en.getValue())).toList();
}

/** Prepares the metadata for serialization.
*
* This operation takes place _in place_.
*
* Metadata prepared for serialization should not contain any links that
* span more than one module, or any other properties that are problematic
* when serialized.
*
* Due to the type safety properties of
* [[org.enso.compiler.core.ir.MetadataStorage]], to allow this conversion
* to work it must be type-refined to return `typeof this`. To that end,
* there is no default definition for this method.
*
* @param compiler the Enso compiler
*/
/**
* Prepares the metadata for serialization.
*
* <p>This operation takes place _in place_.
*
* <p>Metadata prepared for serialization should not contain any links that span more than one
* module, or any other properties that are problematic when serialized.
*
* <p>Due to the type safety properties of [[org.enso.compiler.core.ir.MetadataStorage]], to allow
* this conversion to work it must be type-refined to return `typeof this`. To that end, there is
* no default definition for this method.
*
* @param compiler the Enso compiler
*/
public final void prepareForSerialization(CompilerStub compiler) {
var newMap = metadata.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, (en) -> {
var value = en.getValue();
var newVal = value.prepareForSerialization(compiler);
return newVal;
}));
var newMap =
metadata.entrySet().stream()
.collect(
Collectors.toMap(
Map.Entry::getKey,
(en) -> {
var value = en.getValue();
var newVal = value.prepareForSerialization(compiler);
return newVal;
}));
this.metadata.putAll(newMap);
}

/** Restores metadata after it has been deserialized.
*
* Due to the type safety properties of
* [[org.enso.compiler.core.ir.MetadataStorage]], to allow this conversion
* to work it must be type-refined to return `typeof this`. To that end,
* there is no default definition for this method.
*
* @param compiler the Enso compiler
* @return `true` if restoration was successful, `false` otherwise
*/
/**
* Restores metadata after it has been deserialized.
*
* <p>Due to the type safety properties of [[org.enso.compiler.core.ir.MetadataStorage]], to allow
* this conversion to work it must be type-refined to return `typeof this`. To that end, there is
* no default definition for this method.
*
* @param compiler the Enso compiler
* @return `true` if restoration was successful, `false` otherwise
*/
public boolean restoreFromSerialization(CompilerStub compiler) {
var ok = new boolean[] { true };
var newMap = metadata.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, (en) -> {
var value = en.getValue();
var newOption = value.restoreFromSerialization(compiler);
if (newOption.nonEmpty()) {
return newOption.get();
} else {
ok[0] = false;
return value;
}
}));
var ok = new boolean[] {true};
var newMap =
metadata.entrySet().stream()
.collect(
Collectors.toMap(
Map.Entry::getKey,
(en) -> {
var value = en.getValue();
var newOption = value.restoreFromSerialization(compiler);
if (newOption.nonEmpty()) {
return newOption.get();
} else {
ok[0] = false;
return value;
}
}));
this.metadata = newMap;
return ok[0];
}
@ -181,9 +190,10 @@ public final class MetadataStorage {
return sb.toString();
}

private static final Comparator<ProcessingPass> COMPARATOR = (p1, p2) -> {
return p1.getClass().getName().compareTo(p2.getClass().getName());
};
private static final Comparator<ProcessingPass> COMPARATOR =
(p1, p2) -> {
return p1.getClass().getName().compareTo(p2.getClass().getName());
};

private Map<ProcessingPass, ProcessingPass.Metadata> copyMetaMap() {
var copy = new TreeMap<ProcessingPass, ProcessingPass.Metadata>(COMPARATOR);
@ -209,6 +219,5 @@ public final class MetadataStorage {
return false;
}

public record MetadataPair<K extends ProcessingPass> (K pass, ProcessingPass.Metadata metadata) {
}
public record MetadataPair<K extends ProcessingPass>(K pass, ProcessingPass.Metadata metadata) {}
}

@ -1,17 +1,17 @@
|
||||
package org.enso.compiler.core;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.enso.compiler.core.ir.Module;
|
||||
import org.junit.AfterClass;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
@ -29,8 +29,7 @@ public class EnsoParserTest {
|
||||
|
||||
@AfterClass
|
||||
public static void closeEnsoParser() throws Exception {
|
||||
if (ensoCompiler != null)
|
||||
ensoCompiler.close();
|
||||
if (ensoCompiler != null) ensoCompiler.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -56,23 +55,31 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testLocationsCorrectAssignmentOfVariableReads() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
main =
|
||||
x = 2 + 2 * 2
|
||||
y = x * x
|
||||
IO.println y
|
||||
""", true, true, true);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLocationsMethodWithComplexBody() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
foo a b =
|
||||
x : Number
|
||||
x = a + 1
|
||||
y = b - 2
|
||||
x * y
|
||||
""", true, true, true);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
true);
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -101,12 +108,16 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testLocationsDeeplyNestedFunctionsNoBlock() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
Nothing.method =
|
||||
add = a -> b -> a + b
|
||||
|
||||
main = Nothing.method
|
||||
""", true, true, true);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
true);
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -119,7 +130,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testCase() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Msg
|
||||
Ahoj
|
||||
Ciao
|
||||
@ -128,24 +140,24 @@ public class EnsoParserTest {
|
||||
Ahoj -> 0
|
||||
Ciao -> 1
|
||||
Msg.Ciao -> 2
|
||||
"""
|
||||
);
|
||||
""");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTypeMethodWithSignature() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
@Builtin_Type
|
||||
type Fuzzy
|
||||
== : Correct -> Wrong
|
||||
== self right = @Builtin_Method "Fuzzy.=="
|
||||
"""
|
||||
);
|
||||
""");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testImport() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
import Standard.Base.Any.Any
|
||||
import project.IO
|
||||
import Standard.Base as Enso_List
|
||||
@ -159,7 +171,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testImportAll() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
## TODO Dubious constructor export
|
||||
from project.Network.Http.Version.Version import all
|
||||
from project.Network.Http.Version.Version export all
|
||||
@ -297,7 +310,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBoolean() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
@Builtin_Type
|
||||
type Boolean
|
||||
True
|
||||
@ -348,7 +362,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testAtEq() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Array
|
||||
== : Array -> Boolean
|
||||
== self that =
|
||||
@ -360,7 +375,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testNestedBlocks() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Array
|
||||
meaning =
|
||||
catch_primitive handler
|
||||
@ -387,7 +403,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testMetadataRaw() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
main =
|
||||
foo = 42
|
||||
|
||||
@ -408,7 +425,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testColumnSelector() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
## Specifies a selection of columns from the table on which an operation is
|
||||
going to be performed.
|
||||
type Column_Selector
|
||||
@ -419,7 +437,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testAssignments() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from_java_set java_set =
|
||||
owner = Vector.new_builder
|
||||
group = Vector.new_builder
|
||||
@ -429,7 +448,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testNumberTimes() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
Standard.Base.Number.times : List Any
|
||||
Standard.Base.Number.times self act =
|
||||
act
|
||||
@ -438,7 +458,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testIfThenBlock() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from_java_set java_set =
|
||||
if java_set.contains PosixFilePermission.OWNER_READ then
|
||||
owner.append Read
|
||||
@ -449,7 +470,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testInvokeFilePermissions() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from_java_set java_set =
|
||||
File_Permissions owner.to_vector group.to_vector others.to_vector
|
||||
""");
|
||||
@ -514,7 +536,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testTextLiteralWithEscape() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
wrap_junit_testsuites = '<?xml version="1.0"\\tencoding="UTF-8"?>\\n'
|
||||
""");
|
||||
}
|
||||
@ -546,7 +569,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testMultiParameterFunction() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from Standard.Base import all
|
||||
import Standard.Base.System
|
||||
|
||||
@ -558,7 +582,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testTestGroup() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Test
|
||||
## Creates a new test group, describing properties of the object
|
||||
described by `self`.
|
||||
@ -582,7 +607,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testEmptyGroup() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
main =
|
||||
x = Panic.catch Any () .convert_to_dataflow_error
|
||||
x.catch_primitive err->
|
||||
@ -602,7 +628,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testTestGroupSimple() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
group1 : Text -> Any -> (Text | Nothing) -> Nothing
|
||||
|
||||
type Test
|
||||
@ -612,7 +639,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testWildcardLeftHandSide() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
Any.should_succeed self frames_to_skip=0 =
|
||||
_ = frames_to_skip
|
||||
""");
|
||||
@ -628,7 +656,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testReverseList() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
reverse_list list =
|
||||
go = list -> acc -> case list of
|
||||
List.Cons h t -> go t (List.Cons h acc)
|
||||
@ -642,7 +671,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testProblemHandling() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
test_problem_handling : (Problem_Behavior -> Any) -> Vector Any -> (Any -> Nothing) -> Nothing
|
||||
test_problem_handling action expected_problems result_checker =
|
||||
result_checker result_ignoring
|
||||
@ -651,7 +681,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testProblemHandling2() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
test_problem_handling action expected_problems result_checker =
|
||||
error_result . should_fail_with first_problem_type frames_to_skip=3
|
||||
warnings_checker warnings =
|
||||
@ -697,7 +728,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testTableDataArgumentInCase() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
process_to_json_text value =
|
||||
json = case value of
|
||||
Table.Table_Data _ -> json_from_table value
|
||||
@ -707,7 +739,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testVisualizationCaseOf() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
prepare_visualization : Any -> Integer -> Json
|
||||
prepare_visualization x max_rows=1000 = case x of
|
||||
Array ->
|
||||
@ -721,7 +754,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testAggregateColumnGroupByTrue() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
prepare_aggregate_columns : [Aggregate_Column] -> Table -> Problem_Behavior -> Resolved_Aggregate_Columns
|
||||
prepare_aggregate_columns aggregates table =
|
||||
# Grouping Key
|
||||
@ -762,7 +796,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testTypeSignature() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
resolve_aggregate table problem_builder aggregate_column =
|
||||
table_columns = table.columns
|
||||
|
||||
@ -797,7 +832,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testSelfTypeKeyword() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type My_Type
|
||||
Cons_A x
|
||||
Cons_B y=(Self.Cons_A 10)
|
||||
@ -820,8 +856,6 @@ public class EnsoParserTest {
|
||||
""");
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testCaseOnTextLiteral() throws Exception {
|
||||
parseTest("""
|
||||
@ -874,7 +908,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testVectorVectorAny() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Vector
|
||||
build : Standard.Base.Vector.Matrix Standard.Base.Any Standard.Base.Float
|
||||
""");
|
||||
@ -890,7 +925,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testOperatorSectionRight() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
type Filter_Condition
|
||||
to_predicate self = case self of
|
||||
Less value -> <value
|
||||
@ -930,7 +966,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testListBody() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
list directory name_filter=Nothing recursive=False =
|
||||
new directory . list name_filter=name_filter recursive=recursive
|
||||
""");
|
||||
@ -947,7 +984,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testCaseWithComment() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
ansi_bold : Boolean -> Text -> Text
|
||||
ansi_bold enabled txt =
|
||||
case Platform.os of
|
||||
@ -967,7 +1005,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testGroupOfPatterns() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
sum self = case self of
|
||||
Group (A _) (B _ _) (C _ e _) (D _ f _ g) -> e + f + g
|
||||
""");
|
||||
@ -1034,7 +1073,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testConstructorMultipleNamedArgs1() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
x = Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive dot_matches_newline=True
|
||||
""");
|
||||
}
|
||||
@ -1064,7 +1104,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testRuntimeServerTestCode() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from Standard.Base.Data.Numbers import Number
|
||||
|
||||
main =
|
||||
@ -1077,7 +1118,10 @@ public class EnsoParserTest {
|
||||
y = self + 3
|
||||
z = y * x
|
||||
z
|
||||
""", true, true, true);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
true);
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -1098,7 +1142,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testAtomBenchmarks1() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
import Standard.Base.Data.List.List
|
||||
|
||||
main =
|
||||
@ -1110,7 +1155,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testAtomBenchmarks3() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
import Standard.Base.Data.List.List
|
||||
|
||||
List.mapReverse self f acc = case self of
|
||||
@ -1120,12 +1166,16 @@ public class EnsoParserTest {
|
||||
main = list ->
|
||||
res = list.mapReverse (x -> x + 1) List.Nil
|
||||
res
|
||||
""", true, true, false);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShouldQuoteValuesContainingTheCommentSymbol() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
suite =
|
||||
Test.specify "should quote values containing the comment symbol if comments are enabled" <|
|
||||
format = Delimited ',' . with_comments
|
||||
@ -1138,31 +1188,43 @@ public class EnsoParserTest {
|
||||
text_2 = File.read_text file
|
||||
text_2.should_equal expected_text_2
|
||||
file.delete
|
||||
""", true, true, false);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmptyValueBetweenComments() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
expected_text = normalize_lines <| \"""
|
||||
A,B
|
||||
1,
|
||||
,""
|
||||
3,abc
|
||||
""", true, true, false);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQuotedValues() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
expected_text = normalize_lines <| \"""
|
||||
"one, two, three",-1.5,42,"4\"000",
|
||||
""", true, true, false);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSimpleTripleQuote() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
expected_response = Json.parse <| '''
|
||||
{
|
||||
"headers": {
|
||||
@ -1183,12 +1245,16 @@ public class EnsoParserTest {
|
||||
json = Json.parse <| '''
|
||||
{"key":"val"}
|
||||
res = Http.new.post_json url_post json
|
||||
""", true, true, false);
|
||||
""",
|
||||
true,
|
||||
true,
|
||||
false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInThePresenceOfComments() throws Exception {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
# this is a comment
|
||||
#this too
|
||||
## But this is a doc.
|
||||
@ -1247,9 +1313,11 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBlockSyntax() throws Exception {
|
||||
equivalenceTest("""
|
||||
equivalenceTest(
|
||||
"""
|
||||
nums v fm ff n = v . map fm . filter ff . take n
|
||||
""", """
|
||||
""",
|
||||
"""
|
||||
nums v fm ff n = v
|
||||
. map fm
|
||||
. filter ff
|
||||
@ -1259,9 +1327,11 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBlockSyntaxOperators() throws Exception {
|
||||
equivalenceTest("""
|
||||
equivalenceTest(
|
||||
"""
|
||||
value = nums * each random + constant
|
||||
""", """
|
||||
""",
|
||||
"""
|
||||
value = nums
|
||||
* each random
|
||||
+ constant
|
||||
@ -1270,9 +1340,11 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBlockSyntaxOperators2() throws Exception {
|
||||
equivalenceTest("""
|
||||
equivalenceTest(
|
||||
"""
|
||||
value = (nums + each random) * constant
|
||||
""", """
|
||||
""",
|
||||
"""
|
||||
value = nums
|
||||
+ each random
|
||||
* constant
|
||||
@ -1281,9 +1353,11 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBlockSyntaxOperators3() throws Exception {
|
||||
equivalenceTest("""
|
||||
equivalenceTest(
|
||||
"""
|
||||
v = (rect1 . width) . center
|
||||
""", """
|
||||
""",
|
||||
"""
|
||||
v = rect1
|
||||
. width
|
||||
. center
|
||||
@ -1292,9 +1366,11 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testBlockSyntaxOperators4() throws Exception {
|
||||
equivalenceTest("""
|
||||
equivalenceTest(
|
||||
"""
|
||||
v = (rect1 . width 4) . center 3 2
|
||||
""", """
|
||||
""",
|
||||
"""
|
||||
v = rect1
|
||||
. width 4
|
||||
. center 3 2
|
||||
@ -1303,18 +1379,18 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void testPrivateModules() throws Exception {
|
||||
List<String> moduleCodes = List.of(
|
||||
"private",
|
||||
"""
|
||||
List<String> moduleCodes =
|
||||
List.of(
|
||||
"private",
|
||||
"""
|
||||
# Comment
|
||||
private
|
||||
""",
|
||||
"""
|
||||
"""
|
||||
# Comment with empty line
|
||||
|
||||
private
|
||||
"""
|
||||
);
|
||||
""");
|
||||
for (var moduleCode : moduleCodes) {
|
||||
parseTest(moduleCode);
|
||||
var module = compile("private");
|
||||
@ -1326,7 +1402,8 @@ public class EnsoParserTest {
|
||||
|
||||
@Test
|
||||
public void ise_184219679() throws IOException {
|
||||
parseTest("""
|
||||
parseTest(
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
main =
|
||||
@ -1338,11 +1415,12 @@ public class EnsoParserTest {
|
||||
}
|
||||
|
||||
private static void parseTest(String code) throws IOException {
|
||||
parseTest(code, true, true, true);
|
||||
parseTest(code, true, true, true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static void parseTest(String code, boolean noIds, boolean noLocations, boolean lessDocs) throws IOException {
|
||||
private static void parseTest(String code, boolean noIds, boolean noLocations, boolean lessDocs)
|
||||
throws IOException {
|
||||
var ir = compile(code);
|
||||
assertNotNull(ir);
|
||||
}
|
||||
@ -1373,8 +1451,18 @@ public class EnsoParserTest {
|
||||
var home = new File(System.getProperty("java.io.tmpdir")).toPath();
|
||||
var file1 = home.resolve(name + ".1");
|
||||
var file2 = home.resolve(name + ".2");
|
||||
Files.writeString(file1, ir1, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
|
||||
Files.writeString(file2, ir2, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
|
||||
Files.writeString(
|
||||
file1,
|
||||
ir1,
|
||||
StandardOpenOption.TRUNCATE_EXISTING,
|
||||
StandardOpenOption.CREATE,
|
||||
StandardOpenOption.WRITE);
|
||||
Files.writeString(
|
||||
file2,
|
||||
ir2,
|
||||
StandardOpenOption.TRUNCATE_EXISTING,
|
||||
StandardOpenOption.CREATE,
|
||||
StandardOpenOption.WRITE);
|
||||
assertEquals(msg, file1, file2);
|
||||
}
|
||||
}
|
||||
@ -1388,18 +1476,17 @@ public class EnsoParserTest {
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
|
||||
/** Takes an {@link IR} and converts it to text representation suitable for
|
||||
* "diffing" while "simplifying" it.
|
||||
/**
|
||||
* Takes an {@link IR} and converts it to text representation suitable for "diffing" while
|
||||
* "simplifying" it.
|
||||
*
|
||||
* @param ir the intermediate representation
|
||||
* @param noIds remove all UUIDs or keep them? Multiple runs usually assign
|
||||
* random/different UUIDs to various IR elements. Removing them is a best
|
||||
* way to make the converted text comparable
|
||||
* @param noLocations locations may slightly differ. Usually off-by-one.
|
||||
* Especially when running old and new parser in parallel - removing them
|
||||
* may be useful
|
||||
* @param lessDocs documentation often isn't an essential part of the IR
|
||||
* one can easily remove it by specifying {@code false}
|
||||
* @param noIds remove all UUIDs or keep them? Multiple runs usually assign random/different UUIDs
|
||||
* to various IR elements. Removing them is a best way to make the converted text comparable
|
||||
* @param noLocations locations may slightly differ. Usually off-by-one. Especially when running
|
||||
* old and new parser in parallel - removing them may be useful
|
||||
* @param lessDocs documentation often isn't an essential part of the IR one can easily remove it
|
||||
* by specifying {@code false}
|
||||
* @return string representation of the IR
|
||||
*/
|
||||
private static String simplifyIR(IR ir, boolean noIds, boolean noLocations, boolean lessDocs) {
|
||||
@ -1408,10 +1495,13 @@ public class EnsoParserTest {
|
||||
}
|
||||
String txt = ir.pretty();
|
||||
if (noIds) {
|
||||
txt = txt.replaceAll("[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]", "_");
|
||||
txt =
|
||||
txt.replaceAll(
|
||||
"[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]\\-[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]",
|
||||
"_");
|
||||
}
|
||||
if (lessDocs) {
|
||||
for (;;) {
|
||||
for (; ; ) {
|
||||
final String pref = "Comment.Documentation(";
|
||||
int at = txt.indexOf(pref);
|
||||
if (at == -1) {
|
||||
@ -1420,7 +1510,7 @@ public class EnsoParserTest {
|
||||
int to = txt.indexOf("location =", at + pref.length());
|
||||
txt = txt.substring(0, at) + "Comment.Doc(" + txt.substring(to);
|
||||
}
|
||||
for (;;) {
|
||||
for (; ; ) {
|
||||
final String pref = "Case.Pattern.Doc(";
|
||||
int at = txt.indexOf(pref);
|
||||
if (at == -1) {
|
||||
@ -1430,7 +1520,7 @@ public class EnsoParserTest {
|
||||
txt = txt.substring(0, at) + "Comment.CaseDoc(" + txt.substring(to);
|
||||
}
|
||||
}
|
||||
for (;;) {
|
||||
for (; ; ) {
|
||||
final String pref = "errors.Syntax(";
|
||||
int at = txt.indexOf(pref);
|
||||
if (at == -1) {
|
||||
@ -1441,5 +1531,4 @@ public class EnsoParserTest {
|
||||
}
|
||||
return txt;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -8,7 +8,6 @@ import java.util.Objects;
|
||||
import java.util.UUID;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.enso.compiler.core.ir.DiagnosticStorage;
|
||||
import org.enso.compiler.core.ir.IdentifiedLocation;
|
||||
import org.enso.compiler.core.ir.Location;
|
||||
@ -19,7 +18,6 @@ import org.enso.persist.Persistance;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.openide.util.lookup.ServiceProvider;
|
||||
|
||||
import scala.Option;
|
||||
import scala.Tuple2;
|
||||
import scala.collection.immutable.List;
|
||||
@ -72,13 +70,15 @@ public class IrPersistanceTest {
|
||||
public void scalaImmutableMapIsLazy() throws Exception {
|
||||
var s1 = new LazySeq("Hello");
|
||||
var s2 = new LazySeq("World");
|
||||
var in = (scala.collection.immutable.Map) scala.collection.immutable.Map$.MODULE$.empty()
|
||||
.$plus(new Tuple2("Hello", s1))
|
||||
.$plus(new Tuple2("World", s2));
|
||||
var in =
|
||||
(scala.collection.immutable.Map)
|
||||
scala.collection.immutable.Map$.MODULE$
|
||||
.empty()
|
||||
.$plus(new Tuple2("Hello", s1))
|
||||
.$plus(new Tuple2("World", s2));
|
||||
|
||||
LazySeq.forbidden = true;
|
||||
var out = (scala.collection.immutable.Map)
|
||||
serde(scala.collection.immutable.Map.class, in, 64);
|
||||
var out = (scala.collection.immutable.Map) serde(scala.collection.immutable.Map.class, in, 64);
|
||||
|
||||
assertEquals("Two pairs element", 2, out.size());
|
||||
assertEquals("Two keys", 2, out.keySet().size());
|
||||
@ -250,14 +250,16 @@ public class IrPersistanceTest {
|
||||
var plain = Persistance.read(arr, (Function<Object, Object>) null);
|
||||
assertEquals("Remains five", 5, plain.get(Service.class).value());
|
||||
|
||||
var multiOnRead = Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
var multiOnRead =
|
||||
Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
assertEquals("Multiplied on read", 15, multiOnRead.get(Service.class).value());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void writeReplace() throws Exception {
|
||||
var in = new Service(5);
|
||||
var arr = Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
var arr =
|
||||
Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
|
||||
var plain = Persistance.read(arr, (Function<Object, Object>) null);
|
||||
assertEquals("Multiplied on write", 15, plain.get(Service.class).value());
|
||||
@ -271,14 +273,16 @@ public class IrPersistanceTest {
|
||||
var plain = Persistance.read(arr, (Function<Object, Object>) null);
|
||||
assertEquals("Remains five", 5, plain.get(ServiceSupply.class).supply().value());
|
||||
|
||||
var multiOnRead = Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
var multiOnRead =
|
||||
Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
assertEquals("Multiplied on read", 15, multiOnRead.get(ServiceSupply.class).supply().value());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void writeReplaceInline() throws Exception {
|
||||
var in = new ServiceSupply(new Service(5));
|
||||
var arr = Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
var arr =
|
||||
Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
|
||||
var plain = Persistance.read(arr, (Function<Object, Object>) null);
|
||||
assertEquals("Multiplied on write", 15, plain.get(ServiceSupply.class).supply().value());
|
||||
@ -293,14 +297,17 @@ public class IrPersistanceTest {
|
||||
assertEquals("Remains five", 5, (int) plain.get(IntegerSupply.class).supply().get());
|
||||
assertEquals("Remains five 2", 5, (int) plain.get(IntegerSupply.class).supply().get());
|
||||
|
||||
var multiOnRead = Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
assertEquals("Multiplied on read", 15, (int) multiOnRead.get(IntegerSupply.class).supply().get());
|
||||
var multiOnRead =
|
||||
Persistance.read(arr, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
assertEquals(
|
||||
"Multiplied on read", 15, (int) multiOnRead.get(IntegerSupply.class).supply().get());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void writeReplaceReference() throws Exception {
|
||||
var in = new IntegerSupply(new Service(5));
|
||||
var arr = Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
var arr =
|
||||
Persistance.write(in, (obj) -> obj instanceof Service s ? new Service(s.value() * 3) : obj);
|
||||
|
||||
var plain = Persistance.read(arr, (Function<Object, Object>) null);
|
||||
assertEquals("Multiplied on write", 15, (int) plain.get(IntegerSupply.class).supply().get());
|
||||
@ -414,7 +421,7 @@ public class IrPersistanceTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Persistable(clazz=Service.class, id=432434)
|
||||
@Persistable(clazz = Service.class, id = 432434)
|
||||
public record Service(int value) implements Supplier<Integer> {
|
||||
@Override
|
||||
public Integer get() {
|
||||
@ -422,9 +429,9 @@ public class IrPersistanceTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Persistable(clazz=IntegerSupply.class, id=432435)
|
||||
@Persistable(clazz = IntegerSupply.class, id = 432435)
|
||||
public record IntegerSupply(Supplier<Integer> supply) {}
|
||||
|
||||
@Persistable(clazz=ServiceSupply.class, id=432436)
|
||||
@Persistable(clazz = ServiceSupply.class, id = 432436)
|
||||
public record ServiceSupply(Service supply) {}
|
||||
}
|
||||
|
@ -1,15 +1,15 @@
|
||||
package org.enso.interpreter.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import org.enso.interpreter.instrument.job.VisualizationResult;
|
||||
import org.enso.polyglot.HostEnsoUtils;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.PolyglotException;
|
||||
import org.graalvm.polyglot.Value;
|
||||
import org.junit.AfterClass;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
@ -29,7 +29,8 @@ public class FindExceptionMessageTest extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testThrowNPE() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
from Standard.Base import Panic
|
||||
polyglot java import java.lang.NullPointerException
|
||||
|
||||
@ -48,7 +49,8 @@ public class FindExceptionMessageTest extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testThrowNPEWithName() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
from Standard.Base import Panic
|
||||
polyglot java import java.lang.NullPointerException
|
||||
|
||||
@ -67,7 +69,8 @@ public class FindExceptionMessageTest extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testPanic() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
from Standard.Base import Panic
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
|
||||
@ -87,12 +90,14 @@ public class FindExceptionMessageTest extends TestBase {
|
||||
var msg = HostEnsoUtils.findExceptionMessage(ex);
|
||||
assertEquals(exp, msg);
|
||||
|
||||
executeInContext(ctx, () -> {
|
||||
var guestException = extractHostException(ex);
|
||||
var guestMsg = VisualizationResult.findExceptionMessage(guestException);
|
||||
assertEquals(exp, guestMsg);
|
||||
return null;
|
||||
});
|
||||
executeInContext(
|
||||
ctx,
|
||||
() -> {
|
||||
var guestException = extractHostException(ex);
|
||||
var guestMsg = VisualizationResult.findExceptionMessage(guestException);
|
||||
assertEquals(exp, guestMsg);
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
static Throwable extractHostException(PolyglotException ex) {
|
||||
|
@ -1,5 +1,7 @@
|
||||
package org.enso.interpreter.test;
|
||||
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.instrumentation.EventContext;
|
||||
import com.oracle.truffle.api.instrumentation.ExecutionEventNode;
|
||||
@ -9,10 +11,6 @@ import com.oracle.truffle.api.instrumentation.TruffleInstrument;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import com.oracle.truffle.api.source.SourceSection;
|
||||
import org.enso.interpreter.node.MethodRootNode;
|
||||
import org.enso.interpreter.node.callable.FunctionCallInstrumentationNode;
|
||||
import org.enso.pkg.QualifiedName;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
@ -20,7 +18,9 @@ import java.util.UUID;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.function.Function;
|
||||
import static org.junit.Assert.fail;
|
||||
import org.enso.interpreter.node.MethodRootNode;
|
||||
import org.enso.interpreter.node.callable.FunctionCallInstrumentationNode;
|
||||
import org.enso.pkg.QualifiedName;
|
||||
|
||||
/** Testing instrument to control newly created nodes. */
|
||||
@TruffleInstrument.Registration(
|
||||
@ -48,8 +48,8 @@ public class NodeCountingTestInstrument extends TruffleInstrument {
|
||||
|
||||
public void enable(SourceSectionFilter filter) {
|
||||
this.env
|
||||
.getInstrumenter()
|
||||
.attachExecutionEventFactory(filter, new CountingAndFunctionCallFactory());
|
||||
.getInstrumenter()
|
||||
.attachExecutionEventFactory(filter, new CountingAndFunctionCallFactory());
|
||||
}
|
||||
|
||||
public Map<UUID, FunctionCallInfo> registeredCalls() {
|
||||
@ -121,17 +121,17 @@ public class NodeCountingTestInstrument extends TruffleInstrument {
|
||||
public void onReturnValue(VirtualFrame frame, Object result) {
|
||||
Node node = context.getInstrumentedNode();
|
||||
if (node instanceof FunctionCallInstrumentationNode instrumentableNode
|
||||
&& result instanceof FunctionCallInstrumentationNode.FunctionCall functionCall) {
|
||||
&& result instanceof FunctionCallInstrumentationNode.FunctionCall functionCall) {
|
||||
onFunctionReturn(instrumentableNode, functionCall);
|
||||
}
|
||||
}
|
||||
|
||||
private void onFunctionReturn(FunctionCallInstrumentationNode node, FunctionCallInstrumentationNode.FunctionCall result) {
|
||||
private void onFunctionReturn(
|
||||
FunctionCallInstrumentationNode node, FunctionCallInstrumentationNode.FunctionCall result) {
|
||||
if (node.getId() != null) {
|
||||
calls.put(node.getId(), new FunctionCallInfo(result));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class FunctionCallInfo {
|
||||
@ -163,8 +163,8 @@ public class NodeCountingTestInstrument extends TruffleInstrument {
|
||||
}
|
||||
FunctionCallInfo that = (FunctionCallInfo) o;
|
||||
return Objects.equals(moduleName, that.moduleName)
|
||||
&& Objects.equals(typeName, that.typeName)
|
||||
&& Objects.equals(functionName, that.functionName);
|
||||
&& Objects.equals(typeName, that.typeName)
|
||||
&& Objects.equals(functionName, that.functionName);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1,14 +1,18 @@
|
||||
package org.enso.interpreter.test.instrument;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import com.oracle.truffle.api.instrumentation.InstrumentableNode;
|
||||
import com.oracle.truffle.api.instrumentation.StandardTags;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import com.oracle.truffle.api.source.SourceSection;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.interpreter.node.ClosureRootNode;
|
||||
import org.enso.interpreter.runtime.tag.AvoidIdInstrumentationTag;
|
||||
import org.enso.interpreter.runtime.tag.IdentifiedTag;
|
||||
@ -20,10 +24,6 @@ import org.graalvm.polyglot.Source;
|
||||
import org.graalvm.polyglot.io.IOAccess;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
@ -34,27 +34,28 @@ public class AvoidIdInstrumentationTagTest {
|
||||
|
||||
@Before
|
||||
public void initContext() {
|
||||
context = Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath()
|
||||
)
|
||||
.option(
|
||||
RuntimeOptions.LOG_LEVEL,
|
||||
Level.WARNING.getName()
|
||||
)
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
context =
|
||||
Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath())
|
||||
.option(RuntimeOptions.LOG_LEVEL, Level.WARNING.getName())
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
|
||||
var engine = context.getEngine();
|
||||
Map<String, Language> langs = engine.getLanguages();
|
||||
Assert.assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
|
||||
nodes = engine.getInstruments().get(NodeCountingTestInstrument.INSTRUMENT_ID).lookup(NodeCountingTestInstrument.class);
|
||||
nodes =
|
||||
engine
|
||||
.getInstruments()
|
||||
.get(NodeCountingTestInstrument.INSTRUMENT_ID)
|
||||
.lookup(NodeCountingTestInstrument.class);
|
||||
nodes.enable();
|
||||
}
|
||||
|
||||
@ -65,7 +66,8 @@ public class AvoidIdInstrumentationTagTest {
|
||||
|
||||
@Test
|
||||
public void avoidIdInstrumentationInLambdaMapFunctionWithFloor() throws Exception {
|
||||
var code = """
|
||||
var code =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
import Standard.Visualization
|
||||
|
||||
@ -77,18 +79,21 @@ public class AvoidIdInstrumentationTagTest {
|
||||
var res = run.execute(10000);
|
||||
assertEquals("Array of the requested size computed", 10000, res.getArraySize());
|
||||
|
||||
Predicate<SourceSection> isLambda = (ss) -> {
|
||||
var sameSrc = ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("floor") && !st.contains("map");
|
||||
};
|
||||
Predicate<SourceSection> isLambda =
|
||||
(ss) -> {
|
||||
var sameSrc =
|
||||
ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("floor") && !st.contains("map");
|
||||
};
|
||||
|
||||
assertAvoidIdInstrumentationTag(isLambda);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void avoidIdInstrumentationInLambdaMapFunctionYear2010() throws Exception {
|
||||
var code = """
|
||||
var code =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
operator13 = [ 1973, 1975, 2005, 2006 ]
|
||||
@ -104,18 +109,21 @@ public class AvoidIdInstrumentationTagTest {
|
||||
assertEquals("Size is 2", 2, element.getArraySize());
|
||||
}
|
||||
|
||||
Predicate<SourceSection> isLambda = (ss) -> {
|
||||
var sameSrc = ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("2010") && !st.contains("map");
|
||||
};
|
||||
Predicate<SourceSection> isLambda =
|
||||
(ss) -> {
|
||||
var sameSrc =
|
||||
ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("2010") && !st.contains("map");
|
||||
};
|
||||
|
||||
assertAvoidIdInstrumentationTag(isLambda);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void avoidIdInstrumentationInMap() throws Exception {
|
||||
var code = """
|
||||
var code =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
run =
|
||||
@ -129,16 +137,17 @@ public class AvoidIdInstrumentationTagTest {
|
||||
var res = module.invokeMember("eval_expression", "run");
|
||||
assertEquals("Array of the requested size computed", 2, res.getArraySize());
|
||||
|
||||
Predicate<SourceSection> isLambda = (ss) -> {
|
||||
var sameSrc = ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("Case.Lower") && !st.contains("to_case");
|
||||
};
|
||||
Predicate<SourceSection> isLambda =
|
||||
(ss) -> {
|
||||
var sameSrc =
|
||||
ss.getSource().getCharacters().toString().equals(src.getCharacters().toString());
|
||||
var st = ss.getCharacters().toString();
|
||||
return sameSrc && st.contains("Case.Lower") && !st.contains("to_case");
|
||||
};
|
||||
|
||||
assertAvoidIdInstrumentationTag(isLambda);
|
||||
}
|
||||
|
||||
|
||||
private void assertAvoidIdInstrumentationTag(Predicate<SourceSection> isLambda) {
|
||||
var found = nodes.assertNewNodes("Give me nodes", 0, 10000);
|
||||
var err = new StringBuilder();
|
||||
@ -151,7 +160,11 @@ public class AvoidIdInstrumentationTagTest {
|
||||
continue;
|
||||
}
|
||||
if (isLambda.test(ss)) {
|
||||
err.append("\n").append("code: ").append(ss.getCharacters()).append(" for node ").append(n.getClass().getName());
|
||||
err.append("\n")
|
||||
.append("code: ")
|
||||
.append(ss.getCharacters())
|
||||
.append(" for node ")
|
||||
.append(n.getClass().getName());
|
||||
if (n instanceof InstrumentableNode in) {
|
||||
if (!hasAvoidIdInstrumentationTag(err, in, n.getRootNode())) {
|
||||
missingTagInLambda = true;
|
||||
@ -168,7 +181,8 @@ public class AvoidIdInstrumentationTagTest {
|
||||
assertNotEquals("Found some nodes", 0, count);
|
||||
}
|
||||
|
||||
private boolean hasAvoidIdInstrumentationTag(StringBuilder err, InstrumentableNode in, RootNode rn) {
|
||||
private boolean hasAvoidIdInstrumentationTag(
|
||||
StringBuilder err, InstrumentableNode in, RootNode rn) {
|
||||
var hasAvoidIdInstrumentationTag = in.hasTag(AvoidIdInstrumentationTag.class);
|
||||
if (!hasAvoidIdInstrumentationTag) {
|
||||
err.append("\nERROR!");
|
||||
@ -176,10 +190,14 @@ public class AvoidIdInstrumentationTagTest {
|
||||
|
||||
err.append("\n").append(" AvoidIdInstrumentationTag: ").append(hasAvoidIdInstrumentationTag);
|
||||
err.append("\n").append(" IdentifiedTag: ").append(in.hasTag(IdentifiedTag.class));
|
||||
err.append("\n").append(" ExpressionTag: ").append(in.hasTag(StandardTags.ExpressionTag.class));
|
||||
err.append("\n")
|
||||
.append(" ExpressionTag: ")
|
||||
.append(in.hasTag(StandardTags.ExpressionTag.class));
|
||||
err.append("\n").append(" RootNode: ").append(rn);
|
||||
if (rn instanceof ClosureRootNode crn) {
|
||||
err.append("\n").append(" ClosureRootNode.subject to instr: ").append(crn.isSubjectToInstrumentation());
|
||||
err.append("\n")
|
||||
.append(" ClosureRootNode.subject to instr: ")
|
||||
.append(crn.isSubjectToInstrumentation());
|
||||
err.append("\n").append(" ClosureRootNode.used in bindings: ").append(crn.isUsedInBinding());
|
||||
}
|
||||
return hasAvoidIdInstrumentationTag;
|
||||
|
@ -1,5 +1,9 @@
package org.enso.interpreter.test.instrument;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import com.oracle.truffle.api.nodes.Node;
import java.io.File;
import java.util.HashMap;
@ -20,20 +24,16 @@ import org.enso.polyglot.runtime.Runtime$Api$ExpressionUpdates;
import org.enso.polyglot.runtime.Runtime$Api$InitializedNotification;
import org.enso.polyglot.runtime.Runtime$Api$MethodCall;
import org.enso.polyglot.runtime.Runtime$Api$MethodPointer;
import org.enso.polyglot.runtime.Runtime$Api$OpenFileRequest;
import org.enso.polyglot.runtime.Runtime$Api$OpenFileResponse$;
import org.enso.polyglot.runtime.Runtime$Api$PushContextRequest;
import org.enso.polyglot.runtime.Runtime$Api$PushContextResponse;
import org.enso.polyglot.runtime.Runtime$Api$Request;
import org.enso.polyglot.runtime.Runtime$Api$Response;
import org.enso.polyglot.runtime.Runtime$Api$SetExpressionValueNotification;
import org.enso.polyglot.runtime.Runtime$Api$OpenFileRequest;
import org.enso.polyglot.runtime.Runtime$Api$OpenFileResponse$;
import org.enso.polyglot.runtime.Runtime$Api$StackItem$ExplicitCall;
import org.enso.polyglot.runtime.Runtime$Api$StackItem$LocalCall;
import org.enso.text.editing.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -72,7 +72,8 @@ public class IncrementalUpdatesTest {

@Test
public void sendUpdatesWhenFunctionBodyIsChanged() {
sendUpdatesWhenFunctionBodyIsChangedBySettingValue("4", ConstantsGen.INTEGER, "4", "5", "5", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
"4", ConstantsGen.INTEGER, "4", "5", "5", LiteralNode.class);
var m = context.languageContext().findModule(MODULE_NAME).orElse(null);
assertNotNull("Module found", m);
var numbers = m.getIr().preorder().filter((v1) -> v1 instanceof Literal.Number);
@ -84,17 +85,20 @@ public class IncrementalUpdatesTest {

@Test
public void sendUpdatesWhenWhenLineIsChangedBySettingValue() {
sendUpdatesWhenFunctionBodyIsChangedBySettingValue("4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
"4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
}

@Test
public void sendUpdatesWhenWhenLineIsChangedByTextEdit() {
sendUpdatesWhenFunctionBodyIsChangedByTextEdit("4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedByTextEdit(
"4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
}

@Test
public void sendMultipleUpdates() {
sendUpdatesWhenFunctionBodyIsChangedBySettingValue("4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
"4", ConstantsGen.INTEGER, "4", "1000", "1000", LiteralNode.class);
sendExpressionValue("1000", "333");
assertEquals(List.newBuilder().addOne("333"), context.consumeOut());
nodeCountingInstrument.assertNewNodes("No execution on 333, no nodes yet", 0, 0);
@ -105,30 +109,38 @@ public class IncrementalUpdatesTest {

@Test
public void sendUpdatesWhenTextIsChangedByTextEdit() {
sendUpdatesWhenFunctionBodyIsChangedByTextEdit("\"hi\"", ConstantsGen.TEXT, "hi", "\"text\"", "text", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedByTextEdit(
"\"hi\"", ConstantsGen.TEXT, "hi", "\"text\"", "text", LiteralNode.class);
}

@Test
public void sendUpdatesWhenTextIsChangedBySettingValue() {
sendUpdatesWhenFunctionBodyIsChangedBySettingValue("\"hi\"", ConstantsGen.TEXT, "hi", "\"text\"", "text", LiteralNode.class);
sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
"\"hi\"", ConstantsGen.TEXT, "hi", "\"text\"", "text", LiteralNode.class);
}

@Test
public void sendNotANumberChange() {
var result = sendUpdatesWhenFunctionBodyIsChangedBySettingValue("4", ConstantsGen.INTEGER, "4", "x", null, LiteralNode.class);
assertTrue("Execution succeeds: " + result, result.head().payload() instanceof Runtime$Api$ExecutionComplete);
assertEquals("Error is printed as a result",
List.newBuilder().addOne("(Error: Uninitialized value)"), context.consumeOut()
);
var result =
sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
"4", ConstantsGen.INTEGER, "4", "x", null, LiteralNode.class);
assertTrue(
"Execution succeeds: " + result,
result.head().payload() instanceof Runtime$Api$ExecutionComplete);
assertEquals(
"Error is printed as a result",
List.newBuilder().addOne("(Error: Uninitialized value)"),
context.consumeOut());
}

private static String extractPositions(String code, String chars, Map<Character, int[]> beginAndLength) {
for (int at = 0; at < code.length();) {
private static String extractPositions(
String code, String chars, Map<Character, int[]> beginAndLength) {
for (int at = 0; at < code.length(); ) {
char ch = code.charAt(at);
if (chars.indexOf(ch) >= 0) {
int[] prev = beginAndLength.get(ch);
if (prev == null) {
beginAndLength.put(ch, new int[]{at, -1});
beginAndLength.put(ch, new int[] {at, -1});
} else if (prev[1] == -1) {
prev[1] = at - prev[0];
} else {
@ -145,30 +157,55 @@ public class IncrementalUpdatesTest {
}

private List<Runtime$Api$Response> sendUpdatesWhenFunctionBodyIsChangedByTextEdit(
String originalText, String exprType, String originalOutput,
String newText, String executionOutput, Class<? extends Node> truffleNodeType
) {
return sendUpdatesWhenFunctionBodyIsChanged(originalText, exprType, originalOutput, newText, executionOutput, truffleNodeType, this::sendEditFile);
String originalText,
String exprType,
String originalOutput,
String newText,
String executionOutput,
Class<? extends Node> truffleNodeType) {
return sendUpdatesWhenFunctionBodyIsChanged(
originalText,
exprType,
originalOutput,
newText,
executionOutput,
truffleNodeType,
this::sendEditFile);
}

private List<Runtime$Api$Response> sendUpdatesWhenFunctionBodyIsChangedBySettingValue(
String originalText, String exprType, String originalOutput,
String newText, String executionOutput, Class<? extends Node> truffleNodeType
) {
return sendUpdatesWhenFunctionBodyIsChanged(originalText, exprType, originalOutput, newText, executionOutput, truffleNodeType, this::sendExpressionValue);
String originalText,
String exprType,
String originalOutput,
String newText,
String executionOutput,
Class<? extends Node> truffleNodeType) {
return sendUpdatesWhenFunctionBodyIsChanged(
originalText,
exprType,
originalOutput,
newText,
executionOutput,
truffleNodeType,
this::sendExpressionValue);
}

private List<Runtime$Api$Response> sendUpdatesWhenFunctionBodyIsChanged(
String originalText, String exprType, String originalOutput,
String newText, String executionOutput, Class<? extends Node> truffleNodeType,
java.util.function.BiFunction<String, String, List<Runtime$Api$Response>> sendEdit
) {
String originalText,
String exprType,
String originalOutput,
String newText,
String executionOutput,
Class<? extends Node> truffleNodeType,
java.util.function.BiFunction<String, String, List<Runtime$Api$Response>> sendEdit) {
var contextId = UUID.randomUUID();
var requestId = UUID.randomUUID();
var metadata = new Metadata();

var pos = new HashMap<Character, int[]>();
var code = extractPositions("""
var code =
extractPositions(
"""
import Standard.Base.IO

&$foo$ =
@ -178,13 +215,16 @@ public class IncrementalUpdatesTest {
main =
y = @foo@
%IO.println y%
""".replace("{originalText}", originalText),
"&$#*@%", pos);
"""
.replace("{originalText}", originalText),
"&$#*@%",
pos);

Function<Character, UUID> registerRegion = (ch) -> {
int[] beginAndLength = pos.get(ch);
return metadata.addItem(beginAndLength[0], beginAndLength[1], null);
};
Function<Character, UUID> registerRegion =
(ch) -> {
int[] beginAndLength = pos.get(ch);
return metadata.addItem(beginAndLength[0], beginAndLength[1], null);
};
// foo definition
registerRegion.apply('&');
// foo name
@ -203,62 +243,61 @@ public class IncrementalUpdatesTest {
|
||||
var request = Request(requestId, new Runtime$Api$CreateContextRequest(contextId));
|
||||
context.send(request);
|
||||
var response = context.receive().get();
|
||||
assertEquals(response,
|
||||
Response(requestId, new Runtime$Api$CreateContextResponse(contextId))
|
||||
);
|
||||
assertEquals(response, Response(requestId, new Runtime$Api$CreateContextResponse(contextId)));
|
||||
// Open the new file
|
||||
context.send(
|
||||
Request(requestId, new Runtime$Api$OpenFileRequest(mainFile, contents))
|
||||
);
|
||||
context.send(Request(requestId, new Runtime$Api$OpenFileRequest(mainFile, contents)));
|
||||
response = context.receive().get();
|
||||
assertEquals(response,
|
||||
Response(requestId, Runtime$Api$OpenFileResponse$.MODULE$)
|
||||
);
|
||||
assertEquals(response, Response(requestId, Runtime$Api$OpenFileResponse$.MODULE$));
|
||||
|
||||
nodeCountingInstrument.assertNewNodes("No execution, no nodes yet", 0, 0);
|
||||
|
||||
context.send(
|
||||
Request(
|
||||
requestId,
|
||||
new Runtime$Api$PushContextRequest(
|
||||
contextId,
|
||||
new Runtime$Api$StackItem$ExplicitCall(
|
||||
new Runtime$Api$MethodPointer(MODULE_NAME, "Enso_Test.Test.Main", "main"),
|
||||
None(),
|
||||
new Vector1<>(new String[]{"0"})
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
Request(
|
||||
requestId,
|
||||
new Runtime$Api$PushContextRequest(
|
||||
contextId,
|
||||
new Runtime$Api$StackItem$ExplicitCall(
|
||||
new Runtime$Api$MethodPointer(MODULE_NAME, "Enso_Test.Test.Main", "main"),
|
||||
None(),
|
||||
new Vector1<>(new String[] {"0"})))));
|
||||
|
||||
assertSameElements(context.receiveNIgnorePendingExpressionUpdates(4, 10, emptySet()),
|
||||
Response(requestId, new Runtime$Api$PushContextResponse(contextId)),
|
||||
TestMessages.update(contextId, mainFoo, exprType, new Runtime$Api$MethodCall(new Runtime$Api$MethodPointer("Enso_Test.Test.Main", "Enso_Test.Test.Main", "foo"), Vector$.MODULE$.empty())),
|
||||
TestMessages.update(contextId, mainRes, ConstantsGen.NOTHING),
|
||||
context.executionComplete(contextId)
|
||||
);
|
||||
assertSameElements(
|
||||
context.receiveNIgnorePendingExpressionUpdates(4, 10, emptySet()),
|
||||
Response(requestId, new Runtime$Api$PushContextResponse(contextId)),
|
||||
TestMessages.update(
|
||||
contextId,
|
||||
mainFoo,
|
||||
exprType,
|
||||
new Runtime$Api$MethodCall(
|
||||
new Runtime$Api$MethodPointer("Enso_Test.Test.Main", "Enso_Test.Test.Main", "foo"),
|
||||
Vector$.MODULE$.empty())),
|
||||
TestMessages.update(contextId, mainRes, ConstantsGen.NOTHING),
|
||||
context.executionComplete(contextId));
|
||||
assertEquals(List.newBuilder().addOne(originalOutput), context.consumeOut());
|
||||
|
||||
var allNodesAfterException = nodeCountingInstrument.assertNewNodes("Execution creates some nodes", 20, 35);
|
||||
var allNodesAfterException =
|
||||
nodeCountingInstrument.assertNewNodes("Execution creates some nodes", 20, 35);
|
||||
|
||||
// push foo call
|
||||
context.send(
|
||||
Request(
|
||||
requestId,
|
||||
new Runtime$Api$PushContextRequest(contextId, new Runtime$Api$StackItem$LocalCall(mainFoo))
|
||||
)
|
||||
);
|
||||
assertSameElements(context.receiveNIgnorePendingExpressionUpdates(4, 10, emptySet()),
|
||||
Response(requestId, new Runtime$Api$PushContextResponse(contextId)),
|
||||
TestMessages.update(contextId, fooX, exprType),
|
||||
TestMessages.update(contextId, fooRes, exprType),
|
||||
context.executionComplete(contextId)
|
||||
);
|
||||
Request(
|
||||
requestId,
|
||||
new Runtime$Api$PushContextRequest(
|
||||
contextId, new Runtime$Api$StackItem$LocalCall(mainFoo))));
|
||||
assertSameElements(
|
||||
context.receiveNIgnorePendingExpressionUpdates(4, 10, emptySet()),
|
||||
Response(requestId, new Runtime$Api$PushContextResponse(contextId)),
|
||||
TestMessages.update(contextId, fooX, exprType),
|
||||
TestMessages.update(contextId, fooRes, exprType),
|
||||
context.executionComplete(contextId));
|
||||
assertEquals(List.newBuilder().addOne(originalOutput), context.consumeOut());
|
||||
|
||||
nodeCountingInstrument.assertNewNodes("No new nodes created", 0, 0);
|
||||
var literalNode = findLiteralNode(truffleNodeType, allNodesAfterException);
|
||||
assertEquals("Check Literal node text in the source", originalText, literalNode.getSourceSection().getCharacters().toString());
|
||||
assertEquals(
|
||||
"Check Literal node text in the source",
|
||||
originalText,
|
||||
literalNode.getSourceSection().getCharacters().toString());
|
||||
|
||||
var executionCompleteEvents = sendEdit.apply(originalText, newText);
|
||||
if (executionOutput != null) {
|
||||
@ -266,50 +305,57 @@ public class IncrementalUpdatesTest {
|
||||
assertEquals(List.newBuilder().addOne(executionOutput), context.consumeOut());
|
||||
nodeCountingInstrument.assertNewNodes("No new nodes created", 0, 0);
|
||||
|
||||
assertEquals("Literal node has been updated in the source", newText, literalNode.getSourceSection().getCharacters().toString());
|
||||
assertEquals(
|
||||
"Literal node has been updated in the source",
|
||||
newText,
|
||||
literalNode.getSourceSection().getCharacters().toString());
|
||||
}
|
||||
return executionCompleteEvents;
|
||||
}
|
||||
|
||||
private List<Runtime$Api$Response> sendEditFile(String originalText, String newText) {
|
||||
assertNotNull("Main file must be defined before", mainFile);
|
||||
context.send(Request(new Runtime$Api$EditFileNotification(
|
||||
mainFile,
|
||||
makeSeq(
|
||||
new model.TextEdit(
|
||||
new model.Range(new model.Position(3, 8), new model.Position(3, 8 + originalText.length())),
|
||||
newText
|
||||
)
|
||||
),
|
||||
true
|
||||
)));
|
||||
context.send(
|
||||
Request(
|
||||
new Runtime$Api$EditFileNotification(
|
||||
mainFile,
|
||||
makeSeq(
|
||||
new model.TextEdit(
|
||||
new model.Range(
|
||||
new model.Position(3, 8),
|
||||
new model.Position(3, 8 + originalText.length())),
|
||||
newText)),
|
||||
true)));
|
||||
return context.receiveNIgnorePendingExpressionUpdates(1, 10, emptySet());
|
||||
}
|
||||
|
||||
private List<Runtime$Api$Response> sendExpressionValue(String originalText, String newText) {
|
||||
assertNotNull("Main file must be defined before", mainFile);
|
||||
context.send(Request(new Runtime$Api$SetExpressionValueNotification(
|
||||
mainFile,
|
||||
makeSeq(
|
||||
new model.TextEdit(
|
||||
new model.Range(new model.Position(3, 8), new model.Position(3, 8 + originalText.length())),
|
||||
newText
|
||||
)
|
||||
),
|
||||
UUID.randomUUID(),
|
||||
newText
|
||||
)));
|
||||
context.send(
|
||||
Request(
|
||||
new Runtime$Api$SetExpressionValueNotification(
|
||||
mainFile,
|
||||
makeSeq(
|
||||
new model.TextEdit(
|
||||
new model.Range(
|
||||
new model.Position(3, 8),
|
||||
new model.Position(3, 8 + originalText.length())),
|
||||
newText)),
|
||||
UUID.randomUUID(),
|
||||
newText)));
|
||||
return context.receiveNIgnoreExpressionUpdates(1, 10);
|
||||
}
|
||||
|
||||
private <T extends Node> T findLiteralNode(Class<T> type, Map<Class, java.util.List<Node>> nodes) {
|
||||
private <T extends Node> T findLiteralNode(
|
||||
Class<T> type, Map<Class, java.util.List<Node>> nodes) {
|
||||
var intNodes = nodes.get(type);
|
||||
assertNotNull("Found LiteralNode in " + nodes, intNodes);
|
||||
assertEquals("Expecting one node: " + intNodes, 1, intNodes.size());
|
||||
return type.cast(intNodes.get(0));
|
||||
}
|
||||
|
||||
private static void assertSameElements(List<Runtime$Api$Response> actual, Runtime$Api$Response... seq) {
|
||||
private static void assertSameElements(
|
||||
List<Runtime$Api$Response> actual, Runtime$Api$Response... seq) {
|
||||
assertEquals("Same size: " + actual, seq.length, actual.size());
|
||||
for (int i = 0; i < seq.length; i++) {
|
||||
var real = actual.drop(i).head();
|
||||
@ -340,7 +386,8 @@ public class IncrementalUpdatesTest {
|
||||
return Set$.MODULE$.empty();
|
||||
}
|
||||
|
||||
private static Runtime$Api$Request Request(UUID id, org.enso.polyglot.runtime.Runtime.ApiRequest request) {
|
||||
private static Runtime$Api$Request Request(
|
||||
UUID id, org.enso.polyglot.runtime.Runtime.ApiRequest request) {
|
||||
return org.enso.polyglot.runtime.Runtime$Api$Request$.MODULE$.apply(id, request);
|
||||
}
|
||||
|
||||
@ -348,11 +395,13 @@ public class IncrementalUpdatesTest {
|
||||
return org.enso.polyglot.runtime.Runtime$Api$Request$.MODULE$.apply(request);
|
||||
}
|
||||
|
||||
private static Runtime$Api$Response Response(org.enso.polyglot.runtime.Runtime.ApiResponse request) {
|
||||
private static Runtime$Api$Response Response(
|
||||
org.enso.polyglot.runtime.Runtime.ApiResponse request) {
|
||||
return org.enso.polyglot.runtime.Runtime$Api$Response$.MODULE$.apply(request);
|
||||
}
|
||||
|
||||
private static Runtime$Api$Response Response(UUID id, org.enso.polyglot.runtime.Runtime.ApiResponse request) {
|
||||
private static Runtime$Api$Response Response(
|
||||
UUID id, org.enso.polyglot.runtime.Runtime.ApiResponse request) {
|
||||
return org.enso.polyglot.runtime.Runtime$Api$Response$.MODULE$.apply(id, request);
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,12 @@
|
||||
package org.enso.interpreter.test.instrument;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import com.oracle.truffle.api.instrumentation.SourceSectionFilter;
|
||||
import com.oracle.truffle.api.instrumentation.StandardTags;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import java.util.logging.Level;
|
||||
import org.enso.interpreter.runtime.tag.AvoidIdInstrumentationTag;
|
||||
import org.enso.interpreter.runtime.tag.IdentifiedTag;
|
||||
import org.enso.interpreter.test.Metadata;
|
||||
@ -10,90 +15,86 @@ import org.enso.polyglot.RuntimeOptions;
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Language;
|
||||
import org.graalvm.polyglot.Source;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import org.graalvm.polyglot.io.IOAccess;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import java.util.logging.Level;
|
||||
|
||||
public class WarningInstrumentationTest {
|
||||
|
||||
private Context context;
|
||||
private NodeCountingTestInstrument instrument;
|
||||
private Context context;
|
||||
private NodeCountingTestInstrument instrument;
|
||||
|
||||
@Before
|
||||
public void initContext() {
|
||||
context = Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath()
|
||||
)
|
||||
.option(
|
||||
RuntimeOptions.LOG_LEVEL,
|
||||
Level.WARNING.getName()
|
||||
)
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
@Before
|
||||
public void initContext() {
|
||||
context =
|
||||
Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath())
|
||||
.option(RuntimeOptions.LOG_LEVEL, Level.WARNING.getName())
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
|
||||
var engine = context.getEngine();
|
||||
Map<String, Language> langs = engine.getLanguages();
|
||||
Assert.assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
var engine = context.getEngine();
|
||||
Map<String, Language> langs = engine.getLanguages();
|
||||
Assert.assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
|
||||
instrument = engine.getInstruments().get(NodeCountingTestInstrument.INSTRUMENT_ID).lookup(NodeCountingTestInstrument.class);
|
||||
SourceSectionFilter builder = SourceSectionFilter.newBuilder()
|
||||
.tagIs(StandardTags.ExpressionTag.class, StandardTags.CallTag.class)
|
||||
.tagIs(IdentifiedTag.class)
|
||||
.tagIsNot(AvoidIdInstrumentationTag.class)
|
||||
.build();
|
||||
instrument.enable(builder);
|
||||
}
|
||||
instrument =
|
||||
engine
|
||||
.getInstruments()
|
||||
.get(NodeCountingTestInstrument.INSTRUMENT_ID)
|
||||
.lookup(NodeCountingTestInstrument.class);
|
||||
SourceSectionFilter builder =
|
||||
SourceSectionFilter.newBuilder()
|
||||
.tagIs(StandardTags.ExpressionTag.class, StandardTags.CallTag.class)
|
||||
.tagIs(IdentifiedTag.class)
|
||||
.tagIsNot(AvoidIdInstrumentationTag.class)
|
||||
.build();
|
||||
instrument.enable(builder);
|
||||
}
|
||||
|
||||
@After
|
||||
public void disposeContext() {
|
||||
context.close();
|
||||
}
|
||||
@After
|
||||
public void disposeContext() {
|
||||
context.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void instrumentValueWithWarnings() throws Exception {
|
||||
var metadata = new Metadata();
|
||||
@Test
|
||||
public void instrumentValueWithWarnings() throws Exception {
|
||||
var metadata = new Metadata();
|
||||
|
||||
var idOp1 = metadata.addItem(151, 34, null);
|
||||
var idOp2 = metadata.addItem(202, 31, null);
|
||||
var idOp3 = metadata.addItem(250, 13, null);
|
||||
var rawCode = """
|
||||
var idOp1 = metadata.addItem(151, 34, null);
|
||||
var idOp2 = metadata.addItem(202, 31, null);
|
||||
var idOp3 = metadata.addItem(250, 13, null);
|
||||
var rawCode =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
from Standard.Base.Warning import Warning
|
||||
from Standard.Table.Data.Table import Table
|
||||
|
||||
|
||||
run column_name =
|
||||
operator1 = Table.new [[column_name, [1,2,3]]]
|
||||
operator2 = Warning.attach "Text" operator1
|
||||
operator3 = operator2.get
|
||||
operator3
|
||||
""";
|
||||
var code = metadata.appendToCode(rawCode);
|
||||
var src = Source.newBuilder("enso", code, "TestWarning.enso").build();
|
||||
var module = context.eval(src);
|
||||
var res = module.invokeMember("eval_expression", "run");
|
||||
res.execute("A");
|
||||
var code = metadata.appendToCode(rawCode);
|
||||
var src = Source.newBuilder("enso", code, "TestWarning.enso").build();
|
||||
var module = context.eval(src);
|
||||
var res = module.invokeMember("eval_expression", "run");
|
||||
res.execute("A");
|
||||
|
||||
var calls = instrument.registeredCalls();
|
||||
var calls = instrument.registeredCalls();
|
||||
|
||||
assertEquals(calls.keySet().size(), 3);
|
||||
assertEquals(calls.get(idOp1).getFunctionName(), "new");
|
||||
assertEquals(calls.get(idOp2).getFunctionName(), "attach");
|
||||
assertEquals(calls.get(idOp3).getTypeName().item(), "Table");
|
||||
assertEquals(calls.get(idOp3).getFunctionName(), "get");
|
||||
}
|
||||
assertEquals(calls.keySet().size(), 3);
|
||||
assertEquals(calls.get(idOp1).getFunctionName(), "new");
|
||||
assertEquals(calls.get(idOp2).getFunctionName(), "attach");
|
||||
assertEquals(calls.get(idOp3).getTypeName().item(), "Table");
|
||||
assertEquals(calls.get(idOp3).getFunctionName(), "get");
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,10 @@
|
||||
package org.enso.interpreter.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Value;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
@ -9,12 +14,6 @@ import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
public class ConversionMethodTests extends TestBase {
|
||||
private static Context ctx;
|
||||
|
||||
@ -41,17 +40,18 @@ public class ConversionMethodTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testSimpleConversion() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
type Foo
|
||||
Mk_Foo foo
|
||||
type Bar
|
||||
Mk_Bar bar
|
||||
type Baz
|
||||
Mk_Baz baz
|
||||
|
||||
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.bar
|
||||
Foo.from (that:Baz) = Foo.Mk_Foo that.baz
|
||||
|
||||
|
||||
main = (Foo.from (Baz.Mk_Baz 10)).foo + (Foo.from (Bar.Mk_Bar 20)).foo
|
||||
""";
|
||||
Value res = evalModule(ctx, src);
|
||||
@ -60,15 +60,16 @@ public class ConversionMethodTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testDispatchOnHostMap() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
polyglot java import java.util.Map as Java_Map
|
||||
import Standard.Base.Data.Map.Map
|
||||
|
||||
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
|
||||
|
||||
Foo.from (that:Map) = Foo.Mk_Foo that
|
||||
|
||||
|
||||
main =
|
||||
jmap = Java_Map.of "A" 1 "B" 2 "C" 3
|
||||
Foo.from jmap . data . size
|
||||
@ -79,20 +80,21 @@ public class ConversionMethodTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testDispatchOnJSMap() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
import Standard.Base.Data.Map.Map
|
||||
|
||||
|
||||
foreign js js_map = '''
|
||||
let m = new Map()
|
||||
m.set("A", 1)
|
||||
m.set("B", 2)
|
||||
return m
|
||||
|
||||
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
|
||||
|
||||
Foo.from (that:Map) = Foo.Mk_Foo that
|
||||
|
||||
|
||||
main =
|
||||
Foo.from js_map . data . size
|
||||
""";
|
||||
@ -102,17 +104,18 @@ public class ConversionMethodTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testDispatchOnJSDateTime() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
import Standard.Base.Data.Time.Date_Time.Date_Time
|
||||
|
||||
|
||||
foreign js js_date year month day hour minute second nanosecond = '''
|
||||
return new Date(year, month - 1, day, hour, minute, second, nanosecond / 1000000);
|
||||
|
||||
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
|
||||
|
||||
Foo.from (that:Date_Time) = Foo.Mk_Foo that
|
||||
|
||||
|
||||
main =
|
||||
Foo.from (js_date 2023 2 7 23 59 0 10) . data . day
|
||||
""";
|
||||
@ -122,15 +125,16 @@ public class ConversionMethodTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testAmbiguousConversionStrictUnused() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
type Bar
|
||||
Mk_Bar x
|
||||
|
||||
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+100
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+1000
|
||||
|
||||
|
||||
main = 42
|
||||
""";
|
||||
try {
|
||||
@ -138,7 +142,11 @@ public class ConversionMethodTests extends TestBase {
|
||||
fail("Expected an exception, but got " + res);
|
||||
} catch (Exception e) {
|
||||
assertEquals("Compilation aborted due to errors.", e.getMessage());
|
||||
MatcherAssert.assertThat(getStdOut(), Matchers.containsString("Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in this module."));
|
||||
MatcherAssert.assertThat(
|
||||
getStdOut(),
|
||||
Matchers.containsString(
|
||||
"Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in"
|
||||
+ " this module."));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,12 +1,12 @@
package org.enso.interpreter.test;

import java.util.concurrent.Executors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.concurrent.Executors;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Value;
import org.junit.AfterClass;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
@ -28,7 +28,8 @@ public class ForeignMethodInvokeTest extends TestBase {
|
||||
public void testForeignFunctionParseFailure() {
|
||||
// python is not a permitted language, therefore, invoking `py_array` method
|
||||
// should fail with a Polyglot_Error, rather than crashing whole engine.
|
||||
String source = """
|
||||
String source =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
foreign python py_array = \"\"\"
|
||||
@ -36,21 +37,25 @@ public class ForeignMethodInvokeTest extends TestBase {
|
||||
|
||||
main =
|
||||
Panic.recover Any py_array
|
||||
""".trim();
|
||||
"""
|
||||
.trim();
|
||||
Value module = ctx.eval("enso", source);
|
||||
Value res = module.invokeMember("eval_expression", "main");
|
||||
assertTrue("Invoking non-installed foreign function should recover", res.isException());
|
||||
try {
|
||||
throw res.throwException();
|
||||
} catch (Exception e) {
|
||||
assertTrue("Wrong error message",
|
||||
e.getMessage().matches("Cannot parse foreign python method. Only available languages are .+"));
|
||||
assertTrue(
|
||||
"Wrong error message",
|
||||
e.getMessage()
|
||||
.matches("Cannot parse foreign python method. Only available languages are .+"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInteropWithJavaScript() throws Exception {
|
||||
var source = """
|
||||
var source =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
foreign js js_array t = \"\"\"
|
||||
@ -68,9 +73,13 @@ public class ForeignMethodInvokeTest extends TestBase {
|
||||
assertEquals(2, res.getArrayElement(1).asInt());
|
||||
assertEquals(13, res.getArrayElement(2).asInt());
|
||||
|
||||
var res2 = Executors.newSingleThreadExecutor().submit(() -> {
|
||||
return third.execute(12);
|
||||
}).get();
|
||||
var res2 =
|
||||
Executors.newSingleThreadExecutor()
|
||||
.submit(
|
||||
() -> {
|
||||
return third.execute(12);
|
||||
})
|
||||
.get();
|
||||
|
||||
assertTrue("It is an array2", res2.hasArrayElements());
|
||||
assertEquals(12, res2.getArrayElement(2).asInt());
|
||||
@ -79,7 +88,8 @@ public class ForeignMethodInvokeTest extends TestBase {
|
||||
@Ignore
|
||||
@Test
|
||||
public void testParallelInteropWithJavaScript() throws Exception {
|
||||
var source = """
|
||||
var source =
|
||||
"""
|
||||
from Standard.Base import all
|
||||
|
||||
polyglot java import java.lang.Thread
|
||||
@ -94,9 +104,12 @@ public class ForeignMethodInvokeTest extends TestBase {
|
||||
var module = ctx.eval("enso", source);
|
||||
var third = module.invokeMember("eval_expression", "third");
|
||||
|
||||
var future = Executors.newSingleThreadExecutor().submit(() -> {
|
||||
return third.execute(12);
|
||||
});
|
||||
var future =
|
||||
Executors.newSingleThreadExecutor()
|
||||
.submit(
|
||||
() -> {
|
||||
return third.execute(12);
|
||||
});
|
||||
var res = third.execute(13);
|
||||
assertTrue("It is an array", res.hasArrayElements());
|
||||
assertEquals(3, res.getArraySize());
|
||||
|
@ -1,57 +1,58 @@
|
||||
package org.enso.interpreter.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.polyglot.RuntimeOptions;
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Language;
|
||||
import org.graalvm.polyglot.Source;
|
||||
import org.graalvm.polyglot.io.IOAccess;
|
||||
import org.junit.After;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class InsightForEnsoTest {
|
||||
private Context ctx;
|
||||
private AutoCloseable insightHandle;
|
||||
private final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
private Context ctx;
|
||||
private AutoCloseable insightHandle;
|
||||
private final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
|
||||
@Before
|
||||
public void initContext() throws Exception {
|
||||
this.ctx = Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath()
|
||||
)
|
||||
.option(
|
||||
RuntimeOptions.LOG_LEVEL,
|
||||
Level.WARNING.getName()
|
||||
)
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.out(out)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
@Before
|
||||
public void initContext() throws Exception {
|
||||
this.ctx =
|
||||
Context.newBuilder()
|
||||
.allowExperimentalOptions(true)
|
||||
.option(
|
||||
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
|
||||
Paths.get("../../distribution/component").toFile().getAbsolutePath())
|
||||
.option(RuntimeOptions.LOG_LEVEL, Level.WARNING.getName())
|
||||
.logHandler(System.err)
|
||||
.allowExperimentalOptions(true)
|
||||
.allowIO(IOAccess.ALL)
|
||||
.out(out)
|
||||
.allowAllAccess(true)
|
||||
.build();
|
||||
|
||||
var engine = ctx.getEngine();
|
||||
Map<String, Language> langs = engine.getLanguages();
|
||||
assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
var engine = ctx.getEngine();
|
||||
Map<String, Language> langs = engine.getLanguages();
|
||||
assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
var fn = (Function<Source,AutoCloseable>) engine.getInstruments().get("insight").lookup(Function.class);
|
||||
assertNotNull(fn);
|
||||
@SuppressWarnings("unchecked")
|
||||
var fn =
|
||||
(Function<Source, AutoCloseable>)
|
||||
engine.getInstruments().get("insight").lookup(Function.class);
|
||||
assertNotNull(fn);
|
||||
|
||||
var insightScript = Source.newBuilder("js", """
|
||||
var insightScript =
|
||||
Source.newBuilder(
|
||||
"js",
|
||||
"""
|
||||
insight.on('enter', (ctx, frame) => {
|
||||
print(`${ctx.name} at ${ctx.source.name}:${ctx.line}:`);
|
||||
let dump = "";
|
||||
@ -63,40 +64,47 @@ public class InsightForEnsoTest {
|
||||
}, {
|
||||
roots : true
|
||||
});
|
||||
""", "trace.js").build();
|
||||
this.insightHandle = fn.apply(insightScript);
|
||||
}
|
||||
""",
|
||||
"trace.js")
|
||||
.build();
|
||||
this.insightHandle = fn.apply(insightScript);
|
||||
}
|
||||
|
||||
@After
|
||||
public void disposeContext() throws Exception {
|
||||
this.insightHandle.close();
|
||||
this.ctx.close();
|
||||
}
|
||||
@After
|
||||
public void disposeContext() throws Exception {
|
||||
this.insightHandle.close();
|
||||
this.ctx.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void computeFactorial() throws Exception {
|
||||
var code = Source.newBuilder("enso", """
|
||||
@Test
|
||||
public void computeFactorial() throws Exception {
|
||||
var code =
|
||||
Source.newBuilder(
|
||||
"enso",
|
||||
"""
|
||||
fac n =
|
||||
acc n v = if n <= 1 then v else
|
||||
@Tail_Call acc n-1 n*v
|
||||
|
||||
acc n 1
|
||||
""", "factorial.enso").build();
|
||||
""",
|
||||
"factorial.enso")
|
||||
.build();
|
||||
|
||||
var m = ctx.eval(code);
|
||||
var fac = m.invokeMember("eval_expression", "fac");
|
||||
var res = fac.execute(5);
|
||||
assertEquals(120, res.asInt());
|
||||
var m = ctx.eval(code);
|
||||
var fac = m.invokeMember("eval_expression", "fac");
|
||||
var res = fac.execute(5);
|
||||
assertEquals(120, res.asInt());
|
||||
|
||||
var msgs = out.toString();
|
||||
assertNotEquals("Step one: " + msgs, -1, msgs.indexOf("n=5 v=1 acc=function"));
|
||||
assertNotEquals("Step two: " + msgs, -1, msgs.indexOf("n=4 v=5 acc=function"));
|
||||
assertNotEquals("3rd step: " + msgs, -1, msgs.indexOf("n=3 v=20 acc=function"));
|
||||
assertNotEquals("4th step: " + msgs, -1, msgs.indexOf("n=2 v=60 acc=function"));
|
||||
var msgs = out.toString();
|
||||
assertNotEquals("Step one: " + msgs, -1, msgs.indexOf("n=5 v=1 acc=function"));
|
||||
assertNotEquals("Step two: " + msgs, -1, msgs.indexOf("n=4 v=5 acc=function"));
|
||||
assertNotEquals("3rd step: " + msgs, -1, msgs.indexOf("n=3 v=20 acc=function"));
|
||||
assertNotEquals("4th step: " + msgs, -1, msgs.indexOf("n=2 v=60 acc=function"));
|
||||
|
||||
assertNotEquals(
|
||||
assertNotEquals(
|
||||
"Uninitialized variables are seen as JavaScript null: " + msgs,
|
||||
-1, msgs.indexOf("n=null v=null acc=function")
|
||||
);
|
||||
}
|
||||
-1,
|
||||
msgs.indexOf("n=null v=null acc=function"));
|
||||
}
|
||||
}
|
||||
|
@ -19,7 +19,7 @@ public class JsInteropTest extends TestBase {
ctx = createDefaultContext(out);
out.reset();
}

@After
public void disposeCtx() {
ctx.close();
@ -27,7 +27,8 @@ public class JsInteropTest extends TestBase {

@Test
public void testDefaultJSPrint() {
var src = """
var src =
"""
from Standard.Base import Json

main =

@ -1,5 +1,9 @@
|
||||
package org.enso.interpreter.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
|
||||
import java.util.Map;
|
||||
import org.enso.polyglot.RuntimeOptions;
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Language;
|
||||
@ -9,13 +13,6 @@ import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
|
||||
public class NonStrictModeTests extends TestBase {
|
||||
private static Context nonStrictCtx;
|
||||
private static MockLogHandler logHandler;
|
||||
@ -28,7 +25,10 @@ public class NonStrictModeTests extends TestBase {
|
||||
|
||||
protected static Context createNonStrictContext() {
|
||||
var context =
|
||||
defaultContextBuilder().logHandler(logHandler).option(RuntimeOptions.STRICT_ERRORS, "false").build();
|
||||
defaultContextBuilder()
|
||||
.logHandler(logHandler)
|
||||
.option(RuntimeOptions.STRICT_ERRORS, "false")
|
||||
.build();
|
||||
final Map<String, Language> langs = context.getEngine().getLanguages();
|
||||
assertNotNull("Enso found: " + langs, langs.get("enso"));
|
||||
return context;
|
||||
@ -46,15 +46,16 @@ public class NonStrictModeTests extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testAmbiguousConversion() {
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
type Bar
|
||||
Mk_Bar x
|
||||
|
||||
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+100
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+1000
|
||||
|
||||
|
||||
main = 42
|
||||
""";
|
||||
Value res = evalModule(nonStrictCtx, src);
|
||||
@ -63,23 +64,25 @@ public class NonStrictModeTests extends TestBase {
|
||||
// Even if the conversion is unused and non-strict mode, we still get a diagnostic report:
|
||||
logHandler.assertMessage(
|
||||
"enso.org.enso.compiler.Compiler",
|
||||
"Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in this module."
|
||||
);
|
||||
"Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in this"
|
||||
+ " module.");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAmbiguousConversionUsage() {
|
||||
// In non-strict mode, the conversion declarations will have errors attached to the IR, but the overall operation
|
||||
// In non-strict mode, the conversion declarations will have errors attached to the IR, but the
|
||||
// overall operation
|
||||
// will simply not see the second conversion and succeed with the first one.
|
||||
String src = """
|
||||
String src =
|
||||
"""
|
||||
type Foo
|
||||
Mk_Foo data
|
||||
type Bar
|
||||
Mk_Bar x
|
||||
|
||||
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+100
|
||||
Foo.from (that:Bar) = Foo.Mk_Foo that.x+1000
|
||||
|
||||
|
||||
main = (Foo.from (Bar.Mk_Bar 42)) . data
|
||||
""";
|
||||
|
||||
@ -88,25 +91,27 @@ public class NonStrictModeTests extends TestBase {
|
||||
|
||||
logHandler.assertMessage(
|
||||
"enso.org.enso.compiler.Compiler",
|
||||
"Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in this module."
|
||||
);
|
||||
"Unnamed:7:1: error: Ambiguous conversion: Foo.from Bar is defined multiple times in this"
|
||||
+ " module.");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testBadImport() {
|
||||
String src = """
|
||||
import That.Does.Not.Exist
|
||||
|
||||
|
||||
main = 2+2
|
||||
""";
|
||||
Value res = evalModule(nonStrictCtx, src);
|
||||
assertEquals(4, res.asInt());
|
||||
|
||||
String line1 = "Unnamed:1:1: error: Package containing the module That.Does.Not.Exist could not be loaded: The " +
|
||||
"package could not be resolved: The library `That.Does` is not defined within the edition.";
|
||||
String line1 =
|
||||
"Unnamed:1:1: error: Package containing the module That.Does.Not.Exist could not be loaded:"
|
||||
+ " The package could not be resolved: The library `That.Does` is not defined within"
|
||||
+ " the edition.";
|
||||
String line2 = " 1 | import That.Does.Not.Exist";
|
||||
String line3 = " | ^~~~~~~~~~~~~~~~~~~~~~~~~~";
|
||||
logHandler.assertMessage("enso.org.enso.compiler.Compiler", line1 + "\n" + line2 + "\n" + line3);
|
||||
logHandler.assertMessage(
|
||||
"enso.org.enso.compiler.Compiler", line1 + "\n" + line2 + "\n" + line3);
|
||||
}
|
||||
}
|
||||
|
@ -1,13 +1,13 @@
package org.enso.interpreter.test;

import org.enso.polyglot.HostEnsoUtils;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.enso.polyglot.HostEnsoUtils;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.PolyglotException;
import org.graalvm.polyglot.Value;
import org.junit.AfterClass;
import static org.junit.Assert.fail;
import org.junit.BeforeClass;
import org.junit.Test;

@ -26,7 +26,8 @@ public class PolyglotFindExceptionMessageTest extends TestBase {

@Test
public void testJavaScriptException() {
String src = """
String src =
"""
main = err

foreign js err = \"""

@ -1,8 +1,10 @@
|
||||
package org.enso.interpreter.test.instrument;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
|
||||
import java.nio.file.Paths;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.interpreter.test.MockLogHandler;
|
||||
import org.enso.polyglot.MethodNames;
|
||||
import org.enso.polyglot.RuntimeOptions;
|
||||
@ -10,8 +12,6 @@ import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Source;
|
||||
import org.graalvm.polyglot.io.IOAccess;
|
||||
import org.junit.AfterClass;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
@ -45,8 +45,7 @@ public class VerifyLanguageAvailabilityTest {
|
||||
ctx.close();
|
||||
|
||||
var args =
|
||||
handler.assertMessage(
|
||||
"epb.org.enso.interpreter.epb.EpbContext", "Parsing foreign script");
|
||||
handler.assertMessage("epb.org.enso.interpreter.epb.EpbContext", "Parsing foreign script");
|
||||
assertEquals("js", args[0]);
|
||||
assertEquals("mul.mul", args[1]);
|
||||
}
|
||||
@ -55,12 +54,17 @@ public class VerifyLanguageAvailabilityTest {
|
||||
public void javaScriptIsPresent() throws Exception {
|
||||
var js = ctx.getEngine().getLanguages().get("js");
|
||||
assertNotNull("JavaScript is available", js);
|
||||
var src = Source.newBuilder("enso", """
|
||||
var src =
|
||||
Source.newBuilder(
|
||||
"enso",
|
||||
"""
|
||||
foreign js mul a b = \"\"\"
|
||||
return a * b
|
||||
|
||||
run = mul 6 7
|
||||
""", "mul.enso").build();
|
||||
""",
|
||||
"mul.enso")
|
||||
.build();
|
||||
var fourtyTwo = ctx.eval(src).invokeMember(MethodNames.Module.EVAL_EXPRESSION, "run");
|
||||
assertEquals(42, fourtyTwo.asInt());
|
||||
}
|
||||
|
@ -16,6 +16,7 @@ public class ClassLoaderConstants {
List.of("org.graalvm", "java", "org.slf4j", "ch.qos");

public static final List<String> RESOURCE_DELEGATION_PATTERNS = List.of("org.slf4j", "ch.qos");

/**
* Path to the {@code runner.jar} fat jar. This must not be on the system's module-path, because
* the JVM would not be able to boot.

@ -1,10 +1,20 @@
|
||||
package org.enso.interpreter;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.Option;
|
||||
import com.oracle.truffle.api.TruffleLanguage;
|
||||
import com.oracle.truffle.api.TruffleLogger;
|
||||
import com.oracle.truffle.api.debug.DebuggerTags;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.instrumentation.ProvidedTags;
|
||||
import com.oracle.truffle.api.instrumentation.StandardTags;
|
||||
import com.oracle.truffle.api.nodes.ExecutableNode;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.enso.compiler.Compiler;
|
||||
import org.enso.compiler.context.InlineContext;
|
||||
import org.enso.compiler.context.LocalScope;
|
||||
@ -41,18 +51,6 @@ import org.graalvm.options.OptionDescriptors;
|
||||
import org.graalvm.options.OptionKey;
|
||||
import org.graalvm.options.OptionType;
|
||||
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.Option;
|
||||
import com.oracle.truffle.api.TruffleLanguage;
|
||||
import com.oracle.truffle.api.TruffleLogger;
|
||||
import com.oracle.truffle.api.debug.DebuggerTags;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.instrumentation.ProvidedTags;
|
||||
import com.oracle.truffle.api.instrumentation.StandardTags;
|
||||
import com.oracle.truffle.api.nodes.ExecutableNode;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
|
||||
/**
|
||||
* The root of the Enso implementation.
|
||||
*
|
||||
@ -72,8 +70,7 @@ import com.oracle.truffle.api.nodes.RootNode;
|
||||
contextPolicy = TruffleLanguage.ContextPolicy.EXCLUSIVE,
|
||||
dependentLanguages = {"epb"},
|
||||
fileTypeDetectors = FileDetector.class,
|
||||
services= { Timer.class, NotificationHandler.Forwarder.class, LockManager.class }
|
||||
)
|
||||
services = {Timer.class, NotificationHandler.Forwarder.class, LockManager.class})
|
||||
@ProvidedTags({
|
||||
DebuggerTags.AlwaysHalt.class,
|
||||
StandardTags.CallTag.class,
|
||||
@ -135,7 +132,6 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
env.registerService(lockManager);
|
||||
}
|
||||
|
||||
|
||||
boolean isExecutionTimerEnabled =
|
||||
env.getOptions().get(RuntimeOptions.ENABLE_EXECUTION_TIMER_KEY);
|
||||
Timer timer = isExecutionTimerEnabled ? new Timer.Nanosecond() : new Timer.Disabled();
|
||||
@ -200,30 +196,28 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
|
||||
/**
|
||||
* Parses the given Enso source code snippet in {@code request}.
|
||||
* <p>
|
||||
* Inline parsing does not handle the following expressions:
|
||||
*
|
||||
* <p>Inline parsing does not handle the following expressions:
|
||||
*
|
||||
* <ul>
|
||||
* <li>Assignments</li>
|
||||
* <li>Imports and exports</li>
|
||||
* <li>Assignments
|
||||
* <li>Imports and exports
|
||||
* </ul>
|
||||
* When given the aforementioned expressions in the request, {@code null}
|
||||
* will be returned.
|
||||
*
|
||||
* When given the aforementioned expressions in the request, {@code null} will be returned.
|
||||
*
|
||||
* @param request request for inline parsing
|
||||
* @throws InlineParsingException if the compiler failed to parse
|
||||
* @return An {@link ExecutableNode} representing an AST fragment if the request contains
|
||||
* syntactically correct Enso source, {@code null} otherwise.
|
||||
* syntactically correct Enso source, {@code null} otherwise.
|
||||
*/
|
||||
@Override
|
||||
protected ExecutableNode parse(InlineParsingRequest request) throws InlineParsingException {
|
||||
if (request.getLocation().getRootNode() instanceof EnsoRootNode ensoRootNode) {
|
||||
var context = EnsoContext.get(request.getLocation());
|
||||
Tree inlineExpr = context.getCompiler().parseInline(request.getSource());
|
||||
var undesirableExprTypes = List.of(
|
||||
Tree.Assignment.class,
|
||||
Tree.Import.class,
|
||||
Tree.Export.class
|
||||
);
|
||||
var undesirableExprTypes =
|
||||
List.of(Tree.Assignment.class, Tree.Import.class, Tree.Export.class);
|
||||
if (astContainsExprTypes(inlineExpr, undesirableExprTypes)) {
|
||||
throw new InlineParsingException(
|
||||
"Inline parsing request contains some of undesirable expression types: "
|
||||
@ -231,50 +225,47 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
+ "\n"
|
||||
+ "Parsed expression: \n"
|
||||
+ inlineExpr.codeRepr(),
|
||||
null
|
||||
);
|
||||
null);
|
||||
}
|
||||
|
||||
var module = ensoRootNode.getModuleScope().getModule();
|
||||
var localScope = ensoRootNode.getLocalScope();
|
||||
var outputRedirect = new ByteArrayOutputStream();
|
||||
var redirectConfigWithStrictErrors = new CompilerConfig(
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
scala.Option.apply(new PrintStream(outputRedirect))
|
||||
);
|
||||
var moduleContext = new ModuleContext(
|
||||
module.asCompilerModule(), redirectConfigWithStrictErrors,
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty(),
|
||||
false,
|
||||
scala.Option.empty()
|
||||
);
|
||||
var inlineContext = new InlineContext(
|
||||
moduleContext,
|
||||
redirectConfigWithStrictErrors,
|
||||
scala.Some.apply(localScope),
|
||||
scala.Some.apply(false),
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty()
|
||||
);
|
||||
Compiler silentCompiler = context.getCompiler().duplicateWithConfig(redirectConfigWithStrictErrors);
|
||||
var redirectConfigWithStrictErrors =
|
||||
new CompilerConfig(
|
||||
false, false, true, true, scala.Option.apply(new PrintStream(outputRedirect)));
|
||||
var moduleContext =
|
||||
new ModuleContext(
|
||||
module.asCompilerModule(),
|
||||
redirectConfigWithStrictErrors,
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty(),
|
||||
false,
|
||||
scala.Option.empty());
|
||||
var inlineContext =
|
||||
new InlineContext(
|
||||
moduleContext,
|
||||
redirectConfigWithStrictErrors,
|
||||
scala.Some.apply(localScope),
|
||||
scala.Some.apply(false),
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty(),
|
||||
scala.Option.empty());
|
||||
Compiler silentCompiler =
|
||||
context.getCompiler().duplicateWithConfig(redirectConfigWithStrictErrors);
|
||||
ExpressionNode exprNode;
|
||||
try {
|
||||
var optionTupple = silentCompiler.runInline(
|
||||
request.getSource().getCharacters().toString(),
|
||||
inlineContext
|
||||
);
|
||||
var optionTupple =
|
||||
silentCompiler.runInline(request.getSource().getCharacters().toString(), inlineContext);
|
||||
if (optionTupple.nonEmpty()) {
|
||||
var newInlineContext = optionTupple.get()._1();
|
||||
var ir = optionTupple.get()._2();
|
||||
var sco = newInlineContext.localScope().getOrElse(LocalScope::root);
|
||||
var mod = newInlineContext.module$access$0().module$access$0();
|
||||
var m = org.enso.interpreter.runtime.Module.fromCompilerModule(mod);
|
||||
var toTruffle = new IrToTruffle(context, request.getSource(), m.getScope(), redirectConfigWithStrictErrors);
|
||||
var toTruffle =
|
||||
new IrToTruffle(
|
||||
context, request.getSource(), m.getScope(), redirectConfigWithStrictErrors);
|
||||
exprNode = toTruffle.runInline(ir, sco, "<inline_source>");
|
||||
} else {
|
||||
exprNode = null;
|
||||
@ -282,7 +273,8 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
} catch (UnhandledEntity e) {
|
||||
throw new InlineParsingException("Unhandled entity: " + e.entity(), e);
|
||||
} catch (CompilationAbortedException e) {
|
||||
assert outputRedirect.toString().lines().count() > 1 : "Expected a header line from the compiler";
|
||||
assert outputRedirect.toString().lines().count() > 1
|
||||
: "Expected a header line from the compiler";
|
||||
String compilerErrOutput = outputRedirect.toString();
|
||||
throw new InlineParsingException(compilerErrOutput, e);
|
||||
} finally {
|
||||
@ -292,8 +284,7 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
if (exprNode != null) {
|
||||
var language = EnsoLanguage.get(exprNode);
|
||||
return new ExecutableNode(language) {
|
||||
@Child
|
||||
private ExpressionNode expr;
|
||||
@Child private ExpressionNode expr;
|
||||
|
||||
@Override
|
||||
public Object execute(VirtualFrame frame) {
|
||||
@ -314,19 +305,14 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the given ast transitively contains any of {@code exprTypes}.
|
||||
*/
|
||||
/** Returns true if the given ast transitively contains any of {@code exprTypes}. */
|
||||
private boolean astContainsExprTypes(Tree ast, List<Class<? extends Tree>> exprTypes) {
|
||||
boolean astMatchesExprType = exprTypes
|
||||
.stream()
|
||||
.anyMatch(exprType -> exprType.equals(ast.getClass()));
|
||||
boolean astMatchesExprType =
|
||||
exprTypes.stream().anyMatch(exprType -> exprType.equals(ast.getClass()));
|
||||
if (astMatchesExprType) {
|
||||
return true;
|
||||
} else if (ast instanceof Tree.BodyBlock block) {
|
||||
return block
|
||||
.getStatements()
|
||||
.stream()
|
||||
return block.getStatements().stream()
|
||||
.map(Line::getExpression)
|
||||
.filter(Objects::nonNull)
|
||||
.anyMatch((Tree expr) -> astContainsExprTypes(expr, exprTypes));
|
||||
@ -336,13 +322,13 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
}
|
||||
|
||||
@Option(
|
||||
name = "ExecutionEnvironment",
|
||||
category = OptionCategory.USER,
|
||||
help = "The environment for program execution. Defaults to `design`.")
|
||||
name = "ExecutionEnvironment",
|
||||
category = OptionCategory.USER,
|
||||
help = "The environment for program execution. Defaults to `design`.")
|
||||
public static final OptionKey<ExecutionEnvironment> EXECUTION_ENVIRONMENT =
|
||||
new OptionKey<>(
|
||||
ExecutionEnvironment.DESIGN, new OptionType<>("ExecutionEnvironment", ExecutionEnvironment::forName));
|
||||
|
||||
new OptionKey<>(
|
||||
ExecutionEnvironment.DESIGN,
|
||||
new OptionType<>("ExecutionEnvironment", ExecutionEnvironment::forName));
|
||||
|
||||
private static final OptionDescriptors OPTIONS =
|
||||
OptionDescriptors.createUnion(
|
||||
@ -367,7 +353,7 @@ public final class EnsoLanguage extends TruffleLanguage<EnsoContext> {
|
||||
|
||||
/** Conversions of primitive values */
|
||||
protected Object getLanguageView(EnsoContext context, Object value) {
|
||||
if (value instanceof Boolean b ) {
|
||||
if (value instanceof Boolean b) {
|
||||
var bool = context.getBuiltins().bool();
|
||||
return b ? bool.getTrue().newInstance() : bool.getFalse().newInstance();
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ public abstract class Cache<T, M extends Cache.Metadata> {

/** Returns a default level of logging for this Cache. */
protected final Level logLevel;

/** Log name to use in log messages */
private final String logName;

@ -1,197 +1,215 @@
package org.enso.interpreter.caches;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;

import org.apache.commons.lang3.StringUtils;
import org.enso.persist.Persistance;
import org.enso.persist.Persistable;
import org.enso.compiler.data.BindingsMap;
import org.enso.compiler.data.BindingsMap.DefinedEntity;
import org.enso.compiler.data.BindingsMap.ModuleReference;
import org.enso.editions.LibraryName;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.pkg.QualifiedName;
import org.enso.pkg.SourceFile;
import org.openide.util.lookup.ServiceProvider;

import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;

import buildinfo.Info;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.enso.compiler.data.BindingsMap;
import org.enso.compiler.data.BindingsMap.DefinedEntity;
import org.enso.compiler.data.BindingsMap.ModuleReference;
import org.enso.editions.LibraryName;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.persist.Persistable;
import org.enso.persist.Persistance;
import org.enso.pkg.QualifiedName;
import org.enso.pkg.SourceFile;
import org.openide.util.lookup.ServiceProvider;
import scala.Option;
import scala.Tuple2;
import scala.collection.immutable.Map;

@Persistable(clazz = QualifiedName.class, id = 30300)
public final class ImportExportCache extends Cache<ImportExportCache.CachedBindings, ImportExportCache.Metadata> {
public final class ImportExportCache
extends Cache<ImportExportCache.CachedBindings, ImportExportCache.Metadata> {

private final LibraryName libraryName;
private final LibraryName libraryName;

public ImportExportCache(LibraryName libraryName) {
super(Level.FINEST, libraryName.toString(), true, false);
this.libraryName = libraryName;
this.entryName = libraryName.name();
this.dataSuffix = bindingsCacheDataExtension;
this.metadataSuffix = bindingsCacheMetadataExtension;
public ImportExportCache(LibraryName libraryName) {
super(Level.FINEST, libraryName.toString(), true, false);
this.libraryName = libraryName;
this.entryName = libraryName.name();
this.dataSuffix = bindingsCacheDataExtension;
this.metadataSuffix = bindingsCacheMetadataExtension;
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
try {
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
try {
return objectMapper.writeValueAsString(new Metadata(sourceDigest, blobDigest)).getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@Override
protected CachedBindings deserialize(
EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger)
throws ClassNotFoundException, IOException, ClassNotFoundException {
var ref = Persistance.read(data, null);
var bindings = ref.get(MapToBindings.class);
return new CachedBindings(libraryName, bindings, Optional.empty());
}

@Override
protected CachedBindings deserialize(EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger) throws ClassNotFoundException, IOException, ClassNotFoundException {
var ref = Persistance.read(data, null);
var bindings = ref.get(MapToBindings.class);
return new CachedBindings(libraryName, bindings, Optional.empty());
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize library's metadata.", e);
return Optional.empty();
}
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize library's metadata.", e);
return Optional.empty();
}
}
@Override
protected Optional<String> computeDigest(CachedBindings entry, TruffleLogger logger) {
return entry.sources().map(sources -> computeDigestOfLibrarySources(sources, logger));
}

@Override
protected Optional<String> computeDigest(CachedBindings entry, TruffleLogger logger) {
return entry.sources().map(sources -> computeDigestOfLibrarySources(sources, logger));
}
@Override
@SuppressWarnings("unchecked")
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
return context
.getPackageRepository()
.getPackageForLibraryJava(libraryName)
.map(pkg -> computeDigestOfLibrarySources(pkg.listSourcesJava(), logger));
}
@Override
@SuppressWarnings("unchecked")
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
return context
.getPackageRepository()
.getPackageForLibraryJava(libraryName)
.map(pkg -> computeDigestOfLibrarySources(pkg.listSourcesJava(), logger));
}

@Override
@SuppressWarnings("unchecked")
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
return context.getPackageRepository().getPackageForLibraryJava(libraryName).map(pkg -> {
var bindingsCacheRoot =
pkg.getBindingsCacheRootForPackage(Info.ensoVersion());
var localCacheRoot = bindingsCacheRoot.resolve(libraryName.namespace());
var distribution = context.getDistributionManager();
var pathSegments = new String[]{
@Override
@SuppressWarnings("unchecked")
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
return context
.getPackageRepository()
.getPackageForLibraryJava(libraryName)
.map(
pkg -> {
var bindingsCacheRoot = pkg.getBindingsCacheRootForPackage(Info.ensoVersion());
var localCacheRoot = bindingsCacheRoot.resolve(libraryName.namespace());
var distribution = context.getDistributionManager();
var pathSegments =
new String[] {
pkg.namespace(),
pkg.normalizedName(),
pkg.getConfig().version(),
Info.ensoVersion(),
libraryName.namespace()
};
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(StringUtils.join(pathSegments, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
};
var path =
distribution.LocallyInstalledDirectories()
.irCacheDirectory()
.resolve(StringUtils.join(pathSegments, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
}

@Override
protected byte[] serialize(EnsoContext context, CachedBindings entry) throws IOException {
var arr = Persistance.write(entry.bindings(), null);
return arr;
}

public static final class MapToBindings {
private final Map<QualifiedName, Persistance.Reference<BindingsMap>> entries;

public MapToBindings(Map<QualifiedName, Persistance.Reference<BindingsMap>> entries) {
this.entries = entries;
}

public Option<BindingsMap> findForModule(QualifiedName moduleName) {
var ref = entries.get(moduleName);
if (ref.isEmpty()) {
return Option.empty();
}
return Option.apply(ref.get().get(BindingsMap.class));
}
}

@ServiceProvider(service = Persistance.class)
public static final class PersistMapToBindings extends Persistance<MapToBindings> {
public PersistMapToBindings() {
super(MapToBindings.class, false, 364);
}

@Override
protected byte[] serialize(EnsoContext context, CachedBindings entry) throws IOException {
var arr = Persistance.write(entry.bindings(), null);
return arr;
}

public static final class MapToBindings {
private final Map<QualifiedName, Persistance.Reference<BindingsMap>> entries;

public MapToBindings(Map<QualifiedName, Persistance.Reference<BindingsMap>> entries) {
this.entries = entries;
}

public Option<BindingsMap> findForModule(QualifiedName moduleName) {
var ref = entries.get(moduleName);
if (ref.isEmpty()) {
return Option.empty();
}
return Option.apply(ref.get().get(BindingsMap.class));
}
}

@ServiceProvider(service = Persistance.class)
public static final class PersistMapToBindings extends Persistance<MapToBindings> {
public PersistMapToBindings() {
super(MapToBindings.class, false, 364);
}

@Override
protected void writeObject(MapToBindings obj, Output out) throws IOException {
out.writeInt(obj.entries.size());
var it = obj.entries.iterator();
while (it.hasNext()) {
var e = it.next();
out.writeInline(QualifiedName.class, e._1());
out.writeObject(e._2().get(BindingsMap.class));
}
}

@Override
@SuppressWarnings("unchecked")
protected MapToBindings readObject(Input in) throws IOException, ClassNotFoundException {
var size = in.readInt();
var b = Map.newBuilder();
b.sizeHint(size);
while (size-- > 0) {
var name = in.readInline(QualifiedName.class);
var value = in.readReference(BindingsMap.class);
b.addOne(Tuple2.apply(name, value));
}
return new MapToBindings((Map) b.result());
protected void writeObject(MapToBindings obj, Output out) throws IOException {
out.writeInt(obj.entries.size());
var it = obj.entries.iterator();
while (it.hasNext()) {
var e = it.next();
out.writeInline(QualifiedName.class, e._1());
out.writeObject(e._2().get(BindingsMap.class));
}
}

public static record CachedBindings(LibraryName libraryName, MapToBindings bindings, Optional<List<SourceFile<TruffleFile>>> sources) {
@Override
@SuppressWarnings("unchecked")
protected MapToBindings readObject(Input in) throws IOException, ClassNotFoundException {
var size = in.readInt();
var b = Map.newBuilder();
b.sizeHint(size);
while (size-- > 0) {
var name = in.readInline(QualifiedName.class);
var value = in.readReference(BindingsMap.class);
b.addOne(Tuple2.apply(name, value));
}
return new MapToBindings((Map) b.result());
}
}

public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash) implements Cache.Metadata {}
public static record CachedBindings(
LibraryName libraryName,
MapToBindings bindings,
Optional<List<SourceFile<TruffleFile>>> sources) {}

private static final String bindingsCacheDataExtension = ".bindings";
public record Metadata(
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}

private static final String bindingsCacheMetadataExtension =".bindings.meta";
private static final String bindingsCacheDataExtension = ".bindings";

private final static ObjectMapper objectMapper = new ObjectMapper();
private static final String bindingsCacheMetadataExtension = ".bindings.meta";

private static final ObjectMapper objectMapper = new ObjectMapper();

@Persistable(clazz=BindingsMap.PolyglotSymbol.class, id=33006)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$ModuleReference$Abstract.class, id=33007)
@Persistable(clazz=BindingsMap.ModuleMethod.class, id=33008)
@Persistable(clazz=BindingsMap.Type.class, id=33009)
@Persistable(clazz=BindingsMap.ResolvedImport.class, id=33010)
@Persistable(clazz=BindingsMap.Cons.class, id=33011)
@Persistable(clazz=BindingsMap.ResolvedModule.class, id=33012)
@Persistable(clazz=BindingsMap.ResolvedType.class, id=33013)
@Persistable(clazz=BindingsMap.ResolvedMethod.class, id=33014)
@Persistable(clazz=BindingsMap.ExportedModule.class, id=33015)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$Only.class, id=33016)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$Union.class, id=33017)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$Intersect.class, id=33018)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$AllowedResolution.class, id=33019)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$All$.class, id=33020)
@Persistable(clazz=org.enso.compiler.data.BindingsMap$SymbolRestriction$Hiding.class, id=33021)
@Persistable(clazz=BindingsMap.Resolution.class, id=33029)
@Persistable(clazz=BindingsMap.ResolvedConstructor.class, id=33030)
@Persistable(clazz=BindingsMap.ResolvedPolyglotSymbol.class, id=33031)
@Persistable(clazz=BindingsMap.ResolvedPolyglotField.class, id=33032)
@Persistable(clazz = BindingsMap.PolyglotSymbol.class, id = 33006)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$ModuleReference$Abstract.class,
id = 33007)
@Persistable(clazz = BindingsMap.ModuleMethod.class, id = 33008)
@Persistable(clazz = BindingsMap.Type.class, id = 33009)
@Persistable(clazz = BindingsMap.ResolvedImport.class, id = 33010)
@Persistable(clazz = BindingsMap.Cons.class, id = 33011)
@Persistable(clazz = BindingsMap.ResolvedModule.class, id = 33012)
@Persistable(clazz = BindingsMap.ResolvedType.class, id = 33013)
@Persistable(clazz = BindingsMap.ResolvedMethod.class, id = 33014)
@Persistable(clazz = BindingsMap.ExportedModule.class, id = 33015)
@Persistable(clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$Only.class, id = 33016)
@Persistable(clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$Union.class, id = 33017)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$Intersect.class,
id = 33018)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$AllowedResolution.class,
id = 33019)
@Persistable(clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$All$.class, id = 33020)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$SymbolRestriction$Hiding.class,
id = 33021)
@Persistable(clazz = BindingsMap.Resolution.class, id = 33029)
@Persistable(clazz = BindingsMap.ResolvedConstructor.class, id = 33030)
@Persistable(clazz = BindingsMap.ResolvedPolyglotSymbol.class, id = 33031)
@Persistable(clazz = BindingsMap.ResolvedPolyglotField.class, id = 33032)
@ServiceProvider(service = Persistance.class)
public static final class PersistBindingsMap extends Persistance<BindingsMap> {
public PersistBindingsMap() {
@ -222,6 +240,4 @@ public final class ImportExportCache extends Cache<ImportExportCache.CachedBindi
return map;
}
}


}
@ -1,172 +1,186 @@
package org.enso.interpreter.caches;

import buildinfo.Info;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleLogger;
import com.oracle.truffle.api.source.Source;

import org.apache.commons.lang3.StringUtils;
import org.enso.compiler.core.ir.Module;
import org.enso.compiler.core.ir.ProcessingPass;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.builtin.Builtins;
import org.enso.polyglot.CompilationStage;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
import java.util.logging.Level;

import org.apache.commons.lang3.StringUtils;
import org.enso.compiler.core.ir.Module;
import org.enso.compiler.core.ir.ProcessingPass;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.builtin.Builtins;
import org.enso.persist.Persistance;
import org.enso.polyglot.CompilationStage;

public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCache.Metadata> {

private final org.enso.interpreter.runtime.Module module;
private final org.enso.interpreter.runtime.Module module;

public ModuleCache(org.enso.interpreter.runtime.Module module) {
super(Level.FINEST, module.getName().toString(), true, false);
this.module = module;
this.entryName = module.getName().item();
this.dataSuffix = irCacheDataExtension;
this.metadataSuffix = irCacheMetadataExtension;
public ModuleCache(org.enso.interpreter.runtime.Module module) {
super(Level.FINEST, module.getName().toString(), true, false);
this.module = module;
this.entryName = module.getName().item();
this.dataSuffix = irCacheDataExtension;
this.metadataSuffix = irCacheMetadataExtension;
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
try {
return objectMapper.writeValueAsBytes(
new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
try {
return objectMapper.writeValueAsBytes(new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
@Override
protected CachedModule deserialize(
EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger)
throws ClassNotFoundException, IOException, ClassNotFoundException {
var ref =
Persistance.read(
data,
(obj) ->
switch (obj) {
case ProcessingPass.Metadata metadata -> {
var option = metadata.restoreFromSerialization(context.getCompiler().context());
if (option.nonEmpty()) {
yield option.get();
} else {
throw raise(
RuntimeException.class, new IOException("Cannot convert " + metadata));
}
}
default -> obj;
});
var mod = ref.get(Module.class);
return new CachedModule(
mod, CompilationStage.valueOf(meta.compilationStage()), module.getSource());
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize module's metadata.", e);
return Optional.empty();
}
}

@Override
protected CachedModule deserialize(EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger) throws ClassNotFoundException, IOException, ClassNotFoundException {
var ref = Persistance.read(data, (obj) -> switch (obj) {
case ProcessingPass.Metadata metadata -> {
var option = metadata.restoreFromSerialization(context.getCompiler().context());
if (option.nonEmpty()) {
yield option.get();
} else {
throw raise(RuntimeException.class, new IOException("Cannot convert " + metadata));
}
}
default -> obj;
});
var mod = ref.get(Module.class);
return new CachedModule(mod, CompilationStage.valueOf(meta.compilationStage()), module.getSource());
private Optional<String> computeDigestOfModuleSources(Source source) {
if (source != null) {
byte[] sourceBytes;
if (source.hasBytes()) {
sourceBytes = source.getBytes().toByteArray();
} else {
sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
}
return Optional.of(computeDigestFromBytes(sourceBytes));
} else {
return Optional.empty();
}
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize module's metadata.", e);
return Optional.empty();
}
@Override
protected Optional<String> computeDigest(CachedModule entry, TruffleLogger logger) {
return computeDigestOfModuleSources(entry.source());
}

@Override
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
try {
return computeDigestOfModuleSources(module.getSource());
} catch (IOException e) {
logger.log(logLevel, "failed to retrieve the source of " + module.getName(), e);
return Optional.empty();
}
}

private Optional<String> computeDigestOfModuleSources(Source source) {
if (source != null) {
byte[] sourceBytes;
if (source.hasBytes()) {
sourceBytes = source.getBytes().toByteArray();
} else {
sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
}
return Optional.of(computeDigestFromBytes(sourceBytes));
} else {
return Optional.empty();
}
}

@Override
protected Optional<String> computeDigest(CachedModule entry, TruffleLogger logger) {
return computeDigestOfModuleSources(entry.source());
}

@Override
protected Optional<String> computeDigestFromSource(EnsoContext context, TruffleLogger logger) {
try {
return computeDigestOfModuleSources(module.getSource());
} catch (IOException e) {
logger.log(logLevel, "failed to retrieve the source of " + module.getName(), e);
return Optional.empty();
}
}

@Override
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
if (module != context.getBuiltins().getModule()) {
return context.getPackageOf(module.getSourceFile()).map(pkg -> {
var irCacheRoot = pkg.getIrCacheRootForPackage(Info.ensoVersion());
var qualName = module.getName();
@Override
protected Optional<Cache.Roots> getCacheRoots(EnsoContext context) {
if (module != context.getBuiltins().getModule()) {
return context
.getPackageOf(module.getSourceFile())
.map(
pkg -> {
var irCacheRoot = pkg.getIrCacheRootForPackage(Info.ensoVersion());
var qualName = module.getName();
var localCacheRoot = irCacheRoot.resolve(qualName.path().mkString("/"));

var distribution = context.getDistributionManager();
var pathSegmentsJava = new ArrayList<String>();
pathSegmentsJava.addAll(Arrays.asList(
pathSegmentsJava.addAll(
Arrays.asList(
pkg.namespace(),
pkg.normalizedName(),
pkg.getConfig().version(),
Info.ensoVersion()
));
Info.ensoVersion()));
pathSegmentsJava.addAll(qualName.pathAsJava());
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
var path =
distribution.LocallyInstalledDirectories()
.irCacheDirectory()
.resolve(StringUtils.join(pathSegmentsJava, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());

return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
} else {
var distribution = context.getDistributionManager();
var pathSegmentsJava = new ArrayList<String>();
pathSegmentsJava.addAll(Arrays.asList(
Builtins.NAMESPACE,
Builtins.PACKAGE_NAME,
Info.ensoVersion(),
Info.ensoVersion()
));
pathSegmentsJava.addAll(module.getName().pathAsJava());
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(StringUtils.join(pathSegmentsJava, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
});
} else {
var distribution = context.getDistributionManager();
var pathSegmentsJava = new ArrayList<String>();
pathSegmentsJava.addAll(
Arrays.asList(
Builtins.NAMESPACE, Builtins.PACKAGE_NAME, Info.ensoVersion(), Info.ensoVersion()));
pathSegmentsJava.addAll(module.getName().pathAsJava());
var path =
distribution.LocallyInstalledDirectories()
.irCacheDirectory()
.resolve(StringUtils.join(pathSegmentsJava, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());

return Optional.of(new Cache.Roots(globalCacheRoot, globalCacheRoot));
}
return Optional.of(new Cache.Roots(globalCacheRoot, globalCacheRoot));
}
}

@Override
protected byte[] serialize(EnsoContext context, CachedModule entry) throws IOException {
var arr = Persistance.write(entry.moduleIR(), (obj) -> switch (obj) {
case ProcessingPass.Metadata metadata -> metadata.prepareForSerialization(context.getCompiler().context());
default -> obj;
});
return arr;
}
@Override
protected byte[] serialize(EnsoContext context, CachedModule entry) throws IOException {
var arr =
Persistance.write(
entry.moduleIR(),
(obj) ->
switch (obj) {
case ProcessingPass.Metadata metadata -> metadata.prepareForSerialization(
context.getCompiler().context());
default -> obj;
});
return arr;
}

public record CachedModule(Module moduleIR, CompilationStage compilationStage, Source source) {
}
public record CachedModule(Module moduleIR, CompilationStage compilationStage, Source source) {}

public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash,
@JsonProperty("compilation_stage") String compilationStage) implements Cache.Metadata {}
public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash,
@JsonProperty("compilation_stage") String compilationStage)
implements Cache.Metadata {}

private final static String irCacheDataExtension = ".ir";
private static final String irCacheDataExtension = ".ir";

private final static String irCacheMetadataExtension = ".meta";
private static final String irCacheMetadataExtension = ".meta";

private final static ObjectMapper objectMapper = new ObjectMapper();
private static final ObjectMapper objectMapper = new ObjectMapper();

@SuppressWarnings("unchecked")
private static <T extends Exception> T raise(Class<T> cls, Exception e) throws T {
throw (T)e;
}
@SuppressWarnings("unchecked")
private static <T extends Exception> T raise(Class<T> cls, Exception e) throws T {
throw (T) e;
}
}
@ -11,24 +11,23 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.enso.editions.LibraryName;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.pkg.SourceFile;
import org.enso.polyglot.Suggestion;

import java.io.Serializable;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;

public final class SuggestionsCache
extends Cache<SuggestionsCache.CachedSuggestions, SuggestionsCache.Metadata> {

private static final String SUGGESTIONS_CACHE_DATA_EXTENSION = ".suggestions";
private static final String SUGGESTIONS_CACHE_METADATA_EXTENSION =".suggestions.meta";
private static final String SUGGESTIONS_CACHE_METADATA_EXTENSION = ".suggestions.meta";

private final static ObjectMapper objectMapper = new ObjectMapper();
private static final ObjectMapper objectMapper = new ObjectMapper();

final LibraryName libraryName;

@ -43,20 +42,24 @@ public final class SuggestionsCache
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry) {
try {
return objectMapper.writeValueAsString(new Metadata(sourceDigest, blobDigest)).getBytes(metadataCharset);
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}

@Override
protected CachedSuggestions deserialize(EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger)
throws ClassNotFoundException, ClassNotFoundException, IOException {
protected CachedSuggestions deserialize(
EnsoContext context, byte[] data, Metadata meta, TruffleLogger logger)
throws ClassNotFoundException, ClassNotFoundException, IOException {
try (var stream = new ObjectInputStream(new ByteArrayInputStream(data))) {
if (stream.readObject() instanceof Suggestions suggestions) {
return new CachedSuggestions(libraryName, suggestions, Optional.empty());
} else {
throw new ClassNotFoundException("Expected SuggestionsCache.Suggestions, got " + data.getClass());
throw new ClassNotFoundException(
"Expected SuggestionsCache.Suggestions, got " + data.getClass());
}
}
}
@ -87,22 +90,29 @@ public final class SuggestionsCache

@Override
protected Optional<Roots> getCacheRoots(EnsoContext context) {
return context.getPackageRepository().getPackageForLibraryJava(libraryName).map(pkg -> {
var bindingsCacheRoot = pkg.getSuggestionsCacheRootForPackage(Info.ensoVersion());
var localCacheRoot = bindingsCacheRoot.resolve(libraryName.namespace());
var distribution = context.getDistributionManager();
var pathSegments = new String[]{
pkg.namespace(),
pkg.normalizedName(),
pkg.getConfig().version(),
Info.ensoVersion(),
libraryName.namespace()
};
var path = distribution.LocallyInstalledDirectories().irCacheDirectory()
.resolve(StringUtils.join(pathSegments, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
return context
.getPackageRepository()
.getPackageForLibraryJava(libraryName)
.map(
pkg -> {
var bindingsCacheRoot = pkg.getSuggestionsCacheRootForPackage(Info.ensoVersion());
var localCacheRoot = bindingsCacheRoot.resolve(libraryName.namespace());
var distribution = context.getDistributionManager();
var pathSegments =
new String[] {
pkg.namespace(),
pkg.normalizedName(),
pkg.getConfig().version(),
Info.ensoVersion(),
libraryName.namespace()
};
var path =
distribution.LocallyInstalledDirectories()
.irCacheDirectory()
.resolve(StringUtils.join(pathSegments, "/"));
var globalCacheRoot = context.getTruffleFile(path.toFile());
return new Cache.Roots(localCacheRoot, globalCacheRoot);
});
}

@Override
@ -115,11 +125,11 @@ public final class SuggestionsCache
}

// Suggestions class is not a record because of a Frgaal bug leading to invalid compilation error.
public final static class Suggestions implements Serializable {
public static final class Suggestions implements Serializable {

private final List<Suggestion> suggestions;

public Suggestions(List<Suggestion> suggestions) {
public Suggestions(List<Suggestion> suggestions) {
this.suggestions = suggestions;
}

@ -128,15 +138,19 @@ public final class SuggestionsCache
}
}

// CachedSuggestions class is not a record because of a Frgaal bug leading to invalid compilation error.
public final static class CachedSuggestions {
// CachedSuggestions class is not a record because of a Frgaal bug leading to invalid compilation
// error.
public static final class CachedSuggestions {

private final LibraryName libraryName;
private final Suggestions suggestions;

private final Optional<List<SourceFile<TruffleFile>>> sources;

public CachedSuggestions(LibraryName libraryName, Suggestions suggestions, Optional<List<SourceFile<TruffleFile>>> sources) {
public CachedSuggestions(
LibraryName libraryName,
Suggestions suggestions,
Optional<List<SourceFile<TruffleFile>>> sources) {
this.libraryName = libraryName;
this.suggestions = suggestions;
this.sources = sources;
@ -160,7 +174,6 @@ public final class SuggestionsCache
}

record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash
) implements Cache.Metadata { }
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}
}
@ -36,7 +36,9 @@ public abstract class BaseNode extends Node {
this.tailStatus = tailStatus;
}

/** @return the tail position status of this node. */
/**
* @return the tail position status of this node.
*/
public TailStatus getTailStatus() {
return tailStatus;
}
@ -1,22 +1,19 @@
package org.enso.interpreter.node;

import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.FrameSlotKind;
import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.source.SourceSection;

import java.util.Objects;

import org.enso.compiler.context.LocalScope;
import org.enso.interpreter.EnsoLanguage;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.error.DataflowError;
import org.enso.compiler.context.LocalScope;
import org.enso.interpreter.runtime.scope.ModuleScope;
import org.enso.interpreter.util.ScalaConversions;

import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.FrameSlotKind;

/** A common base class for all kinds of root node in Enso. */
@NodeInfo(shortName = "Root", description = "A root node for Enso computations")
public abstract class EnsoRootNode extends RootNode {
@ -47,7 +44,8 @@ public abstract class EnsoRootNode extends RootNode {
this.name = name;
this.localScope = localScope;
this.moduleScope = moduleScope;
if (sourceSection == null || moduleScope.getModule().isModuleSource(sourceSection.getSource())) {
if (sourceSection == null
|| moduleScope.getModule().isModuleSource(sourceSection.getSource())) {
this.inlineSource = null;
} else {
this.inlineSource = sourceSection.getSource();
@ -57,19 +55,16 @@ public abstract class EnsoRootNode extends RootNode {
}

/**
* Builds a {@link FrameDescriptor} from the alias analysis scope metadata
* for the local scope. See [[AliasAnalysis.Graph.Scope.allDefinitions]].
* Builds a {@link FrameDescriptor} from the alias analysis scope metadata for the local scope.
* See [[AliasAnalysis.Graph.Scope.allDefinitions]].
*
* @return {@link FrameDescriptor} built from the variable definitions in
* the local localScope.
* @return {@link FrameDescriptor} built from the variable definitions in the local localScope.
*/
private static FrameDescriptor buildFrameDescriptor(LocalScope localScope) {
var descriptorBuilder = FrameDescriptor.newBuilder();
descriptorBuilder.addSlot(FrameSlotKind.Object, LocalScope.monadicStateSlotName(), null);
for (var definition : ScalaConversions.asJava(localScope.scope().allDefinitions())) {
descriptorBuilder.addSlot(
FrameSlotKind.Illegal, definition.symbol(), null
);
descriptorBuilder.addSlot(FrameSlotKind.Illegal, definition.symbol(), null);
}
descriptorBuilder.defaultValue(DataflowError.UNINITIALIZED);
var frameDescriptor = descriptorBuilder.build();
@ -116,13 +111,17 @@ public abstract class EnsoRootNode extends RootNode {
}

static final int NO_SOURCE = -1;

static SourceSection findSourceSection(final RootNode n, int sourceStartIndex, int sourceLength) {
if (sourceStartIndex != NO_SOURCE && n instanceof EnsoRootNode rootNode) {
if (rootNode.inlineSource == null) {
if (rootNode.sourceStartIndex == NO_SOURCE) {
return null;
} else {
return rootNode.getModuleScope().getModule().createSection(sourceStartIndex, sourceLength);
return rootNode
.getModuleScope()
.getModule()
.createSection(sourceStartIndex, sourceLength);
}
} else {
return rootNode.inlineSource.createSection(sourceStartIndex, sourceLength);
@ -5,12 +5,10 @@ import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.frame.Frame;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.instrumentation.GenerateWrapper;
import com.oracle.truffle.api.instrumentation.GenerateWrapper.OutgoingConverter;
import com.oracle.truffle.api.instrumentation.InstrumentableNode;
import com.oracle.truffle.api.instrumentation.ProbeNode;
import com.oracle.truffle.api.instrumentation.StandardTags;
import com.oracle.truffle.api.instrumentation.Tag;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.NodeLibrary;
import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
@ -18,9 +16,7 @@ import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.nodes.UnexpectedResultException;
import com.oracle.truffle.api.source.SourceSection;

import java.util.UUID;

import org.enso.interpreter.runtime.builtin.Builtins;
import org.enso.interpreter.runtime.callable.atom.AtomConstructor;
import org.enso.interpreter.runtime.callable.function.Function;
@ -52,9 +48,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
return node instanceof ExpressionNodeWrapper;
}

/**
* Creates a new instance of this node.
*/
/** Creates a new instance of this node. */
public ExpressionNode() {
sourceLength = EnsoRootNode.NO_SOURCE;
sourceStartIndex = EnsoRootNode.NO_SOURCE;
@ -64,7 +58,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
* Sets the source location of this node.
*
* @param sourceStartIndex the source index this node begins at
* @param sourceLength the length of this node's source
* @param sourceLength the length of this node's source
*/
public void setSourceLocation(int sourceStartIndex, int sourceLength) {
CompilerDirectives.transferToInterpreterAndInvalidate();
@ -80,7 +74,9 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
@Override
public SourceSection getSourceSection() {
var bounds = getSourceSectionBounds();
return bounds == null ? null : EnsoRootNode.findSourceSection(getRootNode(), bounds[0], bounds[1]);
return bounds == null
? null
: EnsoRootNode.findSourceSection(getRootNode(), bounds[0], bounds[1]);
}

public int[] getSourceSectionBounds() {
@ -90,7 +86,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
if (sourceStartIndex == EnsoRootNode.NO_SOURCE && sourceLength == EnsoRootNode.NO_SOURCE) {
return null;
} else {
return new int[] { sourceStartIndex, sourceLength };
return new int[] {sourceStartIndex, sourceLength};
}
}
}
@ -120,7 +116,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
* @param frame the stack frame for execution
* @return the {@code long} value obtained by executing the node
* @throws UnexpectedResultException if the result cannot be represented as a value of the return
* type
* type
*/
public long executeLong(VirtualFrame frame) throws UnexpectedResultException {
return TypesGen.expectLong(executeGeneric(frame));
@ -132,7 +128,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
* @param frame the stack frame for execution
* @return the Atom constructor obtained by executing the node
* @throws UnexpectedResultException if the result cannot be represented as a value of the return
* type
* type
*/
public AtomConstructor executeAtomConstructor(VirtualFrame frame)
throws UnexpectedResultException {
@ -145,7 +141,7 @@ public abstract class ExpressionNode extends BaseNode implements InstrumentableN
* @param frame the stack frame for execution
* @return the function obtained by executing the node
* @throws UnexpectedResultException if the result cannot be represented as a value of the return
* type
* type
*/
public Function executeFunction(VirtualFrame frame) throws UnexpectedResultException {
return TypesGen.expectFunction(executeGeneric(frame));
@ -1,22 +1,20 @@
package org.enso.interpreter.node;

import java.util.function.Supplier;

import org.enso.compiler.core.CompilerError;
import org.enso.compiler.context.LocalScope;
import org.enso.interpreter.EnsoLanguage;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.data.Type;
import org.enso.interpreter.runtime.data.text.Text;
import org.enso.interpreter.runtime.error.PanicException;
import org.enso.interpreter.runtime.scope.ModuleScope;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.ReportPolymorphism;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.source.SourceSection;
import java.util.function.Supplier;
import org.enso.compiler.context.LocalScope;
import org.enso.compiler.core.CompilerError;
import org.enso.interpreter.EnsoLanguage;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.data.Type;
import org.enso.interpreter.runtime.data.text.Text;
import org.enso.interpreter.runtime.error.PanicException;
import org.enso.interpreter.runtime.scope.ModuleScope;

@ReportPolymorphism
@NodeInfo(shortName = "Method", description = "A root node for Enso methods.")
@ -33,12 +31,15 @@ public class MethodRootNode extends ClosureRootNode {
SourceSection section,
Type type,
String methodName) {
super(language,
super(
language,
localScope,
moduleScope,
body,
section,
shortName(type.getName(), methodName), null, false);
shortName(type.getName(), methodName),
null,
false);
this.type = type;
this.methodName = methodName;
}
@ -68,13 +69,7 @@ public class MethodRootNode extends ClosureRootNode {
Type type,
String methodName) {
return build(
language,
localScope,
moduleScope,
new LazyBodyNode(body),
section,
type,
methodName);
language, localScope, moduleScope, new LazyBodyNode(body), section, type, methodName);
}

public static MethodRootNode build(
@ -85,8 +80,7 @@ public class MethodRootNode extends ClosureRootNode {
SourceSection section,
Type type,
String methodName) {
return new MethodRootNode(
language, localScope, moduleScope, body, section, type, methodName);
return new MethodRootNode(language, localScope, moduleScope, body, section, type, methodName);
}

/**
@ -105,12 +99,16 @@ public class MethodRootNode extends ClosureRootNode {
+ methodName;
}

/** @return the constructor this method was defined for */
/**
* @return the constructor this method was defined for
*/
public Type getType() {
return type;
}

/** @return the method name */
/**
* @return the method name
*/
public String getMethodName() {
return methodName;
}
@ -160,6 +158,7 @@ public class MethodRootNode extends ClosureRootNode {
}
}
}

public boolean isSubjectToInstrumentation() {
return true;
}
@ -9,15 +9,17 @@ import com.oracle.truffle.api.nodes.DirectCallNode;
import com.oracle.truffle.api.nodes.IndirectCallNode;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.NodeInfo;
import org.enso.interpreter.node.InlineableNode;
import org.enso.interpreter.runtime.callable.CallerInfo;
import org.enso.interpreter.runtime.callable.function.Function;
import org.enso.interpreter.node.InlineableNode;

/**
* This node is responsible for optimising function calls. Where possible, it will handle the call via:
* This node is responsible for optimising function calls. Where possible, it will handle the call
* via:
*
* <ul>
*   <li>{@link InlineableNode} to force inlining</li>
*   <li>{@link DirectCallNode} with potential for inlining</li>
*   <li>{@link InlineableNode} to force inlining
*   <li>{@link DirectCallNode} with potential for inlining
* </ul>
*/
@NodeInfo(shortName = "ExecCall", description = "Optimises function calls")
@ -47,10 +49,9 @@ public abstract class ExecuteCallNode extends Node {
* @param callNode the cached call node for {@code cachedTarget}
* @return the result of executing {@code function} on {@code arguments}
*/
@Specialization(guards = {
"function.getCallTarget() == cachedTarget",
"callNode != null"
}, limit = "3")
@Specialization(
guards = {"function.getCallTarget() == cachedTarget", "callNode != null"},
limit = "3")
protected Object callInlineable(
VirtualFrame frame,
Function function,
@ -77,9 +78,11 @@ public abstract class ExecuteCallNode extends Node {
* @param callNode the cached call node for {@code cachedTarget}
* @return the result of executing {@code function} on {@code arguments}
*/
@Specialization(guards = {
"function.getCallTarget() == cachedTarget",
}, limit = "3")
@Specialization(
guards = {
"function.getCallTarget() == cachedTarget",
},
limit = "3")
protected Object callDirect(
Function function,
CallerInfo callerInfo,
@ -115,7 +118,7 @@ public abstract class ExecuteCallNode extends Node {
* @param callNode the cached call node for making indirect calls
* @return the result of executing {@code function} on {@code arguments}
*/
@Specialization(replaces = { "callDirect", "callInlineable" })
@Specialization(replaces = {"callDirect", "callInlineable"})
protected Object callIndirect(
Function function,
CallerInfo callerInfo,
@ -138,5 +141,9 @@ public abstract class ExecuteCallNode extends Node {
* @return the result of executing {@code function} on {@code arguments}
*/
public abstract Object executeCall(
VirtualFrame frame, Function function, CallerInfo callerInfo, Object state, Object[] arguments);
VirtualFrame frame,
Function function,
CallerInfo callerInfo,
Object state,
Object[] arguments);
}
@ -10,22 +10,18 @@ import com.oracle.truffle.api.instrumentation.ProbeNode;
import com.oracle.truffle.api.instrumentation.StandardTags;
import com.oracle.truffle.api.instrumentation.Tag;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.source.SourceSection;

import org.enso.interpreter.runtime.callable.function.Function;
import org.enso.interpreter.runtime.tag.IdentifiedTag;

import java.util.Arrays;
import java.util.UUID;

import org.enso.interpreter.node.ClosureRootNode;
import org.enso.interpreter.runtime.callable.function.Function;
import org.enso.interpreter.runtime.data.EnsoObject;
import org.enso.interpreter.runtime.tag.AvoidIdInstrumentationTag;
import org.enso.interpreter.runtime.tag.IdentifiedTag;

/**
* A node used for instrumenting function calls. It does nothing useful from the language
@ -108,17 +104,23 @@ public class FunctionCallInstrumentationNode extends Node implements Instrumenta
}
}

/** @return the function for this call. */
/**
* @return the function for this call.
*/
public Function getFunction() {
return function;
}

/** @return the state passed to the function in this call. */
/**
* @return the state passed to the function in this call.
*/
public Object getState() {
return state;
}

/** @return the arguments passed to the function in this call. */
/**
* @return the arguments passed to the function in this call.
*/
public Object[] getArguments() {
return arguments;
}
@ -126,7 +128,11 @@ public class FunctionCallInstrumentationNode extends Node implements Instrumenta
@Override
@CompilerDirectives.TruffleBoundary
public String toString() {
return "FunctionCall[function=" + function + ", arguments: " + Arrays.toString(arguments) + "]";
return "FunctionCall[function="
+ function
+ ", arguments: "
+ Arrays.toString(arguments)
+ "]";
}
}

@ -170,7 +176,9 @@ public class FunctionCallInstrumentationNode extends Node implements Instrumenta
return tag == StandardTags.CallTag.class || (tag == IdentifiedTag.class && id != null);
}

/** @return the source section of this node. */
/**
* @return the source section of this node.
*/
@Override
public SourceSection getSourceSection() {
var parent = getParent();
@ -184,7 +192,9 @@ public class FunctionCallInstrumentationNode extends Node implements Instrumenta
return null;
}

/** @return the expression ID of this node. */
/**
* @return the expression ID of this node.
*/
public UUID getId() {
return id;
}
@ -25,7 +25,9 @@ import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
@ImportStatic({HostMethodCallNode.PolyglotCallType.class, HostMethodCallNode.class})
public abstract class IndirectInvokeConversionNode extends Node {

/** @return a new indirect method invocation node */
/**
* @return a new indirect method invocation node
*/
public static IndirectInvokeConversionNode build() {
return IndirectInvokeConversionNodeGen.create();
}
@ -39,7 +39,9 @@ import org.enso.interpreter.runtime.state.State;
@ImportStatic({HostMethodCallNode.PolyglotCallType.class, HostMethodCallNode.class})
public abstract class IndirectInvokeMethodNode extends Node {

/** @return a new indirect method invocation node */
/**
* @return a new indirect method invocation node
*/
public static IndirectInvokeMethodNode build() {
return IndirectInvokeMethodNodeGen.create();
}
@ -215,8 +215,10 @@ public abstract class InvokeCallableNode extends BaseNode {
lock.unlock();
}
}
selfArgument = thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL);
thatArgument = thatExecutor.executeThunk(callerFrame, thatArgument, state, TailStatus.NOT_TAIL);
selfArgument =
thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL);
thatArgument =
thatExecutor.executeThunk(callerFrame, thatArgument, state, TailStatus.NOT_TAIL);

arguments[thisArgumentPosition] = selfArgument;
arguments[thatArgumentPosition] = thatArgument;
@ -226,7 +228,11 @@ public abstract class InvokeCallableNode extends BaseNode {
} else {
CompilerDirectives.transferToInterpreter();
var ctx = EnsoContext.get(this);
throw new PanicException(ctx.getBuiltins().error().makeNoConversionCurrying(canApplyThis, canApplyThat, conversion), this);
throw new PanicException(
ctx.getBuiltins()
.error()
.makeNoConversionCurrying(canApplyThis, canApplyThat, conversion),
this);
}
}

@ -248,7 +254,8 @@ public abstract class InvokeCallableNode extends BaseNode {
lock.unlock();
}
}
selfArgument = thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL);
selfArgument =
thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL);
arguments[thisArgumentPosition] = selfArgument;
}
return invokeMethodNode.execute(callerFrame, state, symbol, selfArgument, arguments);
@ -282,11 +289,11 @@ public abstract class InvokeCallableNode extends BaseNode {
try {
if (childDispatch == null) {
childDispatch =
insert(
build(
invokeFunctionNode.getSchema(),
invokeFunctionNode.getDefaultsExecutionMode(),
invokeFunctionNode.getArgumentsExecutionMode()));
insert(
build(
invokeFunctionNode.getSchema(),
invokeFunctionNode.getDefaultsExecutionMode(),
invokeFunctionNode.getArgumentsExecutionMode()));
childDispatch.setTailStatus(getTailStatus());
childDispatch.setId(invokeFunctionNode.getId());
notifyInserted(childDispatch);
@ -296,12 +303,7 @@ public abstract class InvokeCallableNode extends BaseNode {
}
}

var result = childDispatch.execute(
callable,
callerFrame,
state,
arguments);

var result = childDispatch.execute(callable, callerFrame, state, arguments);

if (result instanceof DataflowError) {
return result;
@ -350,7 +352,9 @@ public abstract class InvokeCallableNode extends BaseNode {
}
}

/** @return the source section for this node. */
/**
* @return the source section for this node.
*/
@Override
public SourceSection getSourceSection() {
Node parent = getParent();
@ -1,8 +1,17 @@
package org.enso.interpreter.node.callable;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.source.SourceSection;
import java.util.UUID;
import java.util.concurrent.locks.Lock;

import org.enso.interpreter.node.BaseNode;
import org.enso.interpreter.node.callable.dispatch.InvokeFunctionNode;
import org.enso.interpreter.node.callable.resolver.ConversionResolverNode;
@ -23,17 +32,6 @@ import org.enso.interpreter.runtime.error.WithWarnings;
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
import org.enso.interpreter.runtime.state.State;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.source.SourceSection;

public abstract class InvokeConversionNode extends BaseNode {
private @Child InvokeFunctionNode invokeFunctionNode;
private @Child InvokeConversionNode childDispatch;
@ -132,9 +130,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary dispatch,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.execute(extractType(self),
EnsoContext.get(this).getBuiltins().dataflowError(),
conversion);
conversionResolverNode.execute(
extractType(self), EnsoContext.get(this).getBuiltins().dataflowError(), conversion);
if (function != null) {
return invokeFunctionNode.execute(function, frame, state, arguments);
} else {
@ -165,10 +162,7 @@ public abstract class InvokeConversionNode extends BaseNode {
var result = that.castTo(type);
if (result == null) {
throw new PanicException(
EnsoContext.get(this)
.getBuiltins()
.error()
.makeNoSuchConversion(type, self, conversion),
EnsoContext.get(this).getBuiltins().error().makeNoSuchConversion(type, self, conversion),
this);
}
return result;
@ -229,10 +223,8 @@ public abstract class InvokeConversionNode extends BaseNode {
String str = interop.asString(that);
Text txt = Text.create(str);
Function function =
conversionResolverNode.expectNonNull(txt,
extractType(self),
EnsoContext.get(this).getBuiltins().text(),
conversion);
conversionResolverNode.expectNonNull(
txt, extractType(self), EnsoContext.get(this).getBuiltins().text(), conversion);
arguments[0] = txt;
return invokeFunctionNode.execute(function, frame, state, arguments);
} catch (UnsupportedMessageException e) {
@ -258,7 +250,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary typesLib,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.expectNonNull(that, extractType(self), EnsoContext.get(this).getBuiltins().date(), conversion);
conversionResolverNode.expectNonNull(
that, extractType(self), EnsoContext.get(this).getBuiltins().date(), conversion);
return invokeFunctionNode.execute(function, frame, state, arguments);
}

@ -280,10 +273,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary typesLib,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.expectNonNull(that,
extractType(self),
EnsoContext.get(this).getBuiltins().timeOfDay(),
conversion);
conversionResolverNode.expectNonNull(
that, extractType(self), EnsoContext.get(this).getBuiltins().timeOfDay(), conversion);
return invokeFunctionNode.execute(function, frame, state, arguments);
}

@ -305,10 +296,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary typesLib,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.expectNonNull(that,
extractType(self),
EnsoContext.get(this).getBuiltins().dateTime(),
conversion);
conversionResolverNode.expectNonNull(
that, extractType(self), EnsoContext.get(this).getBuiltins().dateTime(), conversion);
return invokeFunctionNode.execute(function, frame, state, arguments);
}

@ -329,10 +318,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary typesLib,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.expectNonNull(that,
extractType(self),
EnsoContext.get(this).getBuiltins().duration(),
conversion);
conversionResolverNode.expectNonNull(
that, extractType(self), EnsoContext.get(this).getBuiltins().duration(), conversion);
return invokeFunctionNode.execute(function, frame, state, arguments);
}

@ -353,10 +340,8 @@ public abstract class InvokeConversionNode extends BaseNode {
@Shared("typesLib") @CachedLibrary(limit = "10") TypesLibrary typesLib,
@Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
Function function =
conversionResolverNode.expectNonNull(thatMap,
extractType(self),
EnsoContext.get(this).getBuiltins().map(),
conversion);
conversionResolverNode.expectNonNull(
thatMap, extractType(self), EnsoContext.get(this).getBuiltins().map(), conversion);
return invokeFunctionNode.execute(function, frame, state, arguments);
}

@ -1,5 +1,23 @@
package org.enso.interpreter.node.callable;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.ImportStatic;
import com.oracle.truffle.api.dsl.NonIdempotent;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.UnknownIdentifierException;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.profiles.BranchProfile;
import com.oracle.truffle.api.profiles.CountingConditionProfile;
import com.oracle.truffle.api.source.SourceSection;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
@ -8,7 +26,6 @@ import java.util.Arrays;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.locks.Lock;

import org.enso.interpreter.Constants.Names;
import org.enso.interpreter.node.BaseNode;
import org.enso.interpreter.node.MethodRootNode;
@ -44,29 +61,11 @@ import org.enso.interpreter.runtime.error.WithWarnings;
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
import org.enso.interpreter.runtime.state.State;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.ImportStatic;
import com.oracle.truffle.api.dsl.NonIdempotent;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.UnknownIdentifierException;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.profiles.BranchProfile;
import com.oracle.truffle.api.profiles.CountingConditionProfile;
import com.oracle.truffle.api.source.SourceSection;

@ImportStatic({HostMethodCallNode.PolyglotCallType.class, HostMethodCallNode.class})
public abstract class InvokeMethodNode extends BaseNode {
protected static final int CACHE_SIZE = 10;
private @Child InvokeFunctionNode invokeFunctionNode;

/**
* A node that is created specifically for cases when a static method is called on {@code Any}. In
* such cases, we need to modify the number of passed arguments, therefore, a new {@link
@ -88,14 +87,16 @@ public abstract class InvokeMethodNode extends BaseNode {
* @param defaultsExecutionMode the defaulted arguments handling mode for this call
* @param argumentsExecutionMode the arguments execution mode for this call
* @param thisArgumentPosition position
* @param onBoundary shall we emit plain {@code PanicException} or also attach {@code UnknownIdentifierException} cause
* @param onBoundary shall we emit plain {@code PanicException} or also attach {@code
* UnknownIdentifierException} cause
* @return a new invoke method node
*/
public static InvokeMethodNode build(
CallArgumentInfo[] schema,
InvokeCallableNode.DefaultsExecutionMode defaultsExecutionMode,
InvokeCallableNode.ArgumentsExecutionMode argumentsExecutionMode,
int thisArgumentPosition, boolean onBoundary) {
int thisArgumentPosition,
boolean onBoundary) {
return InvokeMethodNodeGen.create(
schema, defaultsExecutionMode, argumentsExecutionMode, thisArgumentPosition, onBoundary);
}
@ -104,8 +105,8 @@ public abstract class InvokeMethodNode extends BaseNode {
CallArgumentInfo[] schema,
InvokeCallableNode.DefaultsExecutionMode defaultsExecutionMode,
InvokeCallableNode.ArgumentsExecutionMode argumentsExecutionMode,
int thisArgumentPosition, boolean onBoundary
) {
int thisArgumentPosition,
boolean onBoundary) {
this.invokeFunctionNode =
InvokeFunctionNode.build(schema, defaultsExecutionMode, argumentsExecutionMode);
this.argumentCount = schema.length;
@ -219,17 +220,20 @@ public abstract class InvokeMethodNode extends BaseNode {
argsWithDefaultValCount++;
}
}
// Static method calls on Any are resolved to `Any.type.method`. Such methods take one additional
// Static method calls on Any are resolved to `Any.type.method`. Such methods take one
// additional
// self argument (with Any.type) as opposed to static method calls resolved on any other
// types. This case is handled in the following block.
boolean shouldPrependSyntheticSelfArg = resolvedFuncArgCount - argsWithDefaultValCount == arguments.length + 1;
boolean shouldPrependSyntheticSelfArg =
resolvedFuncArgCount - argsWithDefaultValCount == arguments.length + 1;
if (isAnyEigenType(selfTpe) && shouldPrependSyntheticSelfArg) {
// function is a static method on Any, so the first two arguments in `invokeFuncSchema`
// represent self arguments.
boolean selfArgSpecified = false;
if (invokeFuncSchema.length > 1) {
selfArgSpecified =
invokeFuncSchema[1].getName() != null && invokeFuncSchema[1].getName().equals(Names.SELF_ARGUMENT);
invokeFuncSchema[1].getName() != null
&& invokeFuncSchema[1].getName().equals(Names.SELF_ARGUMENT);
}

if (selfArgSpecified) {
@ -270,7 +274,8 @@ public abstract class InvokeMethodNode extends BaseNode {
return invokeFunctionNode.execute(function, frame, state, arguments);
}

private PanicException methodNotFound(UnresolvedSymbol symbol, Object self) throws PanicException {
private PanicException methodNotFound(UnresolvedSymbol symbol, Object self)
throws PanicException {
var cause = onBoundary ? UnknownIdentifierException.create(symbol.getName()) : null;
var payload = EnsoContext.get(this).getBuiltins().error().makeNoSuchMethod(self, symbol);
throw new PanicException(payload, cause, this);
@ -283,8 +288,7 @@ public abstract class InvokeMethodNode extends BaseNode {
UnresolvedSymbol symbol,
EnsoMultiValue self,
Object[] arguments,
@Shared("methodResolverNode") @Cached MethodResolverNode methodResolverNode
) {
@Shared("methodResolverNode") @Cached MethodResolverNode methodResolverNode) {
var fnAndType = self.resolveSymbol(methodResolverNode, symbol);
if (fnAndType != null) {
var unwrapSelf = self.castTo(fnAndType.getRight());
@ -306,7 +310,8 @@ public abstract class InvokeMethodNode extends BaseNode {
Object[] arguments,
@Shared("methodResolverNode") @Cached MethodResolverNode methodResolverNode) {
Function function =
methodResolverNode.executeResolution(EnsoContext.get(this).getBuiltins().dataflowError(), symbol);
methodResolverNode.executeResolution(
EnsoContext.get(this).getBuiltins().dataflowError(), symbol);
if (errorReceiverProfile.profile(function == null)) {
return self;
} else {
@ -444,7 +449,8 @@ public abstract class InvokeMethodNode extends BaseNode {
invokeFunctionNode.getSchema(),
invokeFunctionNode.getDefaultsExecutionMode(),
invokeFunctionNode.getArgumentsExecutionMode(),
thisArgumentPosition, false));
thisArgumentPosition,
false));
childDispatch.setTailStatus(getTailStatus());
childDispatch.setId(invokeFunctionNode.getId());
notifyInserted(childDispatch);

@ -1,12 +1,11 @@
package org.enso.interpreter.node.callable;

import org.enso.interpreter.node.ExpressionNode;
import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
import org.enso.interpreter.runtime.error.PanicSentinel;

import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.NodeInfo;
import org.enso.interpreter.node.ExpressionNode;
import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
import org.enso.interpreter.runtime.error.PanicSentinel;

@NodeInfo(shortName = "[]", description = "Creates a vector from given expressions.")
public class SequenceLiteralNode extends ExpressionNode {

@ -1,10 +1,24 @@
|
||||
package org.enso.interpreter.node.callable.argument;
|
||||
|
||||
import com.oracle.truffle.api.CompilerAsserts;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.TruffleLanguage;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.MaterializedFrame;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.nodes.ExplodeLoop;
|
||||
import com.oracle.truffle.api.nodes.InvalidAssumptionException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.NodeUtil;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.enso.compiler.core.ir.Name;
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.node.BaseNode.TailStatus;
|
||||
@ -34,42 +48,26 @@ import org.enso.interpreter.runtime.error.PanicSentinel;
|
||||
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
|
||||
import org.graalvm.collections.Pair;
|
||||
|
||||
import com.oracle.truffle.api.CompilerAsserts;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.TruffleLanguage;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.MaterializedFrame;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.nodes.ExplodeLoop;
|
||||
import com.oracle.truffle.api.nodes.InvalidAssumptionException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.NodeUtil;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
|
||||
public abstract class ReadArgumentCheckNode extends Node {
|
||||
private final String name;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private String expectedTypeMessage;
|
||||
@CompilerDirectives.CompilationFinal private String expectedTypeMessage;
|
||||
|
||||
ReadArgumentCheckNode(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
/** */
|
||||
public static ExpressionNode wrap(ExpressionNode original, ReadArgumentCheckNode check) {
|
||||
return new TypeCheckExpressionNode(original, check);
|
||||
}
|
||||
|
||||
/** Executes check or conversion of the value.abstract
|
||||
/**
|
||||
* Executes check or conversion of the value.abstract
|
||||
*
|
||||
* @param frame frame requesting the conversion
|
||||
* @param value the value to convert
|
||||
* @return {@code null} when the check isn't satisfied and conversion isn't possible or non-{@code null} value that can be used as a result
|
||||
* @return {@code null} when the check isn't satisfied and conversion isn't possible or non-{@code
|
||||
* null} value that can be used as a result
|
||||
*/
|
||||
public final Object handleCheckOrConversion(VirtualFrame frame, Object value) {
|
||||
var result = executeCheckOrConversion(frame, value);
|
||||
@ -80,7 +78,9 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
abstract Object findDirectMatch(VirtualFrame frame, Object value);
|
||||
|
||||
abstract Object executeCheckOrConversion(VirtualFrame frame, Object value);
|
||||
|
||||
abstract String expectedTypeMessage();
|
||||
|
||||
final PanicException panicAtTheEnd(Object v) {
|
||||
@ -96,7 +96,11 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
|
||||
public static ReadArgumentCheckNode allOf(Name argumentName, ReadArgumentCheckNode... checks) {
|
||||
var list = Arrays.asList(checks);
|
||||
var flatten = list.stream().flatMap(n -> n instanceof AllOfNode all ? Arrays.asList(all.checks).stream() : Stream.of(n)).toList();
|
||||
var flatten =
|
||||
list.stream()
|
||||
.flatMap(
|
||||
n -> n instanceof AllOfNode all ? Arrays.asList(all.checks).stream() : Stream.of(n))
|
||||
.toList();
|
||||
var arr = toArray(flatten);
|
||||
return switch (arr.length) {
|
||||
case 0 -> null;
|
||||
@ -142,7 +146,7 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
var cnt = (int) list.stream().filter(n -> n != null).count();
|
||||
var arr = new ReadArgumentCheckNode[cnt];
|
||||
var it = list.iterator();
|
||||
for (int i = 0; i < cnt;) {
|
||||
for (int i = 0; i < cnt; ) {
|
||||
var element = it.next();
|
||||
if (element != null) {
|
||||
arr[i++] = element;
|
||||
@ -152,10 +156,8 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
static final class AllOfNode extends ReadArgumentCheckNode {
|
||||
@Children
|
||||
private ReadArgumentCheckNode[] checks;
|
||||
@Child
|
||||
private TypesLibrary types;
|
||||
@Children private ReadArgumentCheckNode[] checks;
|
||||
@Child private TypesLibrary types;
|
||||
|
||||
AllOfNode(String name, ReadArgumentCheckNode[] checks) {
|
||||
super(name);
|
||||
@ -188,13 +190,14 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
|
||||
@Override
|
||||
String expectedTypeMessage() {
|
||||
return Arrays.stream(checks).map(n -> n.expectedTypeMessage()).collect(Collectors.joining(" & "));
|
||||
return Arrays.stream(checks)
|
||||
.map(n -> n.expectedTypeMessage())
|
||||
.collect(Collectors.joining(" & "));
|
||||
}
|
||||
}
|
||||
|
||||
static final class OneOfNode extends ReadArgumentCheckNode {
|
||||
@Children
|
||||
private ReadArgumentCheckNode[] checks;
|
||||
@Children private ReadArgumentCheckNode[] checks;
|
||||
|
||||
OneOfNode(String name, ReadArgumentCheckNode[] checks) {
|
||||
super(name);
|
||||
@ -231,18 +234,17 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
|
||||
@Override
|
||||
String expectedTypeMessage() {
|
||||
return Arrays.stream(checks).map(n -> n.expectedTypeMessage()).collect(Collectors.joining(" | "));
|
||||
return Arrays.stream(checks)
|
||||
.map(n -> n.expectedTypeMessage())
|
||||
.collect(Collectors.joining(" | "));
|
||||
}
|
||||
}
|
||||
|
||||
static abstract class TypeCheckNode extends ReadArgumentCheckNode {
|
||||
abstract static class TypeCheckNode extends ReadArgumentCheckNode {
|
||||
private final Type expectedType;
|
||||
@Child
|
||||
IsValueOfTypeNode checkType;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private String expectedTypeMessage;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private LazyCheckRootNode lazyCheck;
|
||||
@Child IsValueOfTypeNode checkType;
|
||||
@CompilerDirectives.CompilationFinal private String expectedTypeMessage;
|
||||
@CompilerDirectives.CompilationFinal private LazyCheckRootNode lazyCheck;
|
||||
|
||||
TypeCheckNode(String name, Type expectedType) {
|
||||
super(name);
|
||||
@ -256,7 +258,8 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization(rewriteOn = InvalidAssumptionException.class)
|
||||
Object doCheckNoConversionNeeded(VirtualFrame frame, Object v) throws InvalidAssumptionException {
|
||||
Object doCheckNoConversionNeeded(VirtualFrame frame, Object v)
|
||||
throws InvalidAssumptionException {
|
||||
var ret = findDirectMatch(frame, v);
|
||||
if (ret != null) {
|
||||
return ret;
|
||||
@ -265,26 +268,21 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
}
|
||||
|
||||
@Specialization(limit = "10", guards = {
|
||||
"cachedType != null",
|
||||
"findType(typeOfNode, v) == cachedType"
|
||||
})
|
||||
@Specialization(
|
||||
limit = "10",
|
||||
guards = {"cachedType != null", "findType(typeOfNode, v) == cachedType"})
|
||||
Object doWithConversionCached(
|
||||
VirtualFrame frame, Object v,
|
||||
@Shared("typeOfNode")
|
||||
@Cached TypeOfNode typeOfNode,
|
||||
@Cached("findType(typeOfNode, v)") Type cachedType,
|
||||
@Cached("findConversionNode(cachedType)") ApplicationNode convertNode
|
||||
) {
|
||||
VirtualFrame frame,
|
||||
Object v,
|
||||
@Shared("typeOfNode") @Cached TypeOfNode typeOfNode,
|
||||
@Cached("findType(typeOfNode, v)") Type cachedType,
|
||||
@Cached("findConversionNode(cachedType)") ApplicationNode convertNode) {
|
||||
return handleWithConversion(frame, v, convertNode);
|
||||
}
|
||||
|
||||
@Specialization(replaces = "doWithConversionCached")
|
||||
Object doWithConversionUncached(
|
||||
VirtualFrame frame, Object v,
|
||||
@Shared("typeOfNode")
|
||||
@Cached TypeOfNode typeOfNode
|
||||
) {
|
||||
VirtualFrame frame, Object v, @Shared("typeOfNode") @Cached TypeOfNode typeOfNode) {
|
||||
var type = findType(typeOfNode, v);
|
||||
return doWithConversionUncachedBoundary(frame == null ? null : frame.materialize(), v, type);
|
||||
}
|
||||
@ -341,20 +339,21 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
var convNode = LiteralNode.build(convAndType.getLeft());
|
||||
var intoNode = LiteralNode.build(convAndType.getRight());
|
||||
var valueNode = ran.plainRead();
|
||||
var args = new CallArgument[]{
|
||||
new CallArgument(null, intoNode),
|
||||
new CallArgument(null, valueNode)
|
||||
};
|
||||
var args =
|
||||
new CallArgument[] {
|
||||
new CallArgument(null, intoNode), new CallArgument(null, valueNode)
|
||||
};
|
||||
return ApplicationNode.build(convNode, args, DefaultsExecutionMode.EXECUTE);
|
||||
} else if (NodeUtil.findParent(this, TypeCheckExpressionNode.class) instanceof TypeCheckExpressionNode tcen) {
|
||||
} else if (NodeUtil.findParent(this, TypeCheckExpressionNode.class)
|
||||
instanceof TypeCheckExpressionNode tcen) {
|
||||
CompilerAsserts.neverPartOfCompilation();
|
||||
var convNode = LiteralNode.build(convAndType.getLeft());
|
||||
var intoNode = LiteralNode.build(convAndType.getRight());
|
||||
var valueNode = tcen.original;
|
||||
var args = new CallArgument[]{
|
||||
new CallArgument(null, intoNode),
|
||||
new CallArgument(null, valueNode)
|
||||
};
|
||||
var args =
|
||||
new CallArgument[] {
|
||||
new CallArgument(null, intoNode), new CallArgument(null, valueNode)
|
||||
};
|
||||
return ApplicationNode.build(convNode, args, DefaultsExecutionMode.EXECUTE);
|
||||
}
|
||||
}
|
||||
@ -368,9 +367,8 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
return null;
|
||||
}
|
||||
|
||||
private Object handleWithConversion(
|
||||
VirtualFrame frame, Object v, ApplicationNode convertNode
|
||||
) throws PanicException {
|
||||
private Object handleWithConversion(VirtualFrame frame, Object v, ApplicationNode convertNode)
|
||||
throws PanicException {
|
||||
if (convertNode == null) {
|
||||
var ret = findDirectMatch(frame, v);
|
||||
if (ret != null) {
|
||||
@ -400,10 +398,9 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
}
|
||||
|
||||
static abstract class MetaCheckNode extends ReadArgumentCheckNode {
|
||||
abstract static class MetaCheckNode extends ReadArgumentCheckNode {
|
||||
private final Object expectedMeta;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private String expectedTypeMessage;
|
||||
@CompilerDirectives.CompilationFinal private String expectedTypeMessage;
|
||||
|
||||
MetaCheckNode(String name, Object expectedMeta) {
|
||||
super(name);
|
||||
@ -416,10 +413,7 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization()
|
||||
Object verifyMetaObject(
|
||||
VirtualFrame frame, Object v,
|
||||
@Cached IsValueOfTypeNode isA
|
||||
) {
|
||||
Object verifyMetaObject(VirtualFrame frame, Object v, @Cached IsValueOfTypeNode isA) {
|
||||
if (isAllFitValue(v)) {
|
||||
return v;
|
||||
}
|
||||
@ -429,6 +423,7 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
String expectedTypeMessage() {
|
||||
if (expectedTypeMessage != null) {
|
||||
@ -447,18 +442,18 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
|
||||
private static final class LazyCheckRootNode extends RootNode {
|
||||
|
||||
@Child
|
||||
private ThunkExecutorNode evalThunk;
|
||||
@Child
|
||||
private ReadArgumentCheckNode check;
|
||||
@Child private ThunkExecutorNode evalThunk;
|
||||
@Child private ReadArgumentCheckNode check;
|
||||
|
||||
static final FunctionSchema SCHEMA = new FunctionSchema(
|
||||
static final FunctionSchema SCHEMA =
|
||||
new FunctionSchema(
|
||||
FunctionSchema.CallerFrameAccess.NONE,
|
||||
new ArgumentDefinition[]{new ArgumentDefinition(0, "delegate", null, null, ExecutionMode.EXECUTE)},
|
||||
new boolean[]{true},
|
||||
new ArgumentDefinition[] {
|
||||
new ArgumentDefinition(0, "delegate", null, null, ExecutionMode.EXECUTE)
|
||||
},
|
||||
new boolean[] {true},
|
||||
new CallArgumentInfo[0],
|
||||
new Annotation[0]
|
||||
);
|
||||
new Annotation[0]);
|
||||
|
||||
LazyCheckRootNode(TruffleLanguage<?> language, ReadArgumentCheckNode check) {
|
||||
super(language);
|
||||
@ -467,7 +462,7 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
Function wrapThunk(Function thunk) {
|
||||
return new Function(getCallTarget(), thunk.getScope(), SCHEMA, new Object[]{thunk}, null);
|
||||
return new Function(getCallTarget(), thunk.getScope(), SCHEMA, new Object[] {thunk}, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -483,10 +478,8 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
}
|
||||
|
||||
private static final class TypeCheckExpressionNode extends ExpressionNode {
|
||||
@Child
|
||||
private ExpressionNode original;
|
||||
@Child
|
||||
private ReadArgumentCheckNode check;
|
||||
@Child private ExpressionNode original;
|
||||
@Child private ReadArgumentCheckNode check;
|
||||
|
||||
TypeCheckExpressionNode(ExpressionNode original, ReadArgumentCheckNode check) {
|
||||
this.check = check;
|
||||
@ -505,5 +498,4 @@ public abstract class ReadArgumentCheckNode extends Node {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.profiles.BranchProfile;
import java.util.concurrent.locks.Lock;
import org.enso.interpreter.node.BaseNode;
import org.enso.interpreter.node.callable.ExecuteCallNode;
import org.enso.interpreter.node.callable.InvokeCallableNode;
@ -16,8 +17,6 @@ import org.enso.interpreter.runtime.callable.function.FunctionSchema;
import org.enso.interpreter.runtime.control.TailCallException;
import org.enso.interpreter.runtime.state.State;

import java.util.concurrent.locks.Lock;

/** Handles runtime function currying and oversaturated (eta-expanded) calls. */
@NodeInfo(description = "Handles runtime currying and eta-expansion")
public class CurryNode extends BaseNode {
@ -107,8 +106,9 @@ public class CurryNode extends BaseNode {
if (!postApplicationSchema.hasOversaturatedArgs()) {
var value = doCall(frame, function, callerInfo, state, arguments);
if (defaultsExecutionMode.isExecute()
&& (value instanceof Function || (value instanceof AtomConstructor cons
&& cons.getConstructorFunction().getSchema().isFullyApplied()))) {
&& (value instanceof Function
|| (value instanceof AtomConstructor cons
&& cons.getConstructorFunction().getSchema().isFullyApplied()))) {
keepExecutingProfile.enter();
if (oversaturatedCallableNode == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
@ -133,24 +133,28 @@ public class CurryNode extends BaseNode {
return value;
}
} else {
var evaluatedVal = loopingCall.executeDispatch(frame, function, callerInfo, state, arguments, null);
var evaluatedVal =
loopingCall.executeDispatch(frame, function, callerInfo, state, arguments, null);

return this.oversaturatedCallableNode.execute(
evaluatedVal, frame, state, oversaturatedArguments);
}
} else {
return
new Function(
function.getCallTarget(),
function.getScope(),
postApplicationSchema,
arguments,
oversaturatedArguments);
return new Function(
function.getCallTarget(),
function.getScope(),
postApplicationSchema,
arguments,
oversaturatedArguments);
}
}

private Object doCall(
VirtualFrame frame, Function function, CallerInfo callerInfo, State state, Object[] arguments) {
VirtualFrame frame,
Function function,
CallerInfo callerInfo,
State state,
Object[] arguments) {
return switch (getTailStatus()) {
case TAIL_DIRECT -> directCall.executeCall(frame, function, callerInfo, state, arguments);
case TAIL_LOOP -> throw new TailCallException(function, callerInfo, arguments);

@ -77,8 +77,9 @@ public abstract class IndirectCurryNode extends Node {
var value =
doCall(frame, function, callerInfo, state, arguments, isTail, directCall, loopingCall);
if (defaultsExecutionMode.isExecute()
&& (value instanceof Function || (value instanceof AtomConstructor cons
&& cons.getConstructorFunction().getSchema().isFullyApplied()))) {
&& (value instanceof Function
|| (value instanceof AtomConstructor cons
&& cons.getConstructorFunction().getSchema().isFullyApplied()))) {
return oversaturatedCallableNode.execute(
value,
frame,
@ -92,7 +93,8 @@ public abstract class IndirectCurryNode extends Node {
return value;
}
} else {
var evaluatedVal = loopingCall.executeDispatch(frame, function, callerInfo, state, arguments, null);
var evaluatedVal =
loopingCall.executeDispatch(frame, function, callerInfo, state, arguments, null);

return oversaturatedCallableNode.execute(
evaluatedVal,
@ -106,11 +108,11 @@ public abstract class IndirectCurryNode extends Node {
}
} else {
return new Function(
function.getCallTarget(),
function.getScope(),
postApplicationSchema,
arguments,
oversaturatedArguments);
function.getCallTarget(),
function.getScope(),
postApplicationSchema,
arguments,
oversaturatedArguments);
}
}

@ -100,7 +100,8 @@ public abstract class InvokeFunctionNode extends BaseNode {
@Cached("build(cachedSchema, argumentMapping, getArgumentsExecutionMode())")
ArgumentSorterNode mappingNode,
@Cached(
"build(argumentMapping, getDefaultsExecutionMode(), getArgumentsExecutionMode(), getTailStatus())")
"build(argumentMapping, getDefaultsExecutionMode(), getArgumentsExecutionMode(),"
+ " getTailStatus())")
CurryNode curryNode) {
ArgumentSorterNode.MappedArguments mappedArguments =
mappingNode.execute(callerFrame, function, state, arguments);
@ -201,7 +202,9 @@ public abstract class InvokeFunctionNode extends BaseNode {
return argumentsExecutionMode;
}

/** @return the source section for this node. */
/**
* @return the source section for this node.
*/
@Override
public SourceSection getSourceSection() {
Node parent = getParent();

@ -80,8 +80,7 @@ public class CreateFunctionNode extends ExpressionNode {
return schema.getArgumentInfos();
}

/** Optionally offers {@link AvoidIdInstrumentationTag}.
*/
/** Optionally offers {@link AvoidIdInstrumentationTag}. */
@Override
public boolean hasTag(Class<? extends Tag> tag) {
if (AvoidIdInstrumentationTag.class == tag) {

@ -1,7 +1,6 @@
package org.enso.interpreter.node.controlflow.caseexpr;

import com.oracle.truffle.api.nodes.Node;

import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.data.EnsoObject;

@ -13,5 +12,4 @@ record BranchResult(boolean isMatched, Object result) implements EnsoObject {
static BranchResult success(Object result) {
return new BranchResult(true, result);
}

}

@ -1,9 +1,5 @@
package org.enso.interpreter.node.controlflow.caseexpr;

import java.math.BigInteger;

import org.enso.interpreter.runtime.number.EnsoBigInteger;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.RootCallTarget;
import com.oracle.truffle.api.dsl.Fallback;
@ -13,6 +9,8 @@ import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.NodeInfo;
import com.oracle.truffle.api.profiles.CountingConditionProfile;
import java.math.BigInteger;
import org.enso.interpreter.runtime.number.EnsoBigInteger;

@NodeInfo(shortName = "NumericLiteralMatch", description = "Allows matching on numeric literals")
public abstract class NumericLiteralBranchNode extends BranchNode {
@ -46,19 +44,20 @@ public abstract class NumericLiteralBranchNode extends BranchNode {
Object state,
Object target,
@CachedLibrary(limit = "1") InteropLibrary interop) {
var taken = switch (literal) {
case Long l -> target instanceof Long t && l.longValue() == t.longValue();
case Double d -> target instanceof Double t && d.doubleValue() == t.doubleValue();
case BigInteger b -> target instanceof EnsoBigInteger e && compare(b, e.asBigInteger());
default -> throw CompilerDirectives.shouldNotReachHere();
};
var taken =
switch (literal) {
case Long l -> target instanceof Long t && l.longValue() == t.longValue();
case Double d -> target instanceof Double t && d.doubleValue() == t.doubleValue();
case BigInteger b -> target instanceof EnsoBigInteger e && compare(b, e.asBigInteger());
default -> throw CompilerDirectives.shouldNotReachHere();
};
if (numProfile.profile(taken)) accept(frame, state, new Object[0]);
}

@Fallback
void doOther(VirtualFrame frame, Object state, Object target) {}

@CompilerDirectives.TruffleBoundary(allowInlining=true)
@CompilerDirectives.TruffleBoundary(allowInlining = true)
private boolean compare(BigInteger b1, BigInteger b2) {
return b1.equals(b2);
}

@ -24,7 +24,9 @@ public class QualifiedAccessorNode extends RootNode {
this.atomConstructor = atomConstructor;
}

/** @return the atom constructor. */
/**
* @return the atom constructor.
*/
public AtomConstructor getAtomConstructor() {
return atomConstructor;
}

@ -1,17 +1,16 @@
|
||||
package org.enso.interpreter.node.expression.builtin;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.IntStream;
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.runtime.callable.argument.ArgumentDefinition;
|
||||
import org.enso.interpreter.runtime.callable.atom.AtomConstructor;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
import org.enso.interpreter.runtime.scope.ModuleScope;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
/** A base class for all classes annotated with @BuiltinType */
|
||||
public abstract class Builtin {
|
||||
public record Cons(String name, List<String> params) {
|
||||
@ -20,8 +19,10 @@ public abstract class Builtin {
|
||||
}
|
||||
|
||||
private AtomConstructor build(EnsoLanguage language, ModuleScope scope, Type type) {
|
||||
var res = new AtomConstructor(name, scope, type,true);
|
||||
res.initializeFields(language, IntStream.range(0, params.size())
|
||||
var res = new AtomConstructor(name, scope, type, true);
|
||||
res.initializeFields(
|
||||
language,
|
||||
IntStream.range(0, params.size())
|
||||
.mapToObj(
|
||||
i ->
|
||||
new ArgumentDefinition(
|
||||
@ -35,7 +36,6 @@ public abstract class Builtin {
|
||||
|
||||
public Builtin() {
|
||||
name = this.getClass().getSimpleName().replaceAll("([^_A-Z])([A-Z])", "$1_$2");
|
||||
|
||||
}
|
||||
|
||||
private @CompilerDirectives.CompilationFinal Type type;
|
||||
@ -49,7 +49,8 @@ public abstract class Builtin {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
public final void initialize(EnsoLanguage language, ModuleScope scope, Map<Class<? extends Builtin>, Builtin> builtins) {
|
||||
public final void initialize(
|
||||
EnsoLanguage language, ModuleScope scope, Map<Class<? extends Builtin>, Builtin> builtins) {
|
||||
if (type == null) {
|
||||
Type supertype = null;
|
||||
if (getSuperType() != null) {
|
||||
@ -57,9 +58,10 @@ public abstract class Builtin {
|
||||
s.initialize(language, scope, builtins);
|
||||
supertype = s.getType();
|
||||
}
|
||||
type = containsValues() ?
|
||||
Type.create(name, scope, supertype, builtins.get(Any.class).getType(), true) :
|
||||
Type.createSingleton(name, scope, supertype, true);
|
||||
type =
|
||||
containsValues()
|
||||
? Type.create(name, scope, supertype, builtins.get(Any.class).getType(), true)
|
||||
: Type.createSingleton(name, scope, supertype, true);
|
||||
}
|
||||
if (constructors == null) {
|
||||
var conses = getDeclaredConstructors();
|
||||
@ -91,5 +93,4 @@ public abstract class Builtin {
|
||||
public final AtomConstructor[] getConstructors() {
|
||||
return constructors;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -10,7 +10,8 @@ import org.enso.interpreter.runtime.callable.argument.CallArgumentInfo;
type = "Any",
name = "catch_primitive",
description =
"If called on an error, executes the provided handler on the error's payload. Otherwise acts as identity.")
"If called on an error, executes the provided handler on the error's payload. Otherwise"
+ " acts as identity.")
public class CatchAnyNode extends Node {
private @Child InvokeCallableNode invokeCallableNode;


@ -13,7 +13,8 @@ import org.enso.interpreter.runtime.state.State;
type = "Error",
name = "catch_primitive",
description =
"If called on an error, executes the provided handler on the error's payload. Otherwise acts as identity.")
"If called on an error, executes the provided handler on the error's payload. Otherwise"
+ " acts as identity.")
public class CatchErrorNode extends Node {
private @Child InvokeCallableNode invokeCallableNode;


@ -14,7 +14,8 @@ import org.enso.interpreter.runtime.error.PanicException;
type = "Vector",
name = "from_polyglot_array",
description =
"Creates a Vector by providing its underlying storage as a polyglot array. The underlying array should be guaranteed to never be mutated.",
"Creates a Vector by providing its underlying storage as a polyglot array. The underlying"
+ " array should be guaranteed to never be mutated.",
autoRegister = false)
public abstract class FromPolyglotArrayBuiltinVectorNode extends Node {


@ -33,6 +33,7 @@ public abstract class HostValueToEnsoNode extends Node {
public static HostValueToEnsoNode getUncached() {
return HostValueToEnsoNodeGen.getUncached();
}

/**
* Converts an arbitrary value to a value usable within Enso code.
*

@ -1,5 +1,16 @@
|
||||
package org.enso.interpreter.node.expression.builtin.meta;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.library.ExportLibrary;
|
||||
import com.oracle.truffle.api.library.ExportMessage;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.node.callable.InvokeCallableNode;
|
||||
import org.enso.interpreter.node.callable.dispatch.InvokeFunctionNode;
|
||||
@ -16,19 +27,6 @@ import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
|
||||
import org.enso.interpreter.runtime.error.PanicException;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.TruffleObject;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.library.ExportLibrary;
|
||||
import com.oracle.truffle.api.library.ExportMessage;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.RootNode;
|
||||
|
||||
@BuiltinMethod(
|
||||
type = "Meta",
|
||||
name = "atom_with_hole_builtin",
|
||||
@ -49,22 +47,21 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
@NeverDefault
|
||||
static InvokeCallableNode callWithHole() {
|
||||
return InvokeCallableNode.build(
|
||||
new CallArgumentInfo[] {new CallArgumentInfo()},
|
||||
InvokeCallableNode.DefaultsExecutionMode.EXECUTE,
|
||||
InvokeCallableNode.ArgumentsExecutionMode.PRE_EXECUTED);
|
||||
new CallArgumentInfo[] {new CallArgumentInfo()},
|
||||
InvokeCallableNode.DefaultsExecutionMode.EXECUTE,
|
||||
InvokeCallableNode.ArgumentsExecutionMode.PRE_EXECUTED);
|
||||
}
|
||||
|
||||
@Specialization
|
||||
Object doExecute(
|
||||
VirtualFrame frame,
|
||||
Object factory,
|
||||
State state,
|
||||
@Cached("callWithHole()") InvokeCallableNode iop,
|
||||
@Cached SwapAtomFieldNode swapNode
|
||||
) {
|
||||
VirtualFrame frame,
|
||||
Object factory,
|
||||
State state,
|
||||
@Cached("callWithHole()") InvokeCallableNode iop,
|
||||
@Cached SwapAtomFieldNode swapNode) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var lazy = new HoleInAtom();
|
||||
var result = iop.execute(factory, frame, state, new Object[] { lazy });
|
||||
var result = iop.execute(factory, frame, state, new Object[] {lazy});
|
||||
if (result instanceof Atom atom) {
|
||||
var index = swapNode.findHoleIndex(atom, lazy);
|
||||
if (index >= 0) {
|
||||
@ -82,8 +79,7 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
int index;
|
||||
Function function;
|
||||
|
||||
HoleInAtom() {
|
||||
}
|
||||
HoleInAtom() {}
|
||||
|
||||
void init(Atom result, int index, Function function) {
|
||||
this.result = result;
|
||||
@ -91,26 +87,30 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
this.function = function;
|
||||
}
|
||||
|
||||
@ExportMessage boolean hasMembers() {
|
||||
return true;
|
||||
@ExportMessage
|
||||
boolean hasMembers() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@ExportMessage boolean isMemberReadable(String member) {
|
||||
return switch (member) {
|
||||
case "value", "fill" -> true;
|
||||
default -> false;
|
||||
};
|
||||
@ExportMessage
|
||||
boolean isMemberReadable(String member) {
|
||||
return switch (member) {
|
||||
case "value", "fill" -> true;
|
||||
default -> false;
|
||||
};
|
||||
}
|
||||
|
||||
@ExportMessage boolean isMemberInvocable(String member) {
|
||||
return switch (member) {
|
||||
case "fill" -> true;
|
||||
default -> false;
|
||||
};
|
||||
@ExportMessage
|
||||
boolean isMemberInvocable(String member) {
|
||||
return switch (member) {
|
||||
case "fill" -> true;
|
||||
default -> false;
|
||||
};
|
||||
}
|
||||
|
||||
@ExportMessage Object getMembers(boolean includeInternal) {
|
||||
return ArrayLikeHelpers.wrapStrings("value", "fill");
|
||||
@ExportMessage
|
||||
Object getMembers(boolean includeInternal) {
|
||||
return ArrayLikeHelpers.wrapStrings("value", "fill");
|
||||
}
|
||||
|
||||
@ExportMessage
|
||||
@ -126,9 +126,10 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
|
||||
@ExportMessage
|
||||
Object invokeMember(
|
||||
String name, Object[] args,
|
||||
@Cached(value="buildWithArity(1)", allowUncached=true) InvokeFunctionNode invoke
|
||||
) throws UnknownIdentifierException {
|
||||
String name,
|
||||
Object[] args,
|
||||
@Cached(value = "buildWithArity(1)", allowUncached = true) InvokeFunctionNode invoke)
|
||||
throws UnknownIdentifierException {
|
||||
if ("fill".equals(name)) {
|
||||
if (args.length == 0) {
|
||||
return function;
|
||||
@ -144,20 +145,26 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
return "Meta.atom_with_hole";
|
||||
}
|
||||
}
|
||||
|
||||
static final class SwapAtomFieldNode extends RootNode {
|
||||
private final FunctionSchema schema;
|
||||
@CompilerDirectives.CompilationFinal
|
||||
private int lastIndex = -1;
|
||||
@CompilerDirectives.CompilationFinal private int lastIndex = -1;
|
||||
@Child private StructsLibrary structs = StructsLibrary.getFactory().createDispatched(10);
|
||||
|
||||
private SwapAtomFieldNode() {
|
||||
super(null);
|
||||
this.schema = new FunctionSchema(FunctionSchema.CallerFrameAccess.NONE, new ArgumentDefinition[]{
|
||||
new ArgumentDefinition(0, "lazy", null, null, ArgumentDefinition.ExecutionMode.EXECUTE),
|
||||
new ArgumentDefinition(1, "value", null, null, ArgumentDefinition.ExecutionMode.EXECUTE)
|
||||
}, new boolean[]{
|
||||
true, false
|
||||
}, new CallArgumentInfo[0], new Annotation[0]);
|
||||
this.schema =
|
||||
new FunctionSchema(
|
||||
FunctionSchema.CallerFrameAccess.NONE,
|
||||
new ArgumentDefinition[] {
|
||||
new ArgumentDefinition(
|
||||
0, "lazy", null, null, ArgumentDefinition.ExecutionMode.EXECUTE),
|
||||
new ArgumentDefinition(
|
||||
1, "value", null, null, ArgumentDefinition.ExecutionMode.EXECUTE)
|
||||
},
|
||||
new boolean[] {true, false},
|
||||
new CallArgumentInfo[0],
|
||||
new Annotation[0]);
|
||||
}
|
||||
|
||||
@NeverDefault
|
||||
@ -191,23 +198,17 @@ public abstract class AtomWithAHoleNode extends Node {
|
||||
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
private int findHoleIndexLoop(Object[] arr, HoleInAtom lazy) {
|
||||
for (int i = 0; i < arr.length; i++) {
|
||||
if (arr[i] == lazy) {
|
||||
return i;
|
||||
}
|
||||
for (int i = 0; i < arr.length; i++) {
|
||||
if (arr[i] == lazy) {
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
Function createFn(HoleInAtom lazy) {
|
||||
var preArgs = new Object[]{lazy, null};
|
||||
return new Function(
|
||||
getCallTarget(),
|
||||
null,
|
||||
schema,
|
||||
preArgs,
|
||||
new Object[]{}
|
||||
);
|
||||
var preArgs = new Object[] {lazy, null};
|
||||
return new Function(getCallTarget(), null, schema, preArgs, new Object[] {});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -31,12 +31,8 @@ public abstract class EnsoProjectNode extends Node {
return EnsoProjectNodeGen.create();
}


/**
* A weak reference to the context in which this node was last executed.
*/
@CompilationFinal
private WeakReference<EnsoContext> previousCtxRef = new WeakReference<>(null);
/** A weak reference to the context in which this node was last executed. */
@CompilationFinal private WeakReference<EnsoContext> previousCtxRef = new WeakReference<>(null);

private Object cachedProjectDescr;

@ -72,7 +68,8 @@ public abstract class EnsoProjectNode extends Node {
: "Should skip the first frame, therefore, callNode should not be null";
var callRootNode = callNode.getRootNode();
assert callRootNode != null
: "Should be called only from Enso code, and thus, should always have a root node";
: "Should be called only from Enso code, and thus, should always have a"
+ " root node";
if (callRootNode instanceof EnsoRootNode ensoRootNode) {
var pkg = ensoRootNode.getModuleScope().getModule().getPackage();
// Don't return null, as that would signal to Truffle that we want to
@ -84,17 +81,19 @@ public abstract class EnsoProjectNode extends Node {
}
} else {
CompilerDirectives.transferToInterpreter();
throw EnsoContext.get(this).raiseAssertionPanic(this,
"Should not reach here: callRootNode = "
+ callRootNode
+ ". Probably not called from Enso?", null);
throw EnsoContext.get(this)
.raiseAssertionPanic(
this,
"Should not reach here: callRootNode = "
+ callRootNode
+ ". Probably not called from Enso?",
null);
}
},
// The first frame is always Enso_Project.enso_project
1);
if (pkgOpt.isPresent()) {
cachedProjectDescr =
createProjectDescriptionAtom(ctx, pkgOpt.get());
cachedProjectDescr = createProjectDescriptionAtom(ctx, pkgOpt.get());
} else {
cachedProjectDescr = notInModuleError(ctx);
}
@ -106,8 +105,7 @@ public abstract class EnsoProjectNode extends Node {
@Specialization(guards = "!isNothing(module)")
@TruffleBoundary
public Object getOtherProjectDescr(
Object module,
@CachedLibrary(limit = "5") TypesLibrary typesLib) {
Object module, @CachedLibrary(limit = "5") TypesLibrary typesLib) {
var ctx = EnsoContext.get(this);
if (!typesLib.hasType(module)) {
return unsupportedArgsError(module);
@ -140,7 +138,7 @@ public abstract class EnsoProjectNode extends Node {
.getBuiltins()
.error()
.makeUnsupportedArgumentsError(
new Object[]{moduleActual}, "The `module` argument does not refer to a module"),
new Object[] {moduleActual}, "The `module` argument does not refer to a module"),
this);
}


@ -1,19 +1,5 @@
package org.enso.interpreter.node.expression.builtin.meta;

import java.math.BigInteger;

import org.enso.interpreter.dsl.AcceptsError;
import org.enso.interpreter.dsl.BuiltinMethod;
import org.enso.interpreter.node.expression.builtin.number.utils.BigIntegerOps;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.callable.atom.Atom;
import org.enso.interpreter.runtime.callable.atom.AtomConstructor;
import org.enso.interpreter.runtime.data.EnsoMultiValue;
import org.enso.interpreter.runtime.data.text.Text;
import org.enso.interpreter.runtime.error.WarningsLibrary;
import org.enso.interpreter.runtime.number.EnsoBigInteger;
import org.enso.polyglot.common_utils.Core_Text_Utils;

import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
@ -24,6 +10,18 @@ import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.Node;
import java.math.BigInteger;
import org.enso.interpreter.dsl.AcceptsError;
import org.enso.interpreter.dsl.BuiltinMethod;
import org.enso.interpreter.node.expression.builtin.number.utils.BigIntegerOps;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.interpreter.runtime.callable.atom.Atom;
import org.enso.interpreter.runtime.callable.atom.AtomConstructor;
import org.enso.interpreter.runtime.data.EnsoMultiValue;
import org.enso.interpreter.runtime.data.text.Text;
import org.enso.interpreter.runtime.error.WarningsLibrary;
import org.enso.interpreter.runtime.number.EnsoBigInteger;
import org.enso.polyglot.common_utils.Core_Text_Utils;

@BuiltinMethod(
type = "Any",
@ -76,11 +74,11 @@ public abstract class EqualsNode extends Node {
return false;
}

@Specialization(guards="interop.isBoolean(other)")
@Specialization(guards = "interop.isBoolean(other)")
boolean equalsBoolInterop(
boolean self, Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop
) {
boolean self,
Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
try {
return self == interop.asBoolean(other);
} catch (UnsupportedMessageException ex) {
@ -108,11 +106,11 @@ public abstract class EqualsNode extends Node {
return false;
}

@Specialization(guards="interop.fitsInLong(other)")
@Specialization(guards = "interop.fitsInLong(other)")
boolean equalsLongInterop(
long self, Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop
) {
long self,
Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
try {
return self == interop.asLong(other);
} catch (UnsupportedMessageException ex) {
@ -145,11 +143,11 @@ public abstract class EqualsNode extends Node {
return self == other.getValue().doubleValue();
}

@Specialization(guards="interop.fitsInDouble(other)")
@Specialization(guards = "interop.fitsInDouble(other)")
boolean equalsDoubleInterop(
double self, Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop
) {
double self,
Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
try {
return self == interop.asDouble(other);
} catch (UnsupportedMessageException ex) {
@ -195,14 +193,11 @@ public abstract class EqualsNode extends Node {
}

@TruffleBoundary
@Specialization(guards={
"!isPrimitiveValue(other)",
"interop.fitsInBigInteger(other)"
})
@Specialization(guards = {"!isPrimitiveValue(other)", "interop.fitsInBigInteger(other)"})
boolean equalsBigIntInterop(
EnsoBigInteger self, Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop
) {
EnsoBigInteger self,
Object other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
try {
var otherBigInteger = InteropLibrary.getUncached().asBigInteger(other);
return self.asBigInteger().equals(otherBigInteger);
@ -293,40 +288,39 @@ public abstract class EqualsNode extends Node {
return isSameObjectNode.execute(self, other) || equalsAtomNode.execute(self, other);
}


@Specialization
boolean equalsReverseBoolean(
TruffleObject self, boolean other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse
) {
TruffleObject self,
boolean other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse) {
return reverse.execute(other, self);
}

@Specialization
boolean equalsReverseLong(
TruffleObject self, long other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse
) {
TruffleObject self,
long other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse) {
return reverse.execute(other, self);
}

@Specialization
boolean equalsReverseDouble(
TruffleObject self, double other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse
) {
TruffleObject self,
double other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse) {
return reverse.execute(other, self);
}

@Specialization
boolean equalsReverseBigInt(
TruffleObject self, EnsoBigInteger other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse
) {
TruffleObject self,
EnsoBigInteger other,
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
@Shared("reverse") @Cached EqualsNode reverse) {
return reverse.execute(other, self);
}

@ -375,8 +369,6 @@ public abstract class EqualsNode extends Node {
}

static boolean isPrimitiveValue(Object object) {
return object instanceof Boolean
|| object instanceof Long
|| object instanceof Double;
return object instanceof Boolean || object instanceof Long || object instanceof Double;
}
}

@ -52,9 +52,11 @@ public abstract class GetAnnotationNode extends BaseNode {
CompilerDirectives.transferToInterpreter();
var ctx = EnsoContext.get(this);
var err = ctx.getBuiltins().error();
var payload = err.makeUnsupportedArgumentsError(new Object[] { method }, "Use .name to specify name of function");
var payload =
err.makeUnsupportedArgumentsError(
new Object[] {method}, "Use .name to specify name of function");
throw new PanicException(payload, this);
}
}
if (methodFunction != null) {
String parameterName = expectStringNode.execute(parameter);
Annotation annotation = methodFunction.getSchema().getAnnotation(parameterName);

@ -16,10 +16,11 @@ public class GetQualifiedTypeNameNode extends Node {
private @Child TypeOfNode typeOfNode = TypeOfNode.build();

Object execute(@AcceptsError Object value) {
var maybeType = switch (value) {
case Type type -> type;
default -> typeOfNode.execute(value);
};
var maybeType =
switch (value) {
case Type type -> type;
default -> typeOfNode.execute(value);
};
if (maybeType instanceof Type type) {
return Text.create(type.getQualifiedName().toString());
}

@ -1,10 +1,31 @@
|
||||
package org.enso.interpreter.node.expression.builtin.meta;
|
||||
|
||||
import com.google.common.base.Objects;
|
||||
import com.oracle.truffle.api.CompilerAsserts;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.Fallback;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.interop.ArityException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.interop.StopIterationException;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedTypeException;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.nodes.ExplodeLoop;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.profiles.ConditionProfile;
|
||||
import com.oracle.truffle.api.profiles.LoopConditionProfile;
|
||||
import java.math.BigDecimal;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.enso.interpreter.dsl.AcceptsError;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.node.callable.InvokeCallableNode.ArgumentsExecutionMode;
|
||||
@ -33,29 +54,6 @@ import org.enso.interpreter.runtime.scope.ModuleScope;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
import org.enso.polyglot.common_utils.Core_Text_Utils;
|
||||
|
||||
import com.google.common.base.Objects;
|
||||
import com.oracle.truffle.api.CompilerAsserts;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.Fallback;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.interop.ArityException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.interop.StopIterationException;
|
||||
import com.oracle.truffle.api.interop.UnknownIdentifierException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedTypeException;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.nodes.ExplodeLoop;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.profiles.ConditionProfile;
|
||||
import com.oracle.truffle.api.profiles.LoopConditionProfile;
|
||||
|
||||
/**
|
||||
* Implements {@code hash_code} functionality.
|
||||
*
|
||||
@ -75,10 +73,10 @@ import com.oracle.truffle.api.profiles.LoopConditionProfile;
|
||||
@BuiltinMethod(
|
||||
type = "Comparable",
|
||||
name = "hash_builtin",
|
||||
description = """
|
||||
Returns hash code of this atom. Use only for overriding default Comparator.
|
||||
description =
|
||||
"""
|
||||
)
|
||||
Returns hash code of this atom. Use only for overriding default Comparator.
|
||||
""")
|
||||
public abstract class HashCodeNode extends Node {
|
||||
|
||||
public static HashCodeNode build() {
|
||||
@ -149,7 +147,8 @@ public abstract class HashCodeNode extends Node {
|
||||
|
||||
@Specialization
|
||||
@TruffleBoundary
|
||||
long hashCodeForUnresolvedSymbol(UnresolvedSymbol unresolvedSymbol,
|
||||
long hashCodeForUnresolvedSymbol(
|
||||
UnresolvedSymbol unresolvedSymbol,
|
||||
@Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) {
|
||||
long nameHash = hashCodeNode.execute(unresolvedSymbol.getName());
|
||||
long scopeHash = hashCodeNode.execute(unresolvedSymbol.getScope());
|
||||
@ -157,39 +156,39 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization
|
||||
long hashCodeForUnresolvedConversion(UnresolvedConversion unresolvedConversion,
|
||||
long hashCodeForUnresolvedConversion(
|
||||
UnresolvedConversion unresolvedConversion,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
return hashCodeForModuleScope(unresolvedConversion.getScope(), interop);
|
||||
}
|
||||
|
||||
@Specialization
|
||||
long hashCodeForModuleScope(ModuleScope moduleScope,
|
||||
long hashCodeForModuleScope(
|
||||
ModuleScope moduleScope,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
return hashCodeForModule(moduleScope.getModule(), interop);
|
||||
}
|
||||
|
||||
@Specialization
|
||||
@TruffleBoundary
|
||||
long hashCodeForModule(Module module,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
long hashCodeForModule(
|
||||
Module module, @Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
return hashCodeForString(module.toString(), interop);
|
||||
}
|
||||
|
||||
@Specialization
|
||||
long hashCodeForFile(EnsoFile file,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
long hashCodeForFile(
|
||||
EnsoFile file, @Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
return hashCodeForString(file.getPath(), interop);
|
||||
}
|
||||
|
||||
/**
|
||||
* There is no specialization for {@link TypesLibrary#hasType(Object)}, because also
|
||||
* primitive values would fall into that specialization and it would be too complicated
|
||||
* to make that specialization disjunctive. So we rather specialize directly for
|
||||
* {@link Type}.
|
||||
* There is no specialization for {@link TypesLibrary#hasType(Object)}, because also primitive
|
||||
* values would fall into that specialization and it would be too complicated to make that
|
||||
* specialization disjunctive. So we rather specialize directly for {@link Type}.
|
||||
*/
|
||||
@Specialization
|
||||
long hashCodeForType(Type type,
|
||||
@Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) {
|
||||
long hashCodeForType(Type type, @Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) {
|
||||
if (EnsoContext.get(this).getNothing() == type) {
|
||||
// Nothing should be equal to `null`
|
||||
return 0;
|
||||
@ -205,10 +204,12 @@ public abstract class HashCodeNode extends Node {
|
||||
return nodes;
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"atomCtorCached == atom.getConstructor()",
|
||||
"customComparatorNode.execute(atom) == null",
|
||||
}, limit = "5")
|
||||
@Specialization(
|
||||
guards = {
|
||||
"atomCtorCached == atom.getConstructor()",
|
||||
"customComparatorNode.execute(atom) == null",
|
||||
},
|
||||
limit = "5")
|
||||
@ExplodeLoop
|
||||
long hashCodeForAtomWithDefaultComparator(
|
||||
Atom atom,
|
||||
@ -247,12 +248,8 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization(
|
||||
guards = {
|
||||
"atomCtorCached == atom.getConstructor()",
|
||||
"cachedComparator != null"
|
||||
},
|
||||
limit = "5"
|
||||
)
|
||||
guards = {"atomCtorCached == atom.getConstructor()", "cachedComparator != null"},
|
||||
limit = "5")
|
||||
long hashCodeForAtomWithCustomComparator(
|
||||
Atom atom,
|
||||
@Cached("atom.getConstructor()") AtomConstructor atomCtorCached,
|
||||
@ -260,11 +257,10 @@ public abstract class HashCodeNode extends Node {
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
|
||||
@Cached(value = "customComparatorNode.execute(atom)") Type cachedComparator,
|
||||
@Cached(value = "findHashMethod(cachedComparator)", allowUncached = true)
|
||||
Function compareMethod,
|
||||
@Cached(value = "createInvokeNode(compareMethod)") InvokeFunctionNode invokeFunctionNode
|
||||
) {
|
||||
Function compareMethod,
|
||||
@Cached(value = "createInvokeNode(compareMethod)") InvokeFunctionNode invokeFunctionNode) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var args = new Object[] { cachedComparator, atom};
|
||||
var args = new Object[] {cachedComparator, atom};
|
||||
var result = invokeFunctionNode.execute(compareMethod, null, State.create(ctx), args);
|
||||
if (!interop.isNumber(result)) {
|
||||
throw ctx.raiseAssertionPanic(this, "Custom comparator must return a number", null);
|
||||
@ -298,7 +294,8 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
|
||||
@TruffleBoundary
|
||||
@Specialization(replaces = {"hashCodeForAtomWithDefaultComparator", "hashCodeForAtomWithCustomComparator"})
|
||||
@Specialization(
|
||||
replaces = {"hashCodeForAtomWithDefaultComparator", "hashCodeForAtomWithCustomComparator"})
|
||||
long hashCodeForAtomUncached(Atom atom) {
|
||||
if (atom.getHashCode() != null) {
|
||||
return atom.getHashCode();
|
||||
@ -314,8 +311,7 @@ public abstract class HashCodeNode extends Node {
|
||||
InteropLibrary.getFactory().getUncached(),
|
||||
customComparator,
|
||||
compareMethod,
|
||||
createInvokeNode(compareMethod)
|
||||
);
|
||||
createInvokeNode(compareMethod));
|
||||
}
|
||||
|
||||
Object[] fields = StructsLibrary.getUncached().getFields(atom);
|
||||
@ -465,8 +461,7 @@ public abstract class HashCodeNode extends Node {
|
||||
|
||||
@Specialization
|
||||
long hashCodeForText(
|
||||
Text text,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
Text text, @Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
if (text.is_normalized()) {
|
||||
return text.toString().hashCode();
|
||||
} else {
|
||||
@ -478,9 +473,7 @@ public abstract class HashCodeNode extends Node {
|
||||
@Specialization(
|
||||
guards = {"interop.isString(selfStr)"},
|
||||
limit = "3")
|
||||
long hashCodeForString(
|
||||
Object selfStr,
|
||||
@CachedLibrary("selfStr") InteropLibrary interop) {
|
||||
long hashCodeForString(Object selfStr, @CachedLibrary("selfStr") InteropLibrary interop) {
|
||||
String str;
|
||||
try {
|
||||
str = interop.asString(selfStr);
|
||||
@ -491,10 +484,7 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization(
|
||||
guards = {
|
||||
"interop.hasArrayElements(selfArray)",
|
||||
"!interop.hasHashEntries(selfArray)"
|
||||
},
|
||||
guards = {"interop.hasArrayElements(selfArray)", "!interop.hasHashEntries(selfArray)"},
|
||||
limit = "3")
|
||||
long hashCodeForArray(
|
||||
Object selfArray,
|
||||
@ -527,10 +517,11 @@ public abstract class HashCodeNode extends Node {
|
||||
* Two maps are considered equal, if they have the same entries. Note that we do not care about
|
||||
* ordering.
|
||||
*/
|
||||
@Specialization(guards = {
|
||||
"interop.hasHashEntries(selfMap)",
|
||||
"!interop.hasArrayElements(selfMap)",
|
||||
})
|
||||
@Specialization(
|
||||
guards = {
|
||||
"interop.hasHashEntries(selfMap)",
|
||||
"!interop.hasArrayElements(selfMap)",
|
||||
})
|
||||
long hashCodeForMap(
|
||||
Object selfMap,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
|
||||
@ -555,17 +546,19 @@ public abstract class HashCodeNode extends Node {
|
||||
return Arrays.hashCode(new long[] {keysHashCode, valuesHashCode, mapSize});
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"!isAtom(objectWithMembers)",
|
||||
"!isJavaObject(objectWithMembers)",
|
||||
"interop.hasMembers(objectWithMembers)",
|
||||
"!interop.hasArrayElements(objectWithMembers)",
|
||||
"!interop.isTime(objectWithMembers)",
|
||||
"!interop.isDate(objectWithMembers)",
|
||||
"!interop.isTimeZone(objectWithMembers)",
|
||||
"!typesLib.hasType(objectWithMembers)",
|
||||
})
|
||||
long hashCodeForInteropObjectWithMembers(Object objectWithMembers,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"!isAtom(objectWithMembers)",
|
||||
"!isJavaObject(objectWithMembers)",
|
||||
"interop.hasMembers(objectWithMembers)",
|
||||
"!interop.hasArrayElements(objectWithMembers)",
|
||||
"!interop.isTime(objectWithMembers)",
|
||||
"!interop.isDate(objectWithMembers)",
|
||||
"!interop.isTimeZone(objectWithMembers)",
|
||||
"!typesLib.hasType(objectWithMembers)",
|
||||
})
|
||||
long hashCodeForInteropObjectWithMembers(
|
||||
Object objectWithMembers,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
|
||||
@CachedLibrary(limit = "5") TypesLibrary typesLib,
|
||||
@Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) {
|
||||
@ -587,12 +580,18 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
}
|
||||
return Arrays.hashCode(hashCodes);
|
||||
} catch (UnsupportedMessageException | InvalidArrayIndexException | UnknownIdentifierException e) {
|
||||
} catch (UnsupportedMessageException
|
||||
| InvalidArrayIndexException
|
||||
| UnknownIdentifierException e) {
|
||||
CompilerDirectives.transferToInterpreter();
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, String.format("An interop object (%s) has probably wrongly specified interop API"
|
||||
+ " for members.", objectWithMembers),
|
||||
e
|
||||
);
|
||||
throw EnsoContext.get(this)
|
||||
.raiseAssertionPanic(
|
||||
this,
|
||||
String.format(
|
||||
"An interop object (%s) has probably wrongly specified interop API"
|
||||
+ " for members.",
|
||||
objectWithMembers),
|
||||
e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -605,8 +604,7 @@ public abstract class HashCodeNode extends Node {
|
||||
|
||||
@Specialization(guards = "isJavaObject(hostObject)")
|
||||
long hashCodeForHostObject(
|
||||
Object hostObject,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
Object hostObject, @Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) {
|
||||
try {
|
||||
Object hashCodeRes = interop.invokeMember(hostObject, "hashCode");
|
||||
assert interop.fitsInInt(hashCodeRes);
|
||||
@ -620,12 +618,13 @@ public abstract class HashCodeNode extends Node {
|
||||
}
|
||||
|
||||
/**
|
||||
* Every host function has a unique fully qualified name, it is not a lambda.
|
||||
* We get the hashcode from the qualified name.
|
||||
* Every host function has a unique fully qualified name, it is not a lambda. We get the hashcode
|
||||
* from the qualified name.
|
||||
*/
|
||||
@TruffleBoundary
|
||||
@Specialization(guards = "isJavaFunction(hostFunction)")
|
||||
long hashCodeForHostFunction(Object hostFunction,
|
||||
long hashCodeForHostFunction(
|
||||
Object hostFunction,
|
||||
@Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop,
|
||||
@Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) {
|
||||
return hashCodeNode.execute(interop.toDisplayString(hostFunction));
|
||||
|
@ -1,5 +1,10 @@
|
||||
package org.enso.interpreter.node.expression.builtin.meta;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.RootCallTarget;
|
||||
import com.oracle.truffle.api.instrumentation.EventBinding;
|
||||
import com.oracle.truffle.api.interop.InteropException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.instrument.Timer;
|
||||
import org.enso.interpreter.node.callable.FunctionCallInstrumentationNode;
|
||||
@ -9,12 +14,6 @@ import org.enso.interpreter.runtime.data.EnsoObject;
|
||||
import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
|
||||
import org.enso.polyglot.debugger.IdExecutionService;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.RootCallTarget;
|
||||
import com.oracle.truffle.api.instrumentation.EventBinding;
|
||||
import com.oracle.truffle.api.interop.InteropException;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
|
||||
final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
|
||||
private final IdExecutionService service;
|
||||
@ -37,7 +36,13 @@ final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
this.handle = null;
|
||||
}
|
||||
|
||||
Instrumentor(Instrumentor orig, Object onEnter, Object onReturn, Object onReturnExpr, Object onCall, boolean activate) {
|
||||
Instrumentor(
|
||||
Instrumentor orig,
|
||||
Object onEnter,
|
||||
Object onReturn,
|
||||
Object onReturnExpr,
|
||||
Object onCall,
|
||||
boolean activate) {
|
||||
this.module = orig.module;
|
||||
this.service = orig.service;
|
||||
this.target = orig.target;
|
||||
@ -45,9 +50,7 @@ final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
this.onReturn = onReturn != null ? onReturn : orig.onReturn;
|
||||
this.onReturnExpr = onReturnExpr != null ? onReturnExpr : orig.onReturnExpr;
|
||||
this.onCall = onCall != null ? onCall : orig.onCall;
|
||||
this.handle = !activate ? null : service.bind(
|
||||
module, target, this, new Timer.Disabled()
|
||||
);
|
||||
this.handle = !activate ? null : service.bind(module, target, this, new Timer.Disabled());
|
||||
}
|
||||
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
@ -67,7 +70,8 @@ final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
if (onEnter != null) {
|
||||
var ret = InteropLibrary.getUncached().execute(onEnter, info.getId().toString());
|
||||
ret = InteropLibrary.getUncached().isNull(ret) ? null : ret;
|
||||
return handle.isDisposed() ? null : ret; }
|
||||
return handle.isDisposed() ? null : ret;
|
||||
}
|
||||
} catch (InteropException ignored) {
|
||||
}
|
||||
return null;
|
||||
@ -78,10 +82,11 @@ final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
try {
|
||||
if (onReturn != null) {
|
||||
var iop = InteropLibrary.getUncached();
|
||||
var result = onReturnExpr == null || !iop.isString(onReturnExpr) ?
|
||||
info.getResult()
|
||||
:
|
||||
InstrumentorEvalNode.asSuspendedEval(EnsoLanguage.get(target.getRootNode()), onReturnExpr, info);
|
||||
var result =
|
||||
onReturnExpr == null || !iop.isString(onReturnExpr)
|
||||
? info.getResult()
|
||||
: InstrumentorEvalNode.asSuspendedEval(
|
||||
EnsoLanguage.get(target.getRootNode()), onReturnExpr, info);
|
||||
iop.execute(onReturn, info.getId().toString(), result);
|
||||
}
|
||||
} catch (Throwable ignored) {
|
||||
@ -101,12 +106,13 @@ final class Instrumentor implements EnsoObject, IdExecutionService.Callbacks {
|
||||
args[i] = EnsoContext.get(null).getBuiltins().nothing();
|
||||
}
|
||||
}
|
||||
var ret = InteropLibrary.getUncached().execute(
|
||||
onCall,
|
||||
info.getId().toString(),
|
||||
call.getFunction(),
|
||||
ArrayLikeHelpers.asVectorWithCheckAt(args)
|
||||
);
|
||||
var ret =
|
||||
InteropLibrary.getUncached()
|
||||
.execute(
|
||||
onCall,
|
||||
info.getId().toString(),
|
||||
call.getFunction(),
|
||||
ArrayLikeHelpers.asVectorWithCheckAt(args));
|
||||
ret = InteropLibrary.getUncached().isNull(ret) ? null : ret;
|
||||
return handle.isDisposed() ? null : ret;
|
||||
}
|
||||
|
@ -1,6 +1,8 @@
|
||||
package org.enso.interpreter.node.expression.builtin.meta;
|
||||
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.callable.UnresolvedSymbol;
|
||||
@ -10,18 +12,13 @@ import org.enso.interpreter.runtime.data.vector.ArrayLikeAtNode;
|
||||
import org.enso.interpreter.runtime.data.vector.ArrayLikeLengthNode;
|
||||
import org.enso.interpreter.runtime.error.PanicException;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
|
||||
@BuiltinMethod(
|
||||
type = "Meta",
|
||||
name = "instrumentor_builtin",
|
||||
description = "Handles instrumentation operations.",
|
||||
autoRegister = false)
|
||||
public class InstrumentorBuiltin extends Node {
|
||||
@Child
|
||||
private ArrayLikeAtNode atNode = ArrayLikeAtNode.create();
|
||||
@Child private ArrayLikeAtNode atNode = ArrayLikeAtNode.create();
|
||||
|
||||
Object execute(Text operation, Object args) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
@ -29,14 +26,15 @@ public class InstrumentorBuiltin extends Node {
|
||||
try {
|
||||
Object ret = "newBuilder".equals(op) ? newBuilder(ctx, atNode.executeAt(args, 0)) : null;
|
||||
if (atNode.executeAt(args, 0) instanceof Instrumentor b) {
|
||||
ret = switch (op) {
|
||||
case "onEnter" -> onEnter(b, atNode.executeAt(args, 1));
|
||||
case "onReturn" -> onReturn(b, atNode.executeAt(args, 1), atNode.executeAt(args, 2));
|
||||
case "onCall" -> onCall(b, atNode.executeAt(args, 1));
|
||||
case "activate" -> activate(b, atNode.executeAt(args, 1));
|
||||
case "deactivate" -> b.deactivate();
|
||||
default -> null;
|
||||
};
|
||||
ret =
|
||||
switch (op) {
|
||||
case "onEnter" -> onEnter(b, atNode.executeAt(args, 1));
|
||||
case "onReturn" -> onReturn(b, atNode.executeAt(args, 1), atNode.executeAt(args, 2));
|
||||
case "onCall" -> onCall(b, atNode.executeAt(args, 1));
|
||||
case "activate" -> activate(b, atNode.executeAt(args, 1));
|
||||
case "deactivate" -> b.deactivate();
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
if (ret == null) {
|
||||
var err = ctx.getBuiltins().error().makeUnimplemented(operation.toString());
|
||||
@ -58,7 +56,8 @@ public class InstrumentorBuiltin extends Node {
|
||||
if (fnAndType != null) {
|
||||
var service = ctx.getIdValueExtractor();
|
||||
if (service != null) {
|
||||
return new Instrumentor(symbol.getScope().getModule(), service, fnAndType.getLeft().getCallTarget());
|
||||
return new Instrumentor(
|
||||
symbol.getScope().getModule(), service, fnAndType.getLeft().getCallTarget());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -10,11 +10,14 @@ import org.enso.interpreter.runtime.data.text.Text;
|
||||
import org.enso.interpreter.runtime.error.DataflowError;
|
||||
import org.enso.interpreter.runtime.number.EnsoBigInteger;
|
||||
|
||||
@BuiltinMethod(type = "Integer", name = "parse", description = """
|
||||
Parse integer number""", autoRegister = false)
|
||||
@BuiltinMethod(
|
||||
type = "Integer",
|
||||
name = "parse",
|
||||
description = """
|
||||
Parse integer number""",
|
||||
autoRegister = false)
|
||||
public final class ParseIntegerNode extends IntegerNode {
|
||||
@Child
|
||||
ToJavaStringNode toJavaString = ToJavaStringNode.build();
|
||||
@Child ToJavaStringNode toJavaString = ToJavaStringNode.build();
|
||||
private final BranchProfile noEx1 = BranchProfile.create();
|
||||
private final BranchProfile noEx2 = BranchProfile.create();
|
||||
|
||||
@ -37,4 +40,3 @@ public final class ParseIntegerNode extends IntegerNode {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -19,7 +19,9 @@ public class ToEnsoNumberNode extends Node {
|
||||
this.fitsProfile = fitsProfile;
|
||||
}
|
||||
|
||||
/** @return a new instance of this node. */
|
||||
/**
|
||||
* @return a new instance of this node.
|
||||
*/
|
||||
@NeverDefault
|
||||
public static ToEnsoNumberNode create() {
|
||||
return new ToEnsoNumberNode(CountingConditionProfile.create());
|
||||
|
@ -28,25 +28,30 @@ public abstract class CustomComparatorNode extends Node {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given atom's comparator if it is a comparator that is different
|
||||
* than the default (internal) one.
|
||||
* Returns the given atom's comparator if it is a comparator that is different than the default
|
||||
* (internal) one.
|
||||
*
|
||||
* @param atom Atom for which we check whether it has custom comparator
|
||||
* @return {@code null} if the atom has default comparator. Otherwise it returns the real comparator type.
|
||||
* @return {@code null} if the atom has default comparator. Otherwise it returns the real
|
||||
* comparator type.
|
||||
*/
|
||||
public abstract Type execute(Atom atom);
|
||||
|
||||
@Specialization
|
||||
Type hasCustomComparatorCached(
|
||||
Atom atom,
|
||||
@Cached(value = "buildConvertionNode()", allowUncached = true) InvokeConversionNode convertNode,
|
||||
@Cached(value = "createConversion()", allowUncached = true) UnresolvedConversion conversion
|
||||
) {
|
||||
@Cached(value = "buildConvertionNode()", allowUncached = true)
|
||||
InvokeConversionNode convertNode,
|
||||
@Cached(value = "createConversion()", allowUncached = true) UnresolvedConversion conversion) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var comparableType = ctx.getBuiltins().comparable().getType();
|
||||
var state = State.create(ctx);
|
||||
Object res = convertNode.execute(null, state, conversion, comparableType, atom, new Object[] { comparableType, atom });
|
||||
return res instanceof Type result && result != ctx.getBuiltins().defaultComparator().getType() ? result : null;
|
||||
Object res =
|
||||
convertNode.execute(
|
||||
null, state, conversion, comparableType, atom, new Object[] {comparableType, atom});
|
||||
return res instanceof Type result && result != ctx.getBuiltins().defaultComparator().getType()
|
||||
? result
|
||||
: null;
|
||||
}
|
||||
|
||||
@NeverDefault
|
||||
@ -61,6 +66,7 @@ public abstract class CustomComparatorNode extends Node {
|
||||
CallArgumentInfo[] argSchema = new CallArgumentInfo[2];
|
||||
argSchema[0] = new CallArgumentInfo();
|
||||
argSchema[1] = new CallArgumentInfo();
|
||||
return InvokeConversionNode.build(argSchema, DefaultsExecutionMode.EXECUTE, ArgumentsExecutionMode.EXECUTE, 1);
|
||||
return InvokeConversionNode.build(
|
||||
argSchema, DefaultsExecutionMode.EXECUTE, ArgumentsExecutionMode.EXECUTE, 1);
|
||||
}
|
||||
}
|
||||
|
@ -16,21 +16,19 @@ import java.time.LocalDateTime;
|
||||
import java.time.ZonedDateTime;
|
||||
import org.enso.interpreter.dsl.AcceptsError;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.node.expression.builtin.number.utils.BigIntegerOps;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.data.text.Text;
|
||||
import org.enso.interpreter.runtime.error.DataflowError;
|
||||
import org.enso.interpreter.runtime.error.WarningsLibrary;
|
||||
import org.enso.interpreter.runtime.number.EnsoBigInteger;
|
||||
|
||||
@BuiltinMethod(
|
||||
type = "Comparable",
|
||||
name = "less_than_builtin",
|
||||
description = """
|
||||
description =
|
||||
"""
|
||||
Returns true if self is less than `other`. Or return Nothing if the values are
|
||||
not comparable.
|
||||
"""
|
||||
)
|
||||
""")
|
||||
@GenerateUncached
|
||||
public abstract class LessThanNode extends Node {
|
||||
|
||||
@ -126,23 +124,27 @@ public abstract class LessThanNode extends Node {
|
||||
}
|
||||
|
||||
/**
|
||||
* If one of the objects has warnings attached, just treat it as an object without any
|
||||
* warnings.
|
||||
* If one of the objects has warnings attached, just treat it as an object without any warnings.
|
||||
*/
|
||||
@Specialization(guards = {
|
||||
"selfWarnLib.hasWarnings(selfWithWarnings) || otherWarnLib.hasWarnings(otherWithWarnings)"
|
||||
}, limit = "3")
|
||||
Object lessWithWarnings(Object selfWithWarnings, Object otherWithWarnings,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"selfWarnLib.hasWarnings(selfWithWarnings) || otherWarnLib.hasWarnings(otherWithWarnings)"
|
||||
},
|
||||
limit = "3")
|
||||
Object lessWithWarnings(
|
||||
Object selfWithWarnings,
|
||||
Object otherWithWarnings,
|
||||
@CachedLibrary("selfWithWarnings") WarningsLibrary selfWarnLib,
|
||||
@CachedLibrary("otherWithWarnings") WarningsLibrary otherWarnLib,
|
||||
@Cached LessThanNode lessThanNode
|
||||
) {
|
||||
@Cached LessThanNode lessThanNode) {
|
||||
try {
|
||||
Object self =
|
||||
selfWarnLib.hasWarnings(selfWithWarnings) ? selfWarnLib.removeWarnings(selfWithWarnings)
|
||||
selfWarnLib.hasWarnings(selfWithWarnings)
|
||||
? selfWarnLib.removeWarnings(selfWithWarnings)
|
||||
: selfWithWarnings;
|
||||
Object other =
|
||||
otherWarnLib.hasWarnings(otherWithWarnings) ? otherWarnLib.removeWarnings(otherWithWarnings)
|
||||
otherWarnLib.hasWarnings(otherWithWarnings)
|
||||
? otherWarnLib.removeWarnings(otherWithWarnings)
|
||||
: otherWithWarnings;
|
||||
return lessThanNode.execute(self, other);
|
||||
} catch (UnsupportedMessageException e) {
|
||||
@ -151,7 +153,9 @@ public abstract class LessThanNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization(limit = "3")
|
||||
boolean lessTexts(Text selfText, Text otherText,
|
||||
boolean lessTexts(
|
||||
Text selfText,
|
||||
Text otherText,
|
||||
@CachedLibrary("selfText") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherText") InteropLibrary otherInterop) {
|
||||
if (selfText.is_normalized() && otherText.is_normalized()) {
|
||||
@ -162,14 +166,12 @@ public abstract class LessThanNode extends Node {
|
||||
}
|
||||
|
||||
@Specialization(
|
||||
guards = {
|
||||
"selfInterop.isString(selfStr)",
|
||||
"otherInterop.isString(otherStr)"
|
||||
},
|
||||
limit = "5"
|
||||
)
|
||||
guards = {"selfInterop.isString(selfStr)", "otherInterop.isString(otherStr)"},
|
||||
limit = "5")
|
||||
@TruffleBoundary
|
||||
boolean lessInteropStrings(Object selfStr, Object otherStr,
|
||||
boolean lessInteropStrings(
|
||||
Object selfStr,
|
||||
Object otherStr,
|
||||
@CachedLibrary("selfStr") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherStr") InteropLibrary otherInterop) {
|
||||
String selfJavaString;
|
||||
@ -180,23 +182,17 @@ public abstract class LessThanNode extends Node {
|
||||
} catch (UnsupportedMessageException e) {
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, null, e);
|
||||
}
|
||||
return Normalizer.compare(
|
||||
selfJavaString,
|
||||
otherJavaString,
|
||||
Normalizer.FOLD_CASE_DEFAULT
|
||||
) < 0;
|
||||
return Normalizer.compare(selfJavaString, otherJavaString, Normalizer.FOLD_CASE_DEFAULT) < 0;
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"selfInterop.isBoolean(selfBoolean)",
|
||||
"otherInterop.isBoolean(otherBoolean)"
|
||||
}, limit = "3")
|
||||
@Specialization(
|
||||
guards = {"selfInterop.isBoolean(selfBoolean)", "otherInterop.isBoolean(otherBoolean)"},
|
||||
limit = "3")
|
||||
boolean lessInteropBoolean(
|
||||
Object selfBoolean,
|
||||
Object otherBoolean,
|
||||
@CachedLibrary("selfBoolean") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherBoolean") InteropLibrary otherInterop
|
||||
) {
|
||||
@CachedLibrary("otherBoolean") InteropLibrary otherInterop) {
|
||||
try {
|
||||
return !selfInterop.asBoolean(selfBoolean) && otherInterop.asBoolean(otherBoolean);
|
||||
} catch (UnsupportedMessageException e) {
|
||||
@ -205,105 +201,114 @@ public abstract class LessThanNode extends Node {
|
||||
}
|
||||
|
||||
@TruffleBoundary
|
||||
@Specialization(guards = {
|
||||
"selfInterop.isDate(selfZonedDateTime)",
|
||||
"selfInterop.isTime(selfZonedDateTime)",
|
||||
"selfInterop.isTimeZone(selfZonedDateTime)",
|
||||
"otherInterop.isDate(otherZonedDateTime)",
|
||||
"otherInterop.isTime(otherZonedDateTime)",
|
||||
"otherInterop.isTimeZone(otherZonedDateTime)"
|
||||
}, limit = "3")
|
||||
boolean lessInteropZonedDateTimes(Object selfZonedDateTime, Object otherZonedDateTime,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"selfInterop.isDate(selfZonedDateTime)",
|
||||
"selfInterop.isTime(selfZonedDateTime)",
|
||||
"selfInterop.isTimeZone(selfZonedDateTime)",
|
||||
"otherInterop.isDate(otherZonedDateTime)",
|
||||
"otherInterop.isTime(otherZonedDateTime)",
|
||||
"otherInterop.isTimeZone(otherZonedDateTime)"
|
||||
},
|
||||
limit = "3")
|
||||
boolean lessInteropZonedDateTimes(
|
||||
Object selfZonedDateTime,
|
||||
Object otherZonedDateTime,
|
||||
@CachedLibrary("selfZonedDateTime") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherZonedDateTime") InteropLibrary otherInterop) {
|
||||
try {
|
||||
var self = ZonedDateTime.of(
|
||||
selfInterop.asDate(selfZonedDateTime),
|
||||
selfInterop.asTime(selfZonedDateTime),
|
||||
selfInterop.asTimeZone(selfZonedDateTime)
|
||||
);
|
||||
var other = ZonedDateTime.of(
|
||||
otherInterop.asDate(otherZonedDateTime),
|
||||
otherInterop.asTime(otherZonedDateTime),
|
||||
otherInterop.asTimeZone(otherZonedDateTime)
|
||||
);
|
||||
var self =
|
||||
ZonedDateTime.of(
|
||||
selfInterop.asDate(selfZonedDateTime),
|
||||
selfInterop.asTime(selfZonedDateTime),
|
||||
selfInterop.asTimeZone(selfZonedDateTime));
|
||||
var other =
|
||||
ZonedDateTime.of(
|
||||
otherInterop.asDate(otherZonedDateTime),
|
||||
otherInterop.asTime(otherZonedDateTime),
|
||||
otherInterop.asTimeZone(otherZonedDateTime));
|
||||
return self.compareTo(other) < 0;
|
||||
} catch (UnsupportedMessageException e) {
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, null, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"selfInterop.isDate(selfDateTime)",
|
||||
"selfInterop.isTime(selfDateTime)",
|
||||
"!selfInterop.isTimeZone(selfDateTime)",
|
||||
"otherInterop.isDate(otherDateTime)",
|
||||
"otherInterop.isTime(otherDateTime)",
|
||||
"!otherInterop.isTimeZone(otherDateTime)"
|
||||
}, limit = "3")
|
||||
boolean lessInteropDateTimes(Object selfDateTime, Object otherDateTime,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"selfInterop.isDate(selfDateTime)",
|
||||
"selfInterop.isTime(selfDateTime)",
|
||||
"!selfInterop.isTimeZone(selfDateTime)",
|
||||
"otherInterop.isDate(otherDateTime)",
|
||||
"otherInterop.isTime(otherDateTime)",
|
||||
"!otherInterop.isTimeZone(otherDateTime)"
|
||||
},
|
||||
limit = "3")
|
||||
boolean lessInteropDateTimes(
|
||||
Object selfDateTime,
|
||||
Object otherDateTime,
|
||||
@CachedLibrary("selfDateTime") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherDateTime") InteropLibrary otherInterop) {
|
||||
try {
|
||||
var self = LocalDateTime.of(
|
||||
selfInterop.asDate(selfDateTime),
|
||||
selfInterop.asTime(selfDateTime)
|
||||
);
|
||||
var other = LocalDateTime.of(
|
||||
otherInterop.asDate(otherDateTime),
|
||||
otherInterop.asTime(otherDateTime)
|
||||
);
|
||||
var self =
|
||||
LocalDateTime.of(selfInterop.asDate(selfDateTime), selfInterop.asTime(selfDateTime));
|
||||
var other =
|
||||
LocalDateTime.of(otherInterop.asDate(otherDateTime), otherInterop.asTime(otherDateTime));
|
||||
return self.isBefore(other);
|
||||
} catch (UnsupportedMessageException e) {
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, null, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"selfInterop.isDate(selfDate)",
|
||||
"!selfInterop.isTime(selfDate)",
|
||||
"!selfInterop.isTimeZone(selfDate)",
|
||||
"otherInterop.isDate(otherDate)",
|
||||
"!otherInterop.isTime(otherDate)",
|
||||
"!otherInterop.isTimeZone(otherDate)"
|
||||
}, limit = "3")
|
||||
boolean lessInteropDates(Object selfDate, Object otherDate,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"selfInterop.isDate(selfDate)",
|
||||
"!selfInterop.isTime(selfDate)",
|
||||
"!selfInterop.isTimeZone(selfDate)",
|
||||
"otherInterop.isDate(otherDate)",
|
||||
"!otherInterop.isTime(otherDate)",
|
||||
"!otherInterop.isTimeZone(otherDate)"
|
||||
},
|
||||
limit = "3")
|
||||
boolean lessInteropDates(
|
||||
Object selfDate,
|
||||
Object otherDate,
|
||||
@CachedLibrary("selfDate") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherDate") InteropLibrary otherInterop) {
|
||||
try {
|
||||
return selfInterop.asDate(selfDate).isBefore(
|
||||
otherInterop.asDate(otherDate)
|
||||
);
|
||||
return selfInterop.asDate(selfDate).isBefore(otherInterop.asDate(otherDate));
|
||||
} catch (UnsupportedMessageException e) {
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, null, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"!selfInterop.isDate(selfTime)",
|
||||
"selfInterop.isTime(selfTime)",
|
||||
"!selfInterop.isTimeZone(selfTime)",
|
||||
"!otherInterop.isDate(otherTime)",
|
||||
"otherInterop.isTime(otherTime)",
|
||||
"!otherInterop.isTimeZone(otherTime)"
|
||||
}, limit = "3")
|
||||
boolean lessInteropTimes(Object selfTime, Object otherTime,
|
||||
@Specialization(
|
||||
guards = {
|
||||
"!selfInterop.isDate(selfTime)",
|
||||
"selfInterop.isTime(selfTime)",
|
||||
"!selfInterop.isTimeZone(selfTime)",
|
||||
"!otherInterop.isDate(otherTime)",
|
||||
"otherInterop.isTime(otherTime)",
|
||||
"!otherInterop.isTimeZone(otherTime)"
|
||||
},
|
||||
limit = "3")
|
||||
boolean lessInteropTimes(
|
||||
Object selfTime,
|
||||
Object otherTime,
|
||||
@CachedLibrary("selfTime") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherTime") InteropLibrary otherInterop) {
|
||||
try {
|
||||
return selfInterop.asTime(selfTime).isBefore(
|
||||
otherInterop.asTime(otherTime)
|
||||
);
|
||||
return selfInterop.asTime(selfTime).isBefore(otherInterop.asTime(otherTime));
|
||||
} catch (UnsupportedMessageException e) {
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, null, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Specialization(guards = {
|
||||
"selfInterop.isDuration(selfDuration)",
|
||||
"otherInterop.isDuration(otherDuration)"
|
||||
}, limit = "3")
|
||||
boolean lessInteropDuration(Object selfDuration, Object otherDuration,
|
||||
@Specialization(
|
||||
guards = {"selfInterop.isDuration(selfDuration)", "otherInterop.isDuration(otherDuration)"},
|
||||
limit = "3")
|
||||
boolean lessInteropDuration(
|
||||
Object selfDuration,
|
||||
Object otherDuration,
|
||||
@CachedLibrary("selfDuration") InteropLibrary selfInterop,
|
||||
@CachedLibrary("otherDuration") InteropLibrary otherInterop) {
|
||||
try {
|
||||
@ -323,6 +328,4 @@ public abstract class LessThanNode extends Node {
|
||||
private Object nothing() {
|
||||
return EnsoContext.get(this).getNothing();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -29,32 +29,44 @@ public class Ordering extends Builtin {
|
||||
}
|
||||
}
|
||||
|
||||
/** @return the Less constructor */
|
||||
/**
|
||||
* @return the Less constructor
|
||||
*/
|
||||
public AtomConstructor less() {
|
||||
return getConstructors()[0];
|
||||
}
|
||||
|
||||
/** @return the Equal constructor */
|
||||
/**
|
||||
* @return the Equal constructor
|
||||
*/
|
||||
public AtomConstructor equal() {
|
||||
return getConstructors()[1];
|
||||
}
|
||||
|
||||
/** @return the Greater constructor */
|
||||
/**
|
||||
* @return the Greater constructor
|
||||
*/
|
||||
public AtomConstructor greater() {
|
||||
return getConstructors()[2];
|
||||
}
|
||||
|
||||
/** @return a new instance of Less */
|
||||
/**
|
||||
* @return a new instance of Less
|
||||
*/
|
||||
public Atom newLess() {
|
||||
return less().newInstance();
|
||||
}
|
||||
|
||||
/** @return a new instance of Equal */
|
||||
/**
|
||||
* @return a new instance of Equal
|
||||
*/
|
||||
public Atom newEqual() {
|
||||
return equal().newInstance();
|
||||
}
|
||||
|
||||
/** @return a new instance of Greater */
|
||||
/**
|
||||
* @return a new instance of Greater
|
||||
*/
|
||||
public Atom newGreater() {
|
||||
return greater().newInstance();
|
||||
}
|
||||
|
@ -1,5 +1,16 @@
|
||||
package org.enso.interpreter.node.expression.builtin.ordering;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
@ -9,7 +20,6 @@ import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.enso.interpreter.dsl.AcceptsError;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.node.callable.dispatch.CallOptimiserNode;
|
||||
@ -34,18 +44,6 @@ import org.enso.interpreter.runtime.error.WithWarnings;
|
||||
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
|
||||
/**
|
||||
* Sorts a vector with elements that have only Default_Comparator, thus, only elements with a
|
||||
* builtin type, which is the most common scenario for sorting.
|
||||
@ -189,8 +187,7 @@ public abstract class SortVectorNode extends Node {
|
||||
var problemBehavior = ProblemBehavior.fromInt((int) problemBehaviorNum);
|
||||
// Split into groups
|
||||
List<Object> elems = readInteropArray(lengthNode, atNode, warningsLib, self);
|
||||
List<Type> comparators =
|
||||
readInteropArray(lengthNode, atNode, warningsLib, comparatorsArray);
|
||||
List<Type> comparators = readInteropArray(lengthNode, atNode, warningsLib, comparatorsArray);
|
||||
List<Function> compareFuncs =
|
||||
readInteropArray(lengthNode, atNode, warningsLib, compareFuncsArray);
|
||||
List<Group> groups = splitByComparators(elems, comparators, compareFuncs);
|
||||
@ -425,7 +422,8 @@ public abstract class SortVectorNode extends Node {
|
||||
return 50;
|
||||
} else {
|
||||
// Type is not a builtin type
|
||||
throw EnsoContext.get(this).raiseAssertionPanic(this, "Should be a builtin type: " + builtinType, null);
|
||||
throw EnsoContext.get(this)
|
||||
.raiseAssertionPanic(this, "Should be a builtin type: " + builtinType, null);
|
||||
}
|
||||
}
|
||||
|
||||
@ -449,8 +447,7 @@ public abstract class SortVectorNode extends Node {
|
||||
|
||||
/** Returns true iff the given array of comparators is all Default_Comparator */
|
||||
boolean areAllDefaultComparators(
|
||||
ArrayLikeLengthNode lengthNode, ArrayLikeAtNode atNode, Object comparators
|
||||
) {
|
||||
ArrayLikeLengthNode lengthNode, ArrayLikeAtNode atNode, Object comparators) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var longSize = 0L;
|
||||
try {
|
||||
@ -666,9 +663,8 @@ public abstract class SortVectorNode extends Node {
|
||||
/**
|
||||
* Helper class that returns the comparator function.
|
||||
*
|
||||
* The class is introduced to handle the presence of {@code UnresolvedSymbol},
|
||||
* as the comparator function, which has to be first resolved before it
|
||||
* can be used to compare values.
|
||||
* <p>The class is introduced to handle the presence of {@code UnresolvedSymbol}, as the
|
||||
* comparator function, which has to be first resolved before it can be used to compare values.
|
||||
*/
|
||||
private abstract class Compare {
|
||||
|
||||
@ -687,7 +683,6 @@ public abstract class SortVectorNode extends Node {
|
||||
* @return a non-null comparator function.
|
||||
*/
|
||||
abstract Function get(Object arg);
|
||||
|
||||
}
|
||||
|
||||
private final class CompareFromFunction extends Compare {
|
||||
@ -719,21 +714,22 @@ public abstract class SortVectorNode extends Node {
|
||||
private final MethodResolverNode methodResolverNode;
|
||||
private final TypesLibrary typesLibrary;
|
||||
|
||||
private CompareFromUnresolvedSymbol(UnresolvedSymbol unresolvedSymbol,
|
||||
MethodResolverNode methodResolvedNode,
|
||||
TypesLibrary typesLibrary) {
|
||||
private CompareFromUnresolvedSymbol(
|
||||
UnresolvedSymbol unresolvedSymbol,
|
||||
MethodResolverNode methodResolvedNode,
|
||||
TypesLibrary typesLibrary) {
|
||||
this.unresolvedSymbol = unresolvedSymbol;
|
||||
this.methodResolverNode = methodResolvedNode;
|
||||
this.typesLibrary = typesLibrary;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean hasFunctionSelfArgument(Object definedOn) {
|
||||
var resolvedFunction = methodResolverNode.expectNonNull(definedOn, typesLibrary.getType(definedOn), unresolvedSymbol);
|
||||
return resolvedFunction.getSchema().getArgumentsCount() > 0 &&
|
||||
resolvedFunction.getSchema().getArgumentInfos()[0].getName().equals("self");
|
||||
|
||||
var resolvedFunction =
|
||||
methodResolverNode.expectNonNull(
|
||||
definedOn, typesLibrary.getType(definedOn), unresolvedSymbol);
|
||||
return resolvedFunction.getSchema().getArgumentsCount() > 0
|
||||
&& resolvedFunction.getSchema().getArgumentInfos()[0].getName().equals("self");
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -750,6 +746,7 @@ public abstract class SortVectorNode extends Node {
|
||||
private final class GenericSortComparator extends SortComparator {
|
||||
|
||||
private final boolean ascending;
|
||||
|
||||
/**
|
||||
* Either function from `by` parameter to the `Vector.sort` method, or the `compare` function
|
||||
* extracted from the comparator for the appropriate group.
|
||||
@ -806,8 +803,10 @@ public abstract class SortVectorNode extends Node {
|
||||
Object yConverted;
|
||||
if (hasCustomOnFunc) {
|
||||
// onFunc cannot have `self` argument, we assume it has just one argument.
|
||||
xConverted = callNode.executeDispatch(null, onFunc.get(x), null, state, new Object[]{x}, null);
|
||||
yConverted = callNode.executeDispatch(null, onFunc.get(y), null, state, new Object[]{y}, null);
|
||||
xConverted =
|
||||
callNode.executeDispatch(null, onFunc.get(x), null, state, new Object[] {x}, null);
|
||||
yConverted =
|
||||
callNode.executeDispatch(null, onFunc.get(y), null, state, new Object[] {y}, null);
|
||||
} else {
|
||||
xConverted = x;
|
||||
yConverted = y;
|
||||
@ -818,7 +817,8 @@ public abstract class SortVectorNode extends Node {
|
||||
} else {
|
||||
args = new Object[] {xConverted, yConverted};
|
||||
}
|
||||
Object res = callNode.executeDispatch(null, compareFunc.get(xConverted), null, state, args, null);
|
||||
Object res =
|
||||
callNode.executeDispatch(null, compareFunc.get(xConverted), null, state, args, null);
|
||||
if (res == less) {
|
||||
return ascending ? -1 : 1;
|
||||
} else if (res == equal) {
|
||||
@ -839,20 +839,30 @@ public abstract class SortVectorNode extends Node {
|
||||
* Checks value given for {@code by} parameter and converts it to {@link Function}. Throw a
|
||||
* dataflow error otherwise.
|
||||
*/
|
||||
private Compare checkAndConvertByFunc(Object byFuncObj, TypesLibrary typesLibrary, MethodResolverNode methodResolverNode) {
|
||||
private Compare checkAndConvertByFunc(
|
||||
Object byFuncObj, TypesLibrary typesLibrary, MethodResolverNode methodResolverNode) {
|
||||
return checkAndConvertFunction(
|
||||
byFuncObj, "Unsupported argument for `by`, expected a method with two arguments", 2, 3,
|
||||
typesLibrary, methodResolverNode);
|
||||
byFuncObj,
|
||||
"Unsupported argument for `by`, expected a method with two arguments",
|
||||
2,
|
||||
3,
|
||||
typesLibrary,
|
||||
methodResolverNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the value given for {@code on} parameter and converts it to {@link Function}. Throws a
|
||||
* dataflow error otherwise.
|
||||
*/
|
||||
private Compare checkAndConvertOnFunc(Object onFuncObj, TypesLibrary typesLibrary, MethodResolverNode methodResolverNode) {
|
||||
private Compare checkAndConvertOnFunc(
|
||||
Object onFuncObj, TypesLibrary typesLibrary, MethodResolverNode methodResolverNode) {
|
||||
return checkAndConvertFunction(
|
||||
onFuncObj, "Unsupported argument for `on`, expected a method with one argument", 1, 1,
|
||||
typesLibrary, methodResolverNode);
|
||||
onFuncObj,
|
||||
"Unsupported argument for `on`, expected a method with one argument",
|
||||
1,
|
||||
1,
|
||||
typesLibrary,
|
||||
methodResolverNode);
|
||||
}
|
||||
|
||||
/**
@ -862,8 +872,12 @@ public abstract class SortVectorNode extends Node {
* @param typesLibrary types library for resolving the dispatch type for unresolved symbols.
*/
private Compare checkAndConvertFunction(
Object funcObj, String errMsg, int minArgCount, int maxArgCount,
TypesLibrary typesLibrary, MethodResolverNode methodResolverNode) {
Object funcObj,
String errMsg,
int minArgCount,
int maxArgCount,
TypesLibrary typesLibrary,
MethodResolverNode methodResolverNode) {
if (funcObj instanceof UnresolvedSymbol unresolved) {
return new CompareFromUnresolvedSymbol(unresolved, methodResolverNode, typesLibrary);
}
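For context on the sorting logic reformatted above: GenericSortComparator optionally projects each element with the on function and then applies the by comparison, flipping the result for descending order. A minimal standalone sketch of that shape in plain Java, with hypothetical class and method names that are not part of this change:

import java.util.Comparator;
import java.util.List;
import java.util.function.Function;

final class SortByOnSketch {
  // Builds a comparator that first maps each element through `on` and then compares
  // the projected values with `by`, reversing the result for descending order.
  static <T, K> Comparator<T> comparator(Function<T, K> on, Comparator<K> by, boolean ascending) {
    Comparator<T> cmp = (x, y) -> by.compare(on.apply(x), on.apply(y));
    return ascending ? cmp : cmp.reversed();
  }

  public static void main(String[] args) {
    var words = List.of("Enso", "sbt", "formatter");
    // Sort by length, descending.
    var sorted = words.stream()
        .sorted(comparator(String::length, Comparator.<Integer>naturalOrder(), false))
        .toList();
    System.out.println(sorted); // [formatter, Enso, sbt]
  }
}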
@ -15,7 +15,8 @@ import org.enso.interpreter.runtime.state.State;
|
||||
type = "Managed_Resource",
|
||||
name = "with",
|
||||
description =
|
||||
"Applies the passed action to the underlying resource managed by the passed Managed_Resource object.")
|
||||
"Applies the passed action to the underlying resource managed by the passed"
|
||||
+ " Managed_Resource object.")
|
||||
public abstract class WithNode extends Node {
|
||||
|
||||
private @Child InvokeCallableNode invokeCallableNode =
|
||||
|
@ -82,8 +82,7 @@ public abstract class AssertNode extends Node {
|
||||
} catch (UnsupportedMessageException e) {
|
||||
if (actionRes instanceof DataflowError dataflowError) {
|
||||
var txt = Text.create("Result of assert action is a dataflow error: " + dataflowError);
|
||||
throw new PanicException(
|
||||
builtins.error().makeAssertionError(txt), this);
|
||||
throw new PanicException(builtins.error().makeAssertionError(txt), this);
|
||||
} else {
|
||||
var typeError =
|
||||
builtins
|
||||
|
@ -15,7 +15,9 @@ public abstract class GCNode extends Node {
|
||||
|
||||
public abstract Object execute();
|
||||
|
||||
/** @return A new GCNode. */
|
||||
/**
|
||||
* @return A new GCNode.
|
||||
*/
|
||||
public static GCNode build() {
|
||||
return GCNodeGen.create();
|
||||
}
|
||||
|
@ -14,7 +14,8 @@ import org.enso.interpreter.runtime.state.State;
|
||||
type = "Runtime",
|
||||
name = "no_inline_with_arg",
|
||||
description =
|
||||
"Runs its first argument applied to the second argument without the possibility of the call or its argument getting inlined.",
|
||||
"Runs its first argument applied to the second argument without the possibility of the call"
|
||||
+ " or its argument getting inlined.",
|
||||
autoRegister = false)
|
||||
public class NoInlineWithArgNode extends Node {
|
||||
private @Child InvokeCallableNode invokeCallableNode;
|
||||
|
@ -1,18 +1,16 @@
|
||||
package org.enso.interpreter.node.expression.builtin.text.util;
|
||||
|
||||
import org.enso.interpreter.node.expression.builtin.meta.TypeOfNode;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.interop.UnsupportedMessageException;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import org.enso.interpreter.node.expression.builtin.meta.TypeOfNode;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
|
||||
public final class TypeToDisplayTextNode extends Node {
|
||||
@Child
|
||||
private TypeOfNode typeOfNode;
|
||||
@Child private TypeOfNode typeOfNode;
|
||||
|
||||
private TypeToDisplayTextNode(TypeOfNode typeOfNode) {
|
||||
this.typeOfNode = typeOfNode;
|
||||
|
@ -114,7 +114,8 @@ public abstract class EvalNode extends BaseNode {
|
||||
@Cached("toJavaStringNode.execute(expression)") String expressionStr,
|
||||
@Cached("callerInfo") CallerInfo cachedCallerInfo,
|
||||
@Cached(
|
||||
"parseExpression(callerInfo.getLocalScope(), callerInfo.getModuleScope(), expressionStr)")
|
||||
"parseExpression(callerInfo.getLocalScope(), callerInfo.getModuleScope(),"
|
||||
+ " expressionStr)")
|
||||
RootCallTarget cachedCallTarget,
|
||||
@Shared("thunkExecutorNode") @Cached("build()") ThunkExecutorNode thunkExecutorNode) {
|
||||
Function thunk = Function.thunk(cachedCallTarget, callerInfo.getFrame());
|
||||
|
@ -4,7 +4,6 @@ import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.nodes.NodeInfo;
|
||||
import java.util.function.Predicate;
|
||||
import org.enso.compiler.core.IR;
|
||||
import org.enso.compiler.core.ir.Expression;
|
||||
import org.enso.compiler.core.ir.Literal;
|
||||
import org.enso.interpreter.node.ExpressionNode;
|
||||
@ -13,7 +12,8 @@ import org.enso.interpreter.runtime.tag.Patchable;
|
||||
|
||||
/** Generic literal node. */
|
||||
@NodeInfo(shortName = "Literal", description = "Constant literal expression")
|
||||
final class PatchableLiteralNode extends ExpressionNode implements Patchable, Predicate<Expression> {
|
||||
final class PatchableLiteralNode extends ExpressionNode
|
||||
implements Patchable, Predicate<Expression> {
|
||||
private final LiteralNode node;
|
||||
private Object value;
|
||||
|
||||
@ -72,5 +72,4 @@ final class PatchableLiteralNode extends ExpressionNode implements Patchable, Pr
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,49 +1,5 @@
|
||||
package org.enso.interpreter.runtime;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.PrintStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import org.enso.compiler.Compiler;
|
||||
import org.enso.compiler.PackageRepository;
|
||||
import org.enso.compiler.PackageRepositoryUtils;
|
||||
import org.enso.compiler.data.CompilerConfig;
|
||||
import org.enso.distribution.DistributionManager;
|
||||
import org.enso.distribution.locking.LockManager;
|
||||
import org.enso.editions.LibraryName;
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.OptionsHelper;
|
||||
import org.enso.interpreter.instrument.NotificationHandler;
|
||||
import org.enso.interpreter.runtime.builtin.Builtins;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
import org.enso.interpreter.runtime.scope.TopLevelScope;
|
||||
import org.enso.interpreter.runtime.state.ExecutionEnvironment;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
import org.enso.interpreter.runtime.util.TruffleFileSystem;
|
||||
import org.enso.librarymanager.ProjectLoadingFailure;
|
||||
import org.enso.librarymanager.resolved.LibraryRoot;
|
||||
import org.enso.pkg.Package;
|
||||
import org.enso.pkg.PackageManager;
|
||||
import org.enso.pkg.QualifiedName;
|
||||
import org.enso.polyglot.LanguageInfo;
|
||||
import org.enso.polyglot.RuntimeOptions;
|
||||
import org.enso.polyglot.debugger.IdExecutionService;
|
||||
import org.graalvm.options.OptionKey;
|
||||
|
||||
import com.oracle.truffle.api.Assumption;
|
||||
import com.oracle.truffle.api.CallTarget;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
@ -63,38 +19,74 @@ import com.oracle.truffle.api.io.TruffleProcessBuilder;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.object.Shape;
|
||||
import com.oracle.truffle.api.source.Source;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.PrintStream;
|
||||
import java.net.MalformedURLException;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.logging.Level;
|
||||
import org.enso.compiler.Compiler;
|
||||
import org.enso.compiler.PackageRepository;
|
||||
import org.enso.compiler.PackageRepositoryUtils;
|
||||
import org.enso.compiler.data.CompilerConfig;
|
||||
import org.enso.distribution.DistributionManager;
|
||||
import org.enso.distribution.locking.LockManager;
|
||||
import org.enso.editions.LibraryName;
|
||||
import org.enso.interpreter.EnsoLanguage;
|
||||
import org.enso.interpreter.OptionsHelper;
|
||||
import org.enso.interpreter.instrument.NotificationHandler;
|
||||
import org.enso.interpreter.runtime.builtin.Builtins;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
import org.enso.interpreter.runtime.data.text.Text;
|
||||
import org.enso.interpreter.runtime.error.PanicException;
|
||||
|
||||
import org.enso.interpreter.runtime.scope.TopLevelScope;
|
||||
import org.enso.interpreter.runtime.state.ExecutionEnvironment;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
import org.enso.interpreter.runtime.util.TruffleFileSystem;
|
||||
import org.enso.librarymanager.ProjectLoadingFailure;
|
||||
import org.enso.librarymanager.resolved.LibraryRoot;
|
||||
import org.enso.pkg.Package;
|
||||
import org.enso.pkg.PackageManager;
|
||||
import org.enso.pkg.QualifiedName;
|
||||
import org.enso.polyglot.LanguageInfo;
|
||||
import org.enso.polyglot.RuntimeOptions;
|
||||
import org.enso.polyglot.debugger.IdExecutionService;
|
||||
import org.graalvm.options.OptionKey;
|
||||
import scala.jdk.javaapi.OptionConverters;
|
||||
|
||||
/**
|
||||
* The language context is the internal state of the language that is associated
|
||||
* with each thread in a running Enso program.
|
||||
* The language context is the internal state of the language that is associated with each thread in
|
||||
* a running Enso program.
|
||||
*/
|
||||
public final class EnsoContext {
|
||||
|
||||
private static final TruffleLanguage.ContextReference<EnsoContext> REFERENCE
|
||||
= TruffleLanguage.ContextReference.create(EnsoLanguage.class);
|
||||
private static final TruffleLanguage.ContextReference<EnsoContext> REFERENCE =
|
||||
TruffleLanguage.ContextReference.create(EnsoLanguage.class);
|
||||
|
||||
private final EnsoLanguage language;
|
||||
private final Env environment;
|
||||
private final HostClassLoader hostClassLoader = new HostClassLoader();
|
||||
private final boolean assertionsEnabled;
|
||||
private final boolean isPrivateCheckDisabled;
|
||||
private @CompilationFinal
|
||||
Compiler compiler;
|
||||
private @CompilationFinal Compiler compiler;
|
||||
private final PrintStream out;
|
||||
private final PrintStream err;
|
||||
private final InputStream in;
|
||||
private final BufferedReader inReader;
|
||||
private @CompilationFinal
|
||||
PackageRepository packageRepository;
|
||||
private @CompilationFinal
|
||||
TopLevelScope topScope;
|
||||
private @CompilationFinal PackageRepository packageRepository;
|
||||
private @CompilationFinal TopLevelScope topScope;
|
||||
private final ThreadManager threadManager;
|
||||
private final ThreadExecutors threadExecutors;
|
||||
private final ResourceManager resourceManager;
|
||||
@ -126,12 +118,12 @@ public final class EnsoContext {
|
||||
* @param distributionManager a distribution manager
|
||||
*/
|
||||
public EnsoContext(
|
||||
EnsoLanguage language,
|
||||
String home,
|
||||
Env environment,
|
||||
NotificationHandler notificationHandler,
|
||||
LockManager lockManager,
|
||||
DistributionManager distributionManager) {
|
||||
EnsoLanguage language,
|
||||
String home,
|
||||
Env environment,
|
||||
NotificationHandler notificationHandler,
|
||||
LockManager lockManager,
|
||||
DistributionManager distributionManager) {
|
||||
this.language = language;
|
||||
this.environment = environment;
|
||||
this.out = new PrintStream(environment.out());
|
||||
@ -143,20 +135,20 @@ public final class EnsoContext {
|
||||
this.resourceManager = new ResourceManager(this);
|
||||
this.isInlineCachingDisabled = getOption(RuntimeOptions.DISABLE_INLINE_CACHES_KEY);
|
||||
var isParallelismEnabled = getOption(RuntimeOptions.ENABLE_AUTO_PARALLELISM_KEY);
|
||||
this.isIrCachingDisabled
|
||||
= getOption(RuntimeOptions.DISABLE_IR_CACHES_KEY) || isParallelismEnabled;
|
||||
this.isIrCachingDisabled =
|
||||
getOption(RuntimeOptions.DISABLE_IR_CACHES_KEY) || isParallelismEnabled;
|
||||
this.isPrivateCheckDisabled = getOption(RuntimeOptions.DISABLE_PRIVATE_CHECK_KEY);
|
||||
this.executionEnvironment = getOption(EnsoLanguage.EXECUTION_ENVIRONMENT);
|
||||
this.assertionsEnabled = shouldAssertionsBeEnabled();
|
||||
this.shouldWaitForPendingSerializationJobs
|
||||
= getOption(RuntimeOptions.WAIT_FOR_PENDING_SERIALIZATION_JOBS_KEY);
|
||||
this.compilerConfig
|
||||
= new CompilerConfig(
|
||||
isParallelismEnabled,
|
||||
true,
|
||||
!isPrivateCheckDisabled,
|
||||
getOption(RuntimeOptions.STRICT_ERRORS_KEY),
|
||||
scala.Option.empty());
|
||||
this.shouldWaitForPendingSerializationJobs =
|
||||
getOption(RuntimeOptions.WAIT_FOR_PENDING_SERIALIZATION_JOBS_KEY);
|
||||
this.compilerConfig =
|
||||
new CompilerConfig(
|
||||
isParallelismEnabled,
|
||||
true,
|
||||
!isPrivateCheckDisabled,
|
||||
getOption(RuntimeOptions.STRICT_ERRORS_KEY),
|
||||
scala.Option.empty());
|
||||
this.home = home;
|
||||
this.builtins = new Builtins(this);
|
||||
this.notificationHandler = notificationHandler;
|
||||
@ -165,46 +157,44 @@ public final class EnsoContext {
|
||||
this.warningsLimit = getOption(RuntimeOptions.WARNINGS_LIMIT_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform expensive initialization logic for the context.
|
||||
*/
|
||||
/** Perform expensive initialization logic for the context. */
|
||||
public void initialize() {
|
||||
TruffleFileSystem fs = new TruffleFileSystem();
|
||||
PackageManager<TruffleFile> packageManager = new PackageManager<>(fs);
|
||||
|
||||
Optional<TruffleFile> projectRoot = OptionsHelper.getProjectRoot(environment);
|
||||
Optional<Package<TruffleFile>> projectPackage
|
||||
= projectRoot.map(
|
||||
file
|
||||
-> packageManager
|
||||
.loadPackage(file)
|
||||
.fold(
|
||||
err -> {
|
||||
throw new ProjectLoadingFailure(file.getName(), err);
|
||||
},
|
||||
res -> res));
|
||||
Optional<Package<TruffleFile>> projectPackage =
|
||||
projectRoot.map(
|
||||
file ->
|
||||
packageManager
|
||||
.loadPackage(file)
|
||||
.fold(
|
||||
err -> {
|
||||
throw new ProjectLoadingFailure(file.getName(), err);
|
||||
},
|
||||
res -> res));
|
||||
|
||||
Optional<String> languageHome
|
||||
= OptionsHelper.getLanguageHomeOverride(environment).or(() -> Optional.ofNullable(home));
|
||||
Optional<String> languageHome =
|
||||
OptionsHelper.getLanguageHomeOverride(environment).or(() -> Optional.ofNullable(home));
|
||||
var editionOverride = OptionsHelper.getEditionOverride(environment);
|
||||
var resourceManager = new org.enso.distribution.locking.ResourceManager(lockManager);
|
||||
|
||||
packageRepository
|
||||
= DefaultPackageRepository.initializeRepository(
|
||||
OptionConverters.toScala(projectPackage),
|
||||
OptionConverters.toScala(languageHome),
|
||||
OptionConverters.toScala(editionOverride),
|
||||
distributionManager,
|
||||
resourceManager,
|
||||
this,
|
||||
builtins,
|
||||
notificationHandler);
|
||||
packageRepository =
|
||||
DefaultPackageRepository.initializeRepository(
|
||||
OptionConverters.toScala(projectPackage),
|
||||
OptionConverters.toScala(languageHome),
|
||||
OptionConverters.toScala(editionOverride),
|
||||
distributionManager,
|
||||
resourceManager,
|
||||
this,
|
||||
builtins,
|
||||
notificationHandler);
|
||||
topScope = new TopLevelScope(builtins, packageRepository);
|
||||
this.compiler
|
||||
= new Compiler(new TruffleCompilerContext(this), packageRepository, compilerConfig);
|
||||
this.compiler =
|
||||
new Compiler(new TruffleCompilerContext(this), packageRepository, compilerConfig);
|
||||
|
||||
projectPackage.ifPresent(
|
||||
pkg -> packageRepository.registerMainProjectPackage(pkg.libraryName(), pkg));
|
||||
pkg -> packageRepository.registerMainProjectPackage(pkg.libraryName(), pkg));
|
||||
|
||||
var preinit = environment.getOptions().get(RuntimeOptions.PREINITIALIZE_KEY);
|
||||
if (preinit != null && preinit.length() > 0) {
|
||||
@ -218,8 +208,7 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param node the location of context access. Pass {@code null} if not in a
|
||||
* node.
|
||||
* @param node the location of context access. Pass {@code null} if not in a node.
|
||||
* @return the proper context instance for the current {@link
|
||||
* com.oracle.truffle.api.TruffleContext}.
|
||||
*/
|
||||
@ -231,7 +220,8 @@ public final class EnsoContext {
|
||||
return ctx;
|
||||
}
|
||||
|
||||
private static final Assumption checkNodes = Truffle.getRuntime().createAssumption("context check");
|
||||
private static final Assumption checkNodes =
|
||||
Truffle.getRuntime().createAssumption("context check");
|
||||
private static final Set<Node> reportedNulllRootNodes = new HashSet<>();
|
||||
private static long checkUntil = Long.MAX_VALUE;
|
||||
|
||||
@ -241,15 +231,16 @@ public final class EnsoContext {
|
||||
checkNodes.invalidate();
|
||||
}
|
||||
if (reportedNulllRootNodes.add(n)) {
|
||||
var ex = new Exception("""
|
||||
var ex =
|
||||
new Exception(
|
||||
"""
|
||||
no root node for {n}
|
||||
with section: {s}
|
||||
with root nodes: {r}
|
||||
"""
|
||||
.replace("{n}", "" + n)
|
||||
.replace("{s}", "" + n.getEncapsulatingSourceSection())
|
||||
.replace("{r}", "" + n.getRootNode())
|
||||
);
|
||||
.replace("{n}", "" + n)
|
||||
.replace("{s}", "" + n.getEncapsulatingSourceSection())
|
||||
.replace("{r}", "" + n.getRootNode()));
|
||||
ex.printStackTrace();
|
||||
checkUntil = System.currentTimeMillis() + 10000;
|
||||
}
|
||||
@ -259,9 +250,7 @@ public final class EnsoContext {
|
||||
return REFERENCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs eventual cleanup before the context is disposed of.
|
||||
*/
|
||||
/** Performs eventual cleanup before the context is disposed of. */
|
||||
public void shutdown() {
|
||||
threadExecutors.shutdown();
|
||||
threadManager.shutdown();
|
||||
@ -296,15 +285,11 @@ public final class EnsoContext {
|
||||
/**
|
||||
* Gets the compiler instance.
|
||||
*
|
||||
* <p>
|
||||
* The compiler is the portion of the interpreter that performs static
|
||||
* analysis and transformation passes on the input program. A handle to the
|
||||
* compiler lets you execute various portions of the compilation pipeline,
|
||||
* including parsing, analysis, and final code generation.
|
||||
* <p>The compiler is the portion of the interpreter that performs static analysis and
|
||||
* transformation passes on the input program. A handle to the compiler lets you execute various
|
||||
* portions of the compilation pipeline, including parsing, analysis, and final code generation.
|
||||
*
|
||||
* <p>
|
||||
* Having this access available means that Enso programs can metaprogram Enso
|
||||
* itself.
|
||||
* <p>Having this access available means that Enso programs can metaprogram Enso itself.
|
||||
*
|
||||
* @return a handle to the compiler
|
||||
*/
|
||||
@ -356,12 +341,11 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the module name associated with a given file, using the environment
|
||||
* packages information.
|
||||
* Fetches the module name associated with a given file, using the environment packages
|
||||
* information.
|
||||
*
|
||||
* @param path the path to decode.
|
||||
* @return a qualified name of the module corresponding to the file, if
|
||||
* exists.
|
||||
* @return a qualified name of the module corresponding to the file, if exists.
|
||||
*/
|
||||
public Optional<QualifiedName> getModuleNameForFile(File path) {
|
||||
TruffleFile p = getTruffleFile(path);
|
||||
@ -369,12 +353,11 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the module name associated with a given file, using the environment
|
||||
* packages information.
|
||||
* Fetches the module name associated with a given file, using the environment packages
|
||||
* information.
|
||||
*
|
||||
* @param file the path to decode.
|
||||
* @return a qualified name of the module corresponding to the file, if
|
||||
* exists.
|
||||
* @return a qualified name of the module corresponding to the file, if exists.
|
||||
*/
|
||||
public Optional<QualifiedName> getModuleNameForFile(TruffleFile file) {
|
||||
return PackageRepositoryUtils.getModuleNameForFile(packageRepository, file);
|
||||
@ -428,8 +411,8 @@ public final class EnsoContext {
|
||||
*/
|
||||
public Optional<Module> findModuleByExpressionId(UUID expressionId) {
|
||||
return getTopScope().getModules().stream()
|
||||
.filter(m -> m.containsUUID(expressionId))
|
||||
.findFirst();
|
||||
.filter(m -> m.containsUUID(expressionId))
|
||||
.findFirst();
|
||||
}
|
||||
|
||||
/**
|
||||
@ -440,12 +423,12 @@ public final class EnsoContext {
|
||||
@TruffleBoundary
|
||||
public void addToClassPath(TruffleFile file) {
|
||||
if (findGuestJava() == null) {
|
||||
try {
|
||||
var url = file.toUri().toURL();
|
||||
hostClassLoader.add(url);
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
try {
|
||||
var url = file.toUri().toURL();
|
||||
hostClassLoader.add(url);
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
var path = new File(file.toUri()).getAbsoluteFile();
|
||||
@ -460,8 +443,8 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether provided object comes from Java. Either Java system
|
||||
* libraries or libraries added by {@link #addToClassPath(TruffleFile)}.
|
||||
* Checks whether provided object comes from Java. Either Java system libraries or libraries added
|
||||
* by {@link #addToClassPath(TruffleFile)}.
|
||||
*
|
||||
* @param obj the object to check
|
||||
* @return {@code true} or {@code false}
|
||||
@ -496,20 +479,18 @@ public final class EnsoContext {
|
||||
* @param obj java object
|
||||
* @return wrapper object
|
||||
*/
|
||||
// @Deprecated(forRemoval=true)
|
||||
// @Deprecated(forRemoval=true)
|
||||
public Object asGuestValue(Object obj) {
|
||||
return environment.asGuestValue(obj);
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to lookup a Java class (host symbol in Truffle terminology) by its
|
||||
* fully qualified name. This method also tries to lookup inner classes. More
|
||||
* specifically, if the provided name resolves to an inner class, then the
|
||||
* import of the outer class is resolved, and the inner class is looked up by
|
||||
* iterating the members of the outer class via Truffle's interop protocol.
|
||||
* Tries to lookup a Java class (host symbol in Truffle terminology) by its fully qualified name.
|
||||
* This method also tries to lookup inner classes. More specifically, if the provided name
|
||||
* resolves to an inner class, then the import of the outer class is resolved, and the inner class
|
||||
* is looked up by iterating the members of the outer class via Truffle's interop protocol.
|
||||
*
|
||||
* @param className Fully qualified class name, can also be nested static
|
||||
* inner class.
|
||||
* @param className Fully qualified class name, can also be nested static inner class.
|
||||
* @return If the java class is found, return it, otherwise return null.
|
||||
*/
|
||||
@TruffleBoundary
|
||||
@ -519,8 +500,8 @@ public final class EnsoContext {
|
||||
for (int i = items.size() - 1; i >= 0; i--) {
|
||||
String pkgName = String.join(".", items.subList(0, i));
|
||||
String curClassName = items.get(i);
|
||||
List<String> nestedClassPart
|
||||
= i < items.size() - 1 ? items.subList(i + 1, items.size()) : List.of();
|
||||
List<String> nestedClassPart =
|
||||
i < items.size() - 1 ? items.subList(i + 1, items.size()) : List.of();
|
||||
try {
|
||||
var hostSymbol = lookupHostSymbol(pkgName, curClassName);
|
||||
if (nestedClassPart.isEmpty()) {
|
||||
@ -540,11 +521,9 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
private Object lookupHostSymbol(String pkgName, String curClassName)
throws ClassNotFoundException, UnknownIdentifierException, UnsupportedMessageException {
throws ClassNotFoundException, UnknownIdentifierException, UnsupportedMessageException {
if (findGuestJava() == null) {
return environment.asHostSymbol(
hostClassLoader.loadClass(pkgName + "." + curClassName)
);
return environment.asHostSymbol(hostClassLoader.loadClass(pkgName + "." + curClassName));
} else {
return InteropLibrary.getUncached().readMember(findGuestJava(), pkgName + "." + curClassName);
}
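The lookupJavaClass documentation above describes resolving nested classes by first resolving the outermost class and then walking into inner classes. A rough standalone analogy using plain reflection, with a hypothetical helper name and not the interop-based implementation used in the interpreter:

import java.util.Arrays;
import java.util.List;

final class NestedClassLookupSketch {
  // Walks from the longest package prefix downwards, treating the remaining segments
  // as nested classes, so "java.util.Map.Entry" resolves via the binary name "java.util.Map$Entry".
  static Class<?> lookup(String qualifiedName) {
    List<String> parts = Arrays.asList(qualifiedName.split("\\."));
    for (int i = parts.size(); i >= 1; i--) {
      String outer = String.join(".", parts.subList(0, i));
      String nested = String.join("$", parts.subList(i, parts.size()));
      String binaryName = nested.isEmpty() ? outer : outer + "$" + nested;
      try {
        return Class.forName(binaryName);
      } catch (ClassNotFoundException e) {
        // keep trying shorter prefixes
      }
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(lookup("java.util.Map.Entry")); // interface java.util.Map$Entry
  }
}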
@ -569,8 +548,11 @@ public final class EnsoContext {
|
||||
logger.log(Level.SEVERE, "Using experimental Espresso support!");
|
||||
} catch (Exception ex) {
|
||||
if (ex.getMessage().contains("No language for id java found.")) {
|
||||
logger.log(Level.SEVERE, "Environment variable ENSO_JAVA=" + envJava + ", but " + ex.getMessage());
|
||||
logger.log(Level.SEVERE, "Use " + System.getProperty("java.home") + "/bin/gu install espresso");
|
||||
logger.log(
|
||||
Level.SEVERE,
|
||||
"Environment variable ENSO_JAVA=" + envJava + ", but " + ex.getMessage());
|
||||
logger.log(
|
||||
Level.SEVERE, "Use " + System.getProperty("java.home") + "/bin/gu install espresso");
|
||||
logger.log(Level.SEVERE, "Continuing in regular Java mode");
|
||||
} else {
|
||||
var ise = new IllegalStateException(ex.getMessage());
|
||||
@ -579,7 +561,8 @@ public final class EnsoContext {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new IllegalStateException("Specify ENSO_JAVA=espresso to use Espresso. Was: " + envJava);
|
||||
throw new IllegalStateException(
|
||||
"Specify ENSO_JAVA=espresso to use Espresso. Was: " + envJava);
|
||||
}
|
||||
return guestJava;
|
||||
}
|
||||
@ -603,7 +586,7 @@ public final class EnsoContext {
|
||||
public Optional<Module> createModuleForFile(File path) {
|
||||
TruffleFile f = getTruffleFile(path);
|
||||
return getModuleNameForFile(path)
|
||||
.map(name -> getTopScope().createModule(name, getPackageOf(f).orElse(null), f));
|
||||
.map(name -> getTopScope().createModule(name, getPackageOf(f).orElse(null), f));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -625,8 +608,8 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the atom constructor corresponding to the {@code Nothing} type, for
|
||||
* builtin constructs that need to return an atom of this type.
|
||||
* Returns the atom constructor corresponding to the {@code Nothing} type, for builtin constructs
|
||||
* that need to return an atom of this type.
|
||||
*
|
||||
* @return the builtin {@code Nothing} atom constructor
|
||||
*/
|
||||
@ -675,8 +658,7 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks value of
|
||||
* {@link RuntimeOptions#INTERPRETER_SEQUENTIAL_COMMAND_EXECUTION_KEY}.
|
||||
* Checks value of {@link RuntimeOptions#INTERPRETER_SEQUENTIAL_COMMAND_EXECUTION_KEY}.
|
||||
*
|
||||
* @return the value of the option
|
||||
*/
|
||||
@ -693,9 +675,7 @@ public final class EnsoContext {
|
||||
return getOption(RuntimeOptions.ENABLE_GLOBAL_SUGGESTIONS_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* The job parallelism or 1
|
||||
*/
|
||||
/** The job parallelism or 1 */
|
||||
public int getJobParallelism() {
|
||||
var n = getOption(RuntimeOptions.JOB_PARALLELISM_KEY);
|
||||
var base = n == null ? 1 : n.intValue();
|
||||
@ -786,8 +766,8 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a logger for the specified class that is bound to this engine.
|
||||
* Such logger may then be safely used in threads defined in a thread-pool.
|
||||
* Gets a logger for the specified class that is bound to this engine. Such logger may then be
|
||||
* safely used in threads defined in a thread-pool.
|
||||
*
|
||||
* @param clazz the class to name log entries with
|
||||
* @return a new logger for the specified {@code path}
|
||||
@ -797,11 +777,9 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current clock value and atomically increments the counter by
|
||||
* one.
|
||||
* Returns the current clock value and atomically increments the counter by one.
|
||||
*
|
||||
* <p>
|
||||
* The counter is used to track the creation time of warnings.
|
||||
* <p>The counter is used to track the creation time of warnings.
|
||||
*/
|
||||
public long nextSequenceId() {
|
||||
return clock.getAndIncrement();
|
||||
@ -811,16 +789,12 @@ public final class EnsoContext {
|
||||
return executionEnvironment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the runtime execution environment of this context.
|
||||
*/
|
||||
/** Set the runtime execution environment of this context. */
|
||||
public void setExecutionEnvironment(ExecutionEnvironment executionEnvironment) {
|
||||
this.executionEnvironment = executionEnvironment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a maximal number of warnings that can be attached to a value
|
||||
*/
|
||||
/** Returns a maximal number of warnings that can be attached to a value */
|
||||
public int getWarningsLimit() {
|
||||
return this.warningsLimit;
|
||||
}
|
||||
@ -846,8 +820,7 @@ public final class EnsoContext {
|
||||
|
||||
public TruffleFile findLibraryRootPath(LibraryRoot root) {
|
||||
return environment.getInternalTruffleFile(
|
||||
root.location().toAbsolutePath().normalize().toString()
|
||||
);
|
||||
root.location().toAbsolutePath().normalize().toString());
|
||||
}
|
||||
|
||||
public TruffleFile getPublicTruffleFile(String path) {
|
||||
@ -867,8 +840,9 @@ public final class EnsoContext {
|
||||
}
|
||||
|
||||
public Thread createThread(boolean systemThread, Runnable run) {
|
||||
return systemThread ? environment.createSystemThread(run)
|
||||
: environment.newTruffleThreadBuilder(run).build();
|
||||
return systemThread
|
||||
? environment.createSystemThread(run)
|
||||
: environment.newTruffleThreadBuilder(run).build();
|
||||
}
|
||||
|
||||
public Future<Void> submitThreadLocal(Thread[] threads, ThreadLocalAction action) {
|
||||
@ -892,22 +866,20 @@ public final class EnsoContext {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Helper method to use when an unexpected state happens that should raise a panic,
|
||||
* but not crash the interpreter. Creates a {@link PanicException} with
|
||||
* <em>assertion error</em> payload.
|
||||
*
|
||||
* Helper method to use when an unexpected state happens that should raise a panic, but not crash
|
||||
* the interpreter. Creates a {@link PanicException} with <em>assertion error</em> payload.
|
||||
*
|
||||
* @param node where the problem happened (may be {@code null})
|
||||
* @param message {@code null} (then {@code e.getMessage()} is used) or a special
|
||||
* message to use in the panic
|
||||
* @param message {@code null} (then {@code e.getMessage()} is used) or a special message to use
|
||||
* in the panic
|
||||
* @param e external exception to extract message and stack from or {@code null}
|
||||
* @return this method never returns it throws the {@link PanicException}
|
||||
* @throws PanicException with <em>assertion error</em> payload
|
||||
*/
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
public PanicException raiseAssertionPanic(Node node, String message, Throwable e) throws PanicException {
|
||||
public PanicException raiseAssertionPanic(Node node, String message, Throwable e)
|
||||
throws PanicException {
|
||||
String msg;
|
||||
String sep;
|
||||
if (e != null) {
|
||||
|
@ -63,6 +63,7 @@ public final class Module implements EnsoObject {
|
||||
private final ModuleCache cache;
|
||||
private boolean wasLoadedFromCache;
|
||||
private final boolean synthetic;
|
||||
|
||||
/**
|
||||
* This list is filled in case there is a directory with the same name as this module. The
|
||||
* directory then contains submodules of this module that should be directly accessible from this
|
||||
@ -188,17 +189,23 @@ public final class Module implements EnsoObject {
|
||||
this.compilationStage = CompilationStage.INITIAL;
|
||||
}
|
||||
|
||||
/** @return the literal source of this module. */
|
||||
/**
|
||||
* @return the literal source of this module.
|
||||
*/
|
||||
public Rope getLiteralSource() {
|
||||
return sources.rope();
|
||||
}
|
||||
|
||||
/** @return true if this module represents a synthetic (compiler-generated) module */
|
||||
/**
|
||||
* @return true if this module represents a synthetic (compiler-generated) module
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return synthetic;
|
||||
}
|
||||
|
||||
/** @return true iff this module is private (project-private). */
|
||||
/**
|
||||
* @return true iff this module is private (project-private).
|
||||
*/
|
||||
public boolean isPrivate() {
|
||||
return ir.isPrivate();
|
||||
}
|
||||
@ -291,7 +298,9 @@ public final class Module implements EnsoObject {
|
||||
}
|
||||
}
|
||||
|
||||
/** @return the location of this module. */
|
||||
/**
|
||||
* @return the location of this module.
|
||||
*/
|
||||
public String getPath() {
|
||||
return sources.getPath();
|
||||
}
|
||||
@ -382,7 +391,9 @@ public final class Module implements EnsoObject {
|
||||
context.getCompiler().run(asCompilerModule());
|
||||
}
|
||||
|
||||
/** @return IR defined by this module. */
|
||||
/**
|
||||
* @return IR defined by this module.
|
||||
*/
|
||||
public org.enso.compiler.core.ir.Module getIr() {
|
||||
return ir;
|
||||
}
|
||||
@ -409,7 +420,9 @@ public final class Module implements EnsoObject {
|
||||
return map.containsKey(id);
|
||||
}
|
||||
|
||||
/** @return the current compilation stage of this module. */
|
||||
/**
|
||||
* @return the current compilation stage of this module.
|
||||
*/
|
||||
public CompilationStage getCompilationStage() {
|
||||
return compilationStage;
|
||||
}
|
||||
@ -439,7 +452,9 @@ public final class Module implements EnsoObject {
|
||||
this.uuidsMap = null;
|
||||
}
|
||||
|
||||
/** @return the runtime scope of this module. */
|
||||
/**
|
||||
* @return the runtime scope of this module.
|
||||
*/
|
||||
public ModuleScope getScope() {
|
||||
return scope;
|
||||
}
|
||||
@ -458,7 +473,9 @@ public final class Module implements EnsoObject {
|
||||
}
|
||||
}
|
||||
|
||||
/** @return the qualified name of this module. */
|
||||
/**
|
||||
* @return the qualified name of this module.
|
||||
*/
|
||||
public QualifiedName getName() {
|
||||
return name;
|
||||
}
|
||||
@ -476,7 +493,9 @@ public final class Module implements EnsoObject {
|
||||
this.name = name.renameProject(newName);
|
||||
}
|
||||
|
||||
/** @return the indexed flag. */
|
||||
/**
|
||||
* @return the indexed flag.
|
||||
*/
|
||||
public boolean isIndexed() {
|
||||
return isIndexed;
|
||||
}
|
||||
@ -486,27 +505,37 @@ public final class Module implements EnsoObject {
|
||||
isIndexed = indexed;
|
||||
}
|
||||
|
||||
/** @return the source file of this module. */
|
||||
/**
|
||||
* @return the source file of this module.
|
||||
*/
|
||||
public TruffleFile getSourceFile() {
|
||||
return sources.file();
|
||||
}
|
||||
|
||||
/** @return {@code true} if the module is interactive, {@code false} otherwise */
|
||||
/**
|
||||
* @return {@code true} if the module is interactive, {@code false} otherwise
|
||||
*/
|
||||
public boolean isInteractive() {
|
||||
return patchedValues != null;
|
||||
}
|
||||
|
||||
/** @return the cache for this module */
|
||||
/**
|
||||
* @return the cache for this module
|
||||
*/
|
||||
public ModuleCache getCache() {
|
||||
return cache;
|
||||
}
|
||||
|
||||
/** @return {@code true} if the module was loaded from the cache, {@code false} otherwise */
|
||||
/**
|
||||
* @return {@code true} if the module was loaded from the cache, {@code false} otherwise
|
||||
*/
|
||||
public boolean wasLoadedFromCache() {
|
||||
return wasLoadedFromCache;
|
||||
}
|
||||
|
||||
/** @param wasLoadedFromCache whether or not the module was loaded from the cache */
|
||||
/**
|
||||
* @param wasLoadedFromCache whether or not the module was loaded from the cache
|
||||
*/
|
||||
void setLoadedFromCache(boolean wasLoadedFromCache) {
|
||||
this.wasLoadedFromCache = wasLoadedFromCache;
|
||||
}
|
||||
@ -543,7 +572,8 @@ public final class Module implements EnsoObject {
|
||||
TruffleLogger logger = TruffleLogger.getLogger(LanguageInfo.ID, Module.class);
|
||||
logger.log(
|
||||
Level.SEVERE,
|
||||
"Failed to get the requested method. Try clearing your IR caches or disabling caching.");
|
||||
"Failed to get the requested method. Try clearing your IR caches or disabling"
|
||||
+ " caching.");
|
||||
throw npe;
|
||||
}
|
||||
}
|
||||
|
@ -7,25 +7,20 @@ import org.enso.pkg.QualifiedName;
|
||||
import org.enso.polyglot.LanguageInfo;
|
||||
import org.enso.text.buffer.Rope;
|
||||
|
||||
|
||||
/** Keeps information about various kinds of sources associated with a {@link Module}.
|
||||
* All the record fields are immutable. They can only change by creating new instance
|
||||
* of the whole record - usually via methods {@link #newWith(org.enso.text.buffer.Rope)}
|
||||
* or {@link #newWith(com.oracle.truffle.api.TruffleFile)}. The creation of
|
||||
* cached Truffle {@link Source} is delayed and happens only
|
||||
* per {@link #ensureCachedSource(org.enso.pkg.QualifiedName) request}.
|
||||
*
|
||||
/**
|
||||
* Keeps information about various kinds of sources associated with a {@link Module}. All the record
|
||||
* fields are immutable. They can only change by creating new instance of the whole record - usually
|
||||
* via methods {@link #newWith(org.enso.text.buffer.Rope)} or {@link
|
||||
* #newWith(com.oracle.truffle.api.TruffleFile)}. The creation of cached Truffle {@link Source} is
|
||||
* delayed and happens only per {@link #ensureCachedSource(org.enso.pkg.QualifiedName) request}.
|
||||
*/
|
||||
record ModuleSources(
|
||||
TruffleFile file,
|
||||
Rope rope,
|
||||
Source source) {
|
||||
record ModuleSources(TruffleFile file, Rope rope, Source source) {
|
||||
|
||||
/** Empty triple of sources. Continue with {@code newWith} methods.
|
||||
*/
|
||||
/** Empty triple of sources. Continue with {@code newWith} methods. */
|
||||
static final ModuleSources NONE = new ModuleSources(null, null, null);
|
||||
|
||||
/** Creates sources instances associated with provided Truffle file.
|
||||
/**
|
||||
* Creates sources instances associated with provided Truffle file.
|
||||
*
|
||||
* @param f the file to associate the sources with
|
||||
* @return new sources triple
|
||||
@ -34,8 +29,9 @@ record ModuleSources(
|
||||
return new ModuleSources(f, null, null);
|
||||
}
|
||||
|
||||
/** Associates current sources with modified text content. The {@code file}
|
||||
* is kept unchanged. The cached {@code source} is cleared.
|
||||
/**
|
||||
* Associates current sources with modified text content. The {@code file} is kept unchanged. The
|
||||
* cached {@code source} is cleared.
|
||||
*
|
||||
* @param r the new text content to assign to the sources
|
||||
* @return new sources triple
|
||||
@ -44,8 +40,9 @@ record ModuleSources(
|
||||
return new ModuleSources(file(), r, null);
|
||||
}
|
||||
|
||||
/** Resets the contents of sources but retains the reference to the
|
||||
* file. Continue with {@code newWith} methods.
|
||||
/**
|
||||
* Resets the contents of sources but retains the reference to the file. Continue with {@code
|
||||
* newWith} methods.
|
||||
*
|
||||
* @return new sources triple
|
||||
*/
|
||||
@ -53,10 +50,10 @@ record ModuleSources(
|
||||
return new ModuleSources(file(), null, null);
|
||||
}
|
||||
|
||||
/** Makes sure Truffle {@code Source} object is ready for the sources. If
|
||||
* this triple already contains Truffle source, it just returns itself.
|
||||
* Otherwise, if the {@link #rope() content} is set, it is used as chars
|
||||
* for the Truffle source. If the file isn't in memory yet, it is read in
|
||||
/**
|
||||
* Makes sure Truffle {@code Source} object is ready for the sources. If this triple already
|
||||
* contains Truffle source, it just returns itself. Otherwise, if the {@link #rope() content} is
|
||||
* set, it is used as chars for the Truffle source. If the file isn't in memory yet, it is read in
|
||||
* and both {@link #source()} and {@link #rope()} are initialized.
|
||||
*
|
||||
* @param name the name of the associated module
|
||||
@ -78,7 +75,8 @@ record ModuleSources(
throw new IllegalStateException();
}

/** Path of the associated {@link #file()}.
/**
* Path of the associated {@link #file()}.
*
* @return path or {@code null}
*/
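The ModuleSources record above documents an immutable "newWith" copy pattern: fields never mutate, and every change produces a fresh instance. A minimal sketch of that pattern, using a hypothetical record that is not part of this change:

record SourcesSketch(String path, String contents) {
  // Replaces the contents while keeping the path; any cached derived form would be reset here.
  SourcesSketch newWith(String newContents) {
    return new SourcesSketch(path, newContents);
  }

  public static void main(String[] args) {
    var original = new SourcesSketch("Main.enso", "main = 1");
    var edited = original.newWith("main = 2");
    System.out.println(original.contents() + " / " + edited.contents()); // main = 1 / main = 2
  }
}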
@ -7,43 +7,39 @@ import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.function.Predicate;
|
||||
import org.enso.compiler.context.SimpleUpdate;
|
||||
import org.enso.compiler.core.IR;
|
||||
import org.enso.compiler.core.ir.Expression;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
import org.enso.interpreter.runtime.tag.Patchable;
|
||||
|
||||
/**
|
||||
* Keeps patched values for expression in module. Also keeps mapping of
|
||||
* original source offset and new {@link #findDelta(int, boolean) deltas}.
|
||||
* Keeps patched values for expression in module. Also keeps mapping of original source offset and
|
||||
* new {@link #findDelta(int, boolean) deltas}.
|
||||
*/
|
||||
final class PatchedModuleValues {
|
||||
private final Module module;
|
||||
private final TreeMap<Integer,int[]> deltas = new TreeMap<>();
|
||||
private final TreeMap<Integer, int[]> deltas = new TreeMap<>();
|
||||
private Map<Node, Predicate<Expression>> values;
|
||||
|
||||
PatchedModuleValues(Module module) {
|
||||
this.module = module;
|
||||
}
|
||||
|
||||
/** Disposes the binding. No-op currently.
|
||||
*/
|
||||
void dispose() {
|
||||
}
|
||||
/** Disposes the binding. No-op currently. */
|
||||
void dispose() {}
|
||||
|
||||
/** Keeps "deltas" for each {@code offset} where a modification happened.
|
||||
* Edits are always deleting few characters and inserting another few characters
|
||||
* at a given location. The idea here is to use {@code SortedMap} to keep
|
||||
* information about "deltas" at each offset of modification. Whenever new
|
||||
* edit is made, the delta at its offset is recorded and all deltas after
|
||||
* the offset adjusted as they shift too.
|
||||
/**
|
||||
* Keeps "deltas" for each {@code offset} where a modification happened. Edits are always deleting
|
||||
* few characters and inserting another few characters at a given location. The idea here is to
|
||||
* use {@code SortedMap} to keep information about "deltas" at each offset of modification.
|
||||
* Whenever new edit is made, the delta at its offset is recorded and all deltas after the offset
|
||||
* adjusted as they shift too.
|
||||
*
|
||||
* @param collect {@link Node} to value map of the values to use
|
||||
* @param offset location when a modification happened
|
||||
* @param delta positive or negative change at the offset location
|
||||
*/
|
||||
private synchronized void performUpdates(
|
||||
Map<Node, Predicate<Expression>> collect, int offset, int delta
|
||||
) {
|
||||
Map<Node, Predicate<Expression>> collect, int offset, int delta) {
|
||||
if (values == null) {
|
||||
var scope = module.getScope();
|
||||
values = new HashMap<>();
|
||||
@ -58,22 +54,23 @@ final class PatchedModuleValues {
|
||||
}
|
||||
Map.Entry<Integer, int[]> previous = deltas.floorEntry(offset);
|
||||
if (previous == null) {
|
||||
deltas.put(offset, new int[] { delta });
|
||||
deltas.put(offset, new int[] {delta});
|
||||
} else if (previous.getKey() == offset) {
|
||||
previous.getValue()[0] += delta;
|
||||
} else {
|
||||
deltas.put(offset, new int[] { previous.getValue()[0] + delta });
|
||||
deltas.put(offset, new int[] {previous.getValue()[0] + delta});
|
||||
}
|
||||
for (int[] after : deltas.tailMap(offset, false).values()) {
|
||||
after[0] += delta;
|
||||
}
|
||||
}
|
||||
|
||||
/** Checks whether a simple edit is applicable and performs it if so.
|
||||
/**
|
||||
* Checks whether a simple edit is applicable and performs it if so.
|
||||
*
|
||||
* @param update information about the edit to be done
|
||||
* @return {@code true} if the edit was applied, {@code false} to proceed with
|
||||
* full re-parse of the source
|
||||
* @return {@code true} if the edit was applied, {@code false} to proceed with full re-parse of
|
||||
* the source
|
||||
*/
|
||||
boolean simpleUpdate(SimpleUpdate update) {
|
||||
var scope = module.getScope();
|
||||
@ -110,13 +107,15 @@ final class PatchedModuleValues {
|
||||
return true;
|
||||
}
|
||||
|
||||
private static void updateFunctionsMap(SimpleUpdate edit, List<Function> values, Map<Node, Predicate<Expression>> nodeValues) {
|
||||
private static void updateFunctionsMap(
|
||||
SimpleUpdate edit, List<Function> values, Map<Node, Predicate<Expression>> nodeValues) {
|
||||
for (Function f : values) {
|
||||
updateNode(edit, f.getCallTarget().getRootNode(), nodeValues);
|
||||
}
|
||||
}
|
||||
|
||||
private static void updateNode(SimpleUpdate update, Node root, Map<Node, Predicate<Expression>> nodeValues) {
|
||||
private static void updateNode(
|
||||
SimpleUpdate update, Node root, Map<Node, Predicate<Expression>> nodeValues) {
|
||||
LinkedList<Node> queue = new LinkedList<>();
|
||||
queue.add(root);
|
||||
while (!queue.isEmpty()) {
|
||||
@ -140,12 +139,10 @@ final class PatchedModuleValues {
|
||||
continue;
|
||||
}
|
||||
if (n instanceof Patchable node) {
|
||||
if (
|
||||
at.getStartLine() - 1 == edit.range().start().line() &&
|
||||
at.getStartColumn() - 1 == edit.range().start().character() &&
|
||||
at.getEndLine() - 1 == edit.range().end().line() &&
|
||||
at.getEndColumn() == edit.range().end().character()
|
||||
) {
|
||||
if (at.getStartLine() - 1 == edit.range().start().line()
|
||||
&& at.getStartColumn() - 1 == edit.range().start().character()
|
||||
&& at.getEndLine() - 1 == edit.range().end().line()
|
||||
&& at.getEndColumn() == edit.range().end().character()) {
|
||||
var patchableNode = node.asPatchableNode();
|
||||
if (patchableNode.test(update.newIr())) {
|
||||
nodeValues.put(patchableNode, patchableNode);
|
||||
@ -164,12 +161,13 @@ final class PatchedModuleValues {
* Finds difference against location in the original source code.
*
* @param offset the original location
* @param inclusive are modifications at the same location going to count or not
* - they count for section ends, but do not count for section starts
* @param inclusive are modifications at the same location going to count or not - they count for
* section ends, but do not count for section starts
* @return positive or negative delta to apply at given offset
*/
int findDelta(int offset, boolean inclusive) {
Map.Entry<Integer, int[]> previous = inclusive ? deltas.floorEntry(offset) : deltas.lowerEntry(offset);
Map.Entry<Integer, int[]> previous =
inclusive ? deltas.floorEntry(offset) : deltas.lowerEntry(offset);
return previous == null ? 0 : previous.getValue()[0];
}
}
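The PatchedModuleValues Javadoc above explains the offset/delta bookkeeping: each edit records a cumulative delta at its offset, shifts all later deltas, and findDelta reads the nearest entry at or before a position. A self-contained sketch of the same technique, with a hypothetical class name that is not part of this change:

import java.util.Map;
import java.util.TreeMap;

final class OffsetDeltaSketch {
  private final TreeMap<Integer, int[]> deltas = new TreeMap<>();

  // Records an edit at `offset` that grows (positive) or shrinks (negative) the text by `delta`.
  void recordEdit(int offset, int delta) {
    Map.Entry<Integer, int[]> previous = deltas.floorEntry(offset);
    if (previous == null) {
      deltas.put(offset, new int[] {delta});
    } else if (previous.getKey() == offset) {
      previous.getValue()[0] += delta;
    } else {
      deltas.put(offset, new int[] {previous.getValue()[0] + delta});
    }
    // All deltas recorded after this offset shift by the same amount.
    for (int[] after : deltas.tailMap(offset, false).values()) {
      after[0] += delta;
    }
  }

  // Cumulative shift to apply at an original offset; `inclusive` decides whether an edit
  // exactly at `offset` counts (section ends) or not (section starts).
  int findDelta(int offset, boolean inclusive) {
    Map.Entry<Integer, int[]> previous =
        inclusive ? deltas.floorEntry(offset) : deltas.lowerEntry(offset);
    return previous == null ? 0 : previous.getValue()[0];
  }

  public static void main(String[] args) {
    var sketch = new OffsetDeltaSketch();
    sketch.recordEdit(10, +3); // inserted 3 chars at offset 10
    sketch.recordEdit(20, -1); // deleted 1 char at offset 20
    System.out.println(sketch.findDelta(15, true)); // 3
    System.out.println(sketch.findDelta(25, true)); // 2
  }
}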
@ -21,7 +21,8 @@ public class ResourceManager {
|
||||
private volatile Thread workerThread;
|
||||
private final Runner worker = new Runner();
|
||||
private final ReferenceQueue<ManagedResource> referenceQueue = new ReferenceQueue<>();
|
||||
private final ConcurrentMap<PhantomReference<ManagedResource>, Item> items = new ConcurrentHashMap<>();
|
||||
private final ConcurrentMap<PhantomReference<ManagedResource>, Item> items =
|
||||
new ConcurrentHashMap<>();
|
||||
|
||||
/**
|
||||
* Creates a new instance of Resource Manager.
|
||||
@ -128,8 +129,9 @@ public class ResourceManager {
|
||||
@CompilerDirectives.TruffleBoundary
|
||||
public ManagedResource register(Object object, Object function) {
|
||||
if (isClosed) {
|
||||
throw EnsoContext.get(null).raiseAssertionPanic(null,
|
||||
"Can't register new resources after resource manager is closed.", null);
|
||||
throw EnsoContext.get(null)
|
||||
.raiseAssertionPanic(
|
||||
null, "Can't register new resources after resource manager is closed.", null);
|
||||
}
|
||||
if (workerThread == null || !workerThread.isAlive()) {
|
||||
worker.setKilled(false);
|
||||
@ -227,7 +229,11 @@ public class ResourceManager {
|
||||
* @param reference a phantom reference used for tracking the reachability status of the
|
||||
* resource.
|
||||
*/
|
||||
public Item(ManagedResource referent, Object underlying, Object finalizer, ReferenceQueue<ManagedResource> queue) {
|
||||
public Item(
|
||||
ManagedResource referent,
|
||||
Object underlying,
|
||||
Object finalizer,
|
||||
ReferenceQueue<ManagedResource> queue) {
|
||||
super(referent, queue);
|
||||
this.underlying = underlying;
|
||||
this.finalizer = finalizer;