Use SimpleFormatter.formatMessage to replace {0} with actual parameters (#5870)

Fixes #5801 to properly format Truffle log records before sending them for further processing.
author Jaroslav Tulach 2023-03-11 01:15:58 +01:00 (committed by GitHub)
parent 7887fb8d40
commit 632a303089
18 changed files with 317 additions and 149 deletions
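
For context, a minimal sketch of the java.util.logging behaviour this fix relies on (the demo class name is illustrative, not part of the commit): Formatter.formatMessage, which SimpleFormatter inherits, expands MessageFormat-style placeholders such as {0} using the record's parameters, whereas record.getMessage() returns only the raw template that was previously forwarded to the logging service.

import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;

public class FormatMessageDemo {
  public static void main(String[] args) {
    LogRecord record = new LogRecord(Level.INFO, "Init {0} done");
    record.setParameters(new Object[] {"js"});

    // The raw template still contains the placeholder.
    System.out.println(record.getMessage());                         // Init {0} done
    // formatMessage substitutes the record's parameters into the template.
    System.out.println(new SimpleFormatter().formatMessage(record)); // Init js done
  }
}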

View File

@ -871,10 +871,12 @@ lazy val `logging-service` = project
"com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion,
akkaStream,
akkaHttp,
"io.circe" %%% "circe-core" % circeVersion,
"io.circe" %%% "circe-parser" % circeVersion,
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"org.graalvm.nativeimage" % "svm" % graalVersion % "provided"
"io.circe" %%% "circe-core" % circeVersion,
"io.circe" %%% "circe-parser" % circeVersion,
"junit" % "junit" % junitVersion % Test,
"com.novocode" % "junit-interface" % "0.11" % Test exclude ("junit", "junit-dep"),
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"org.graalvm.nativeimage" % "svm" % graalVersion % "provided"
)
)
.settings(

View File

@ -13,10 +13,8 @@ import org.enso.interpreter.runtime.EnsoContext;
import org.enso.pkg.QualifiedName;
import org.enso.pkg.SourceFile;
import scala.collection.immutable.Map;
import scala.jdk.CollectionConverters;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.logging.Level;
@ -69,7 +67,7 @@ public final class ImportExportCache extends Cache<ImportExportCache.CachedBindi
try {
return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize library's metadata: " + e.getMessage(), e);
logger.log(logLevel, "Failed to deserialize library's metadata.", e);
return Optional.empty();
}
}

View File

@ -69,7 +69,7 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
try {
return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize module's metadata: " + e.getMessage(), e);
logger.log(logLevel, "Failed to deserialize module's metadata.", e);
return Optional.empty();
}
}

View File

@ -73,7 +73,7 @@ public final class SuggestionsCache
try {
return Optional.of(objectMapper.readValue(maybeJsonString, SuggestionsCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize suggestions' metadata: " + e.getMessage(), e);
logger.log(logLevel, "Failed to deserialize suggestions' metadata.", e);
return Optional.empty();
}
}

View File

@ -107,7 +107,8 @@ class Compiler(
if (!builtins.isIrInitialized) {
logger.log(
Compiler.defaultLogLevel,
s"Initialising IR for [${builtins.getModule.getName}]."
"Initialising IR for [{}].",
builtins.getModule.getName
)
builtins.initializeBuiltinsSource()
@ -169,7 +170,7 @@ class Compiler(
case None =>
logger.log(
Level.SEVERE,
s"No package found in the compiler environment. Aborting."
"No package found in the compiler environment. Aborting."
)
case Some(pkg) =>
val packageModule = packageRepository.getModuleMap.get(
@ -179,8 +180,8 @@ class Compiler(
case None =>
logger.log(
Level.SEVERE,
s"Could not find entry point for compilation in package " +
s"[${pkg.namespace}.${pkg.name}]."
"Could not find entry point for compilation in package [{}]",
s"${pkg.namespace}.${pkg.name}"
)
case Some(m) =>
logger.log(
@ -267,7 +268,8 @@ class Compiler(
) {
logger.log(
Compiler.defaultLogLevel,
s"Some imported modules' caches were invalided, forcing invalidation of ${module.getName.toString}"
"Some imported modules' caches were invalided, forcing invalidation of {}",
module.getName.toString
)
module.getCache.invalidate(context)
parseModule(module)
@ -287,14 +289,15 @@ class Compiler(
if (!flags.contains(false)) {
logger.log(
Compiler.defaultLogLevel,
s"Restored links (late phase) for module [${module.getName}]."
"Restored links (late phase) for module [{}].",
module.getName
)
} else {
hasInvalidModuleRelink = true
logger.log(
Compiler.defaultLogLevel,
s"Failed to restore links (late phase) for module " +
s"[${module.getName}]."
"Failed to restore links (late phase) for module [{}].",
module.getName
)
uncachedParseModule(module, isGenDocs = false)
}
@ -377,7 +380,8 @@ class Compiler(
if (generateCode) {
logger.log(
Compiler.defaultLogLevel,
s"Generating code for module [${module.getName}]."
"Generating code for module [{}].",
module.getName
)
truffleCodegen(module.getIr, module.getSource, module.getScope)
@ -396,7 +400,8 @@ class Compiler(
} else {
logger.log(
Compiler.defaultLogLevel,
s"Skipping serialization for [${module.getName}]."
"Skipping serialization for [{}].",
module.getName
)
}
}
@ -485,7 +490,8 @@ class Compiler(
): Unit = {
logger.log(
Compiler.defaultLogLevel,
s"Parsing the module [${module.getName}]."
"Parsing module [{}].",
module.getName
)
module.ensureScopeExists(context)
module.getScope.reset()
@ -517,7 +523,8 @@ class Compiler(
private def uncachedParseModule(module: Module, isGenDocs: Boolean): Unit = {
logger.log(
Compiler.defaultLogLevel,
s"Loading module `${module.getName}` from source."
"Loading module [{}] from source.",
module.getName
)
module.ensureScopeExists(context)
module.getScope.reset()

View File

@ -105,7 +105,8 @@ final class SerializationManager(
): Future[Boolean] = {
logger.log(
debugLogLevel,
s"Requesting serialization for module [${module.getName}]."
"Requesting serialization for module [{}].",
module.getName
)
val duplicatedIr = compiler.updateMetadata(
module.getIr,
@ -148,7 +149,8 @@ final class SerializationManager(
): Future[Boolean] = {
logger.log(
Level.INFO,
s"Requesting serialization for library [$libraryName]."
"Requesting serialization for library [{}].",
libraryName
)
val task: Callable[Boolean] =
@ -180,7 +182,8 @@ final class SerializationManager(
): Callable[Boolean] = () => {
logger.log(
debugLogLevel,
s"Running serialization for bindings [$libraryName]."
"Running serialization for bindings [{}].",
libraryName
)
startSerializing(libraryName.toQualifiedName)
val bindingsCache = new ImportExportCache.CachedBindings(
@ -285,13 +288,15 @@ final class SerializationManager(
case result @ Some(_: SuggestionsCache.CachedSuggestions) =>
logger.log(
Level.FINE,
s"Restored suggestions for library [$libraryName]."
"Restored suggestions for library [{}].",
libraryName
)
result
case _ =>
logger.log(
Level.FINEST,
s"Unable to load suggestions for library [$libraryName]."
"Unable to load suggestions for library [{}].",
libraryName
)
None
}
@ -312,13 +317,15 @@ final class SerializationManager(
case result @ Some(_: ImportExportCache.CachedBindings) =>
logger.log(
Level.FINE,
s"Restored bindings for library [$libraryName]."
"Restored bindings for library [{}].",
libraryName
)
result
case _ =>
logger.log(
Level.FINEST,
s"Unable to load bindings for library [${libraryName}]."
"Unable to load bindings for library [{}].",
libraryName
)
None
}
@ -359,20 +366,23 @@ final class SerializationManager(
module.setLoadedFromCache(true)
logger.log(
debugLogLevel,
s"Restored IR from cache for module [${module.getName}] at stage [${loadedCache.compilationStage()}]."
"Restored IR from cache for module [{}] at stage [{}].",
Array(module.getName, loadedCache.compilationStage())
)
if (!relinkedIrChecks.contains(false)) {
module.setHasCrossModuleLinks(true)
logger.log(
debugLogLevel,
s"Restored links (early phase) in module [${module.getName}]."
"Restored links (early phase) in module [{}].",
module.getName
)
Some(true)
} else {
logger.log(
debugLogLevel,
s"Could not restore links (early phase) in module [${module.getName}]."
"Could not restore links (early phase) in module [{}].",
module.getName
)
module.setHasCrossModuleLinks(false)
Some(false)
@ -380,7 +390,8 @@ final class SerializationManager(
case None =>
logger.log(
debugLogLevel,
s"Unable to load a cache for module [${module.getName}]."
"Unable to load a cache for module [{}].",
module.getName
)
None
}
@ -476,7 +487,8 @@ final class SerializationManager(
val jobCount = waitingCount + isSerializing.size
logger.log(
debugLogLevel,
s"Waiting for $jobCount serialization jobs to complete."
"Waiting for #{} serialization jobs to complete.",
jobCount
)
// Bound the waiting loop
@ -535,7 +547,8 @@ final class SerializationManager(
): Callable[Boolean] = { () =>
logger.log(
debugLogLevel,
s"Running serialization for module [$name]."
"Running serialization for module [{}].",
name
)
startSerializing(name)
try {

View File

@ -2,7 +2,7 @@ package org.enso.loggingservice
import org.enso.loggingservice.internal.{InternalLogMessage, LoggerConnection}
import java.util.logging.{Handler, Level, LogRecord}
import java.util.logging.{Handler, Level, LogRecord, SimpleFormatter}
/** A [[Handler]] implementation that allows to use the logging service as a
* backend for [[java.util.logging]].
@ -21,7 +21,7 @@ class JavaLoggingLogHandler(
level = level,
timestamp = record.getInstant,
group = record.getLoggerName,
message = record.getMessage,
message = JavaLoggingLogHandler.formatter.formatMessage(record),
exception = Option(record.getThrown)
)
connection.send(message)
@ -36,6 +36,7 @@ class JavaLoggingLogHandler(
}
object JavaLoggingLogHandler {
private val formatter = new SimpleFormatter()
/** Creates a [[Handler]] with the provided mapping from Java's log levels to
* our log levels.
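
The change above is also safe for plain, non-parameterised messages: by the documented java.util.logging.Formatter.formatMessage contract (an observation, not something spelled out in the diff), a record with no parameters and no resource bundle has its message returned unchanged, and since the InternalLogMessage sent over the connection carries only the final message string, placeholders must be expanded here rather than downstream. A jshell-sized sketch of the pass-through case:

// No parameters set: formatMessage returns the raw message unchanged.
var plain = new java.util.logging.LogRecord(java.util.logging.Level.INFO, "Compiler started");
System.out.println(new java.util.logging.SimpleFormatter().formatMessage(plain)); // Compiler started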

View File

@ -0,0 +1,51 @@
package org.enso.loggingservice;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import org.enso.loggingservice.internal.InternalLogMessage;
import org.enso.loggingservice.internal.LoggerConnection;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import scala.collection.immutable.Map;
public class JavaLoggingLogHandlerTest {
@Test
public void verifyFormatting() {
var c =
new LoggerConnection() {
final List<InternalLogMessage> messages = new ArrayList<>();
@Override
public void send(InternalLogMessage message) {
messages.add(message);
}
@Override
public LogLevel logLevel() {
throw new UnsupportedOperationException();
}
@Override
public Map<String, LogLevel> loggers() {
throw new UnsupportedOperationException();
}
@Override
public boolean isEnabled(String name, LogLevel level) {
return true;
}
};
var h = new JavaLoggingLogHandler((v1) -> LogLevel.Debug$.MODULE$, c);
LogRecord r = new LogRecord(Level.SEVERE, "Init {0} done");
r.setParameters(new Object[] {"js"});
h.publish(r);
assertEquals("One message: " + c.messages, 1, c.messages.size());
assertEquals("Init js done", c.messages.get(0).message());
}
}

View File

@ -6,7 +6,7 @@ import scala.sys.process._
/** A wrapper for executing the command `cargo`. */
object Cargo {
private val cargoCmd = "cargo"
private val cargoCmd = "cargo"
private var wasCargoOk: Boolean = false
/** Executes the command `cargo $args`. */
@ -48,13 +48,13 @@ object Cargo {
}
/** Checks that cargo is installed. Logs an error and returns false if not. */
def cargoOk(log: ManagedLogger): Boolean = if (wasCargoOk) true else {
def cargoOk(log: ManagedLogger): Boolean = if (wasCargoOk) true
else {
try {
s"$cargoCmd version".!!
wasCargoOk = true
true
}
catch {
} catch {
case _: RuntimeException =>
log.error(s"The command `cargo` isn't on path. Did you install cargo?")
false

View File

@ -1,16 +1,16 @@
package sbt.internal.util
import sbt.internal.LogManager
import sbt.internal.util.ConsoleAppender.{Properties, noSuppressedMessage}
import sbt.internal.util.ConsoleAppender.{noSuppressedMessage, Properties}
object CustomLogManager {
def excludeMsg(msgPrefix: String, level: sbt.Level.Value): LogManager = {
sbt.internal.LogManager.
withLoggers((_, _) => new CustomAppender(level, msgPrefix, ConsoleOut.systemOut))
sbt.internal.LogManager.withLoggers((_, _) =>
new CustomAppender(level, msgPrefix, ConsoleOut.systemOut)
)
}
/**
* Returns a custom ConsoleAppender that will skip log messages starting with a certain prefix.
/** Returns a custom ConsoleAppender that will skip log messages starting with a certain prefix.
*
* The only reason for such appender is to force SBT to keep quiet about certain kind of messages
* coming from the analyzing compiler (wrapper around java compiler) when it tries to match class files
@ -22,8 +22,16 @@ object CustomLogManager {
* @param prefix prefix of log message to exclude (together with log level)
* @param out object representing console output
*/
private final class CustomAppender(excludeLevel: sbt.Level.Value, prefix: String, out: ConsoleOut)
extends ConsoleAppender("out", Properties.from(out, Terminal.isAnsiSupported, Terminal.isAnsiSupported), noSuppressedMessage) {
final private class CustomAppender(
excludeLevel: sbt.Level.Value,
prefix: String,
out: ConsoleOut
) extends ConsoleAppender(
"out",
Properties
.from(out, Terminal.isAnsiSupported, Terminal.isAnsiSupported),
noSuppressedMessage
) {
override def appendLog(level: sbt.Level.Value, message: => String): Unit = {
if (excludeLevel != level || !message.startsWith(prefix)) {
super.appendLog(level, message)

View File

@ -172,31 +172,32 @@ object DistributionPackage {
)
indexStdLib(
stdLibVersion = targetStdlibVersion,
ensoVersion = ensoVersion,
stdLibRoot = distributionRoot / "lib",
ensoExecutable =
distributionRoot / "bin" / "enso",
cacheFactory = cacheFactory.sub("stdlib"),
log = log
stdLibVersion = targetStdlibVersion,
ensoVersion = ensoVersion,
stdLibRoot = distributionRoot / "lib",
ensoExecutable = distributionRoot / "bin" / "enso",
cacheFactory = cacheFactory.sub("stdlib"),
log = log
)
}
def indexStdLib(
stdLibVersion: String,
ensoVersion: String,
stdLibRoot: File,
ensoExecutable: File,
cacheFactory: CacheStoreFactory,
log: Logger
): Unit = {
stdLibVersion: String,
ensoVersion: String,
stdLibRoot: File,
ensoExecutable: File,
cacheFactory: CacheStoreFactory,
log: Logger
): Unit = {
for {
libMajor <- stdLibRoot.listFiles()
libName <- (stdLibRoot / libMajor.getName).listFiles()
libName <- (stdLibRoot / libMajor.getName).listFiles()
} yield {
val cache = cacheFactory.make(s"$libName.$ensoVersion")
val path = (libName / ensoVersion)
Tracked.diffInputs(cache, FileInfo.lastModified)(path.globRecursive("*.enso").get().toSet) { diff =>
val path = libName / ensoVersion
Tracked.diffInputs(cache, FileInfo.lastModified)(
path.globRecursive("*.enso").get().toSet
) { diff =>
if (diff.modified.nonEmpty) {
println(s"Generating index for ${libName} ")
val command = Seq(
@ -206,7 +207,11 @@ object DistributionPackage {
path.toString
)
log.debug(command.mkString(" "))
val exitCode = Process(command, None, "JAVA_OPTS"->"-Dorg.jline.terminal.dumb=true").!
val exitCode = Process(
command,
None,
"JAVA_OPTS" -> "-Dorg.jline.terminal.dumb=true"
).!
if (exitCode != 0) {
throw new RuntimeException(s"Cannot compile $libMajor.$libName.")
}
@ -226,13 +231,13 @@ object DistributionPackage {
val enso = distributionRoot / "bin" / "enso"
log.info(s"Executing $enso ${args.mkString(" ")}")
val pb = new java.lang.ProcessBuilder()
val pb = new java.lang.ProcessBuilder()
val all = new java.util.ArrayList[String]()
all.add(enso.getAbsolutePath())
all.addAll(args.asJava)
pb.command(all)
pb.inheritIO()
val p = pb.start()
val p = pb.start()
val exitCode = p.waitFor()
if (exitCode != 0) {
log.warn(enso + " finished with exit code " + exitCode)

View File

@ -28,7 +28,11 @@ object FrgaalJavaCompiler {
val frgaal = "org.frgaal" % "compiler" % "19.0.0" % "provided"
def compilers(classpath: sbt.Keys.Classpath, sbtCompilers: xsbti.compile.Compilers, javaVersion: String) = {
def compilers(
classpath: sbt.Keys.Classpath,
sbtCompilers: xsbti.compile.Compilers,
javaVersion: String
) = {
// Enable Java 11+ features by invoking Frgaal instead of regular javac
val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
@ -36,40 +40,51 @@ object FrgaalJavaCompiler {
val frgaalModule = FrgaalJavaCompiler.frgaal
val frgaalCheck = (module: ModuleID) =>
module.organization == frgaalModule.organization &&
module.name == frgaalModule.name &&
module.revision == frgaalModule.revision
module.name == frgaalModule.name &&
module.revision == frgaalModule.revision
val frgaalOnClasspath =
classpath.find(f => f.metadata.get(AttributeKey[ModuleID]("moduleID")).map(frgaalCheck).getOrElse(false))
classpath
.find(f =>
f.metadata
.get(AttributeKey[ModuleID]("moduleID"))
.map(frgaalCheck)
.getOrElse(false)
)
.map(_.data.toPath)
if (frgaalOnClasspath.isEmpty) {
throw new RuntimeException("Failed to resolve Frgaal compiler. Aborting!")
}
val frgaalJavac = new FrgaalJavaCompiler(javaHome, frgaalOnClasspath.get, target = javaVersion)
val javaTools = sbt.internal.inc.javac.JavaTools(frgaalJavac, sbtCompilers.javaTools.javadoc())
val frgaalJavac = new FrgaalJavaCompiler(
javaHome,
frgaalOnClasspath.get,
target = javaVersion
)
val javaTools = sbt.internal.inc.javac
.JavaTools(frgaalJavac, sbtCompilers.javaTools.javadoc())
xsbti.compile.Compilers.of(sbtCompilers.scalac, javaTools)
}
/** Helper method to launch programs. */
def launch(
javaHome: Option[Path],
compilerJar: Path,
sources0: Seq[VirtualFile],
options: Seq[String],
output: Output,
log: Logger,
reporter: Reporter,
source: Option[String],
target: String
): Boolean = {
javaHome: Option[Path],
compilerJar: Path,
sources0: Seq[VirtualFile],
options: Seq[String],
output: Output,
log: Logger,
reporter: Reporter,
source: Option[String],
target: String
): Boolean = {
val (jArgs, nonJArgs) = options.partition(_.startsWith("-J"))
val outputOption = CompilerArguments.outputOption(output)
val sources = sources0 map {
case x: PathBasedFile => x.toPath.toAbsolutePath.toString
val outputOption = CompilerArguments.outputOption(output)
val sources = sources0 map { case x: PathBasedFile =>
x.toPath.toAbsolutePath.toString
}
def asPath(a: Any) : Path = a match {
case p: PathBasedFile => p.toPath
case p: Path => p
def asPath(a: Any): Path = a match {
case p: PathBasedFile => p.toPath
case p: Path => p
}
def asCommon(a: Any, b: Any): Path = {
@ -77,7 +92,11 @@ object FrgaalJavaCompiler {
val bp = asPath(b)
var i = 0
while (i < Math.min(ap.getNameCount(), bp.getNameCount()) && ap.getName(i) == bp.getName(i)) {
while (
i < Math.min(ap.getNameCount(), bp.getNameCount()) && ap.getName(
i
) == bp.getName(i)
) {
i += 1;
}
@ -87,16 +106,16 @@ object FrgaalJavaCompiler {
ap
}
val out = output.getSingleOutputAsPath().get()
val out = output.getSingleOutputAsPath().get()
val shared = sources0.fold(out)(asCommon).asInstanceOf[Path]
// searching for $shared/src/main/java or
// $shared/src/test/java or
// $shared/src/bench/java or etc.
def findUnder(depth : Int, dir : Path): Path = {
def findUnder(depth: Int, dir: Path): Path = {
var d = dir
while (d.getNameCount() > depth) {
val threeUp = d.subpath(0, d.getNameCount() - depth)
val threeUp = d.subpath(0, d.getNameCount() - depth)
val relShare = shared.subpath(0, shared.getNameCount())
if (relShare.equals(threeUp)) {
return d
@ -109,10 +128,14 @@ object FrgaalJavaCompiler {
"\nout: " + out + "\nsources: " + sources
)
}
def checkTarget(x : Any) = {
def checkTarget(x: Any) = {
val p = asPath(x)
val namesCheck = for (i <- 0 until p.getNameCount)
yield "target".equals(p.getName(i).toString()) || p.getName(i).toString().endsWith("-windows") || p.getName(i).toString().endsWith("-unix")
val namesCheck =
for (i <- 0 until p.getNameCount)
yield "target".equals(p.getName(i).toString()) || p
.getName(i)
.toString()
.endsWith("-windows") || p.getName(i).toString().endsWith("-unix")
val inATargetDir = namesCheck.exists(x => x)
inATargetDir
}
@ -121,21 +144,38 @@ object FrgaalJavaCompiler {
val in = if (noTarget.isEmpty) {
None
} else {
Some(findUnder(3, noTarget.tail.fold(asPath(noTarget.head))(asCommon).asInstanceOf[Path]))
Some(
findUnder(
3,
noTarget.tail.fold(asPath(noTarget.head))(asCommon).asInstanceOf[Path]
)
)
}
val generated = if (withTarget.isEmpty) {
None
} else {
Some(findUnder(4, withTarget.tail.fold(asPath(withTarget.head))(asCommon).asInstanceOf[Path]))
Some(
findUnder(
4,
withTarget.tail
.fold(asPath(withTarget.head))(asCommon)
.asInstanceOf[Path]
)
)
}
if (shared.toFile().exists()) {
val ensoMarker = new File(shared.toFile(), ENSO_SOURCES)
val ensoConfig = new File(shared.toFile(), ENSO_SOURCES + "-" + out.getFileName().toString())
val ensoConfig = new File(
shared.toFile(),
ENSO_SOURCES + "-" + out.getFileName().toString()
)
val ensoProperties = new java.util.Properties()
def storeArray(name: String, values : Seq[String]) = {
values.zipWithIndex.foreach { case (value, idx) => ensoProperties.setProperty(s"$name.$idx", value) }
def storeArray(name: String, values: Seq[String]) = {
values.zipWithIndex.foreach { case (value, idx) =>
ensoProperties.setProperty(s"$name.$idx", value)
}
}
if (in.isDefined) {
ensoProperties.setProperty("input", in.get.toString())
@ -147,31 +187,42 @@ object FrgaalJavaCompiler {
storeArray("options", options)
source.foreach(v => ensoProperties.setProperty("source", v))
ensoProperties.setProperty("target", target)
javaHome.foreach(v => ensoProperties.setProperty("java.home", v.toString()))
javaHome.foreach(v =>
ensoProperties.setProperty("java.home", v.toString())
)
Using(new FileWriter(ensoConfig)) { w =>
ensoProperties.store(w, "# Enso compiler configuration")
}
Using(new FileWriter(ensoMarker)) { _ =>
}
Using(new FileWriter(ensoMarker)) { _ => }
} else {
throw new IllegalStateException("Cannot write Enso source options to " + shared + " values:\n" +
"options: " + options + " sources0: " + sources +" output: " + output
throw new IllegalStateException(
"Cannot write Enso source options to " + shared + " values:\n" +
"options: " + options + " sources0: " + sources + " output: " + output
)
}
val frgaalOptions: Seq[String] = source.map(v => Seq("-source", v)).getOrElse(Seq()) ++ Seq("-target", target)
val frgaalOptions: Seq[String] =
source.map(v => Seq("-source", v)).getOrElse(Seq()) ++ Seq(
"-target",
target
)
val allArguments = outputOption ++ frgaalOptions ++ nonJArgs ++ sources
withArgumentFile(allArguments) { argsFile =>
// Need to disable standard compiler tools that come with used jdk and replace them
// with the ones provided with Frgaal.
val forkArgs = (jArgs ++ Seq("--limit-modules", "java.base,jdk.zipfs,jdk.internal.vm.compiler.management", "-jar", compilerJar.toString)) :+
s"@${normalizeSlash(argsFile.getAbsolutePath)}"
val exe = getJavaExecutable(javaHome, "java")
val cwd = new File(new File(".").getAbsolutePath).getCanonicalFile
val forkArgs = (jArgs ++ Seq(
"--limit-modules",
"java.base,jdk.zipfs,jdk.internal.vm.compiler.management",
"-jar",
compilerJar.toString
)) :+
s"@${normalizeSlash(argsFile.getAbsolutePath)}"
val exe = getJavaExecutable(javaHome, "java")
val cwd = new File(new File(".").getAbsolutePath).getCanonicalFile
val javacLogger = new JavacLogger(log, reporter, cwd)
var exitCode = -1
var exitCode = -1
try {
exitCode = Process(exe +: forkArgs, cwd) ! javacLogger
} finally {
@ -182,8 +233,7 @@ object FrgaalJavaCompiler {
}
}
/**
* Helper method to create an argument file that we pass to Javac. Gets over the windows
/** Helper method to create an argument file that we pass to Javac. Gets over the windows
* command line length limitation.
* @param args The string arguments to pass to Javac.
* @param f A function which is passed the arg file.
@ -191,7 +241,7 @@ object FrgaalJavaCompiler {
* @return The result of using the argument file.
*/
def withArgumentFile[T](args: Seq[String])(f: File => T): T = {
import IO.{ Newline, withTemporaryDirectory, write }
import IO.{withTemporaryDirectory, write, Newline}
withTemporaryDirectory { tmp =>
val argFile = new File(tmp, "argfile")
write(argFile, args.map(escapeSpaces).mkString(Newline))
@ -200,7 +250,7 @@ object FrgaalJavaCompiler {
}
// javac's argument file seems to allow naive space escaping with quotes. escaping a quote with a backslash does not work
private def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
private def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
private def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
/** create the executable name for java */
def getJavaExecutable(javaHome: Option[Path], name: String): String =
@ -212,14 +262,29 @@ object FrgaalJavaCompiler {
}
/** An implementation of compiling java which forks Frgaal instance. */
final class FrgaalJavaCompiler(javaHome: Option[Path], compilerPath: Path, target: String, source: Option[String] = None) extends XJavaCompiler {
final class FrgaalJavaCompiler(
javaHome: Option[Path],
compilerPath: Path,
target: String,
source: Option[String] = None
) extends XJavaCompiler {
def run(
sources: Array[VirtualFile],
options: Array[String],
output: Output,
incToolOptions: IncToolOptions,
reporter: Reporter,
log: XLogger
): Boolean =
FrgaalJavaCompiler.launch(javaHome, compilerPath, sources, options, output, log, reporter, source, target)
sources: Array[VirtualFile],
options: Array[String],
output: Output,
incToolOptions: IncToolOptions,
reporter: Reporter,
log: XLogger
): Boolean =
FrgaalJavaCompiler.launch(
javaHome,
compilerPath,
sources,
options,
output,
log,
reporter,
source,
target
)
}

View File

@ -43,7 +43,10 @@ object GenerateFlatbuffers {
val allGeneratedSourcesExist = generatedSources.forall(_.exists())
if (schemasDiff.modified.nonEmpty || !allGeneratedSourcesExist) {
schemas foreach { schema =>
val cmdGenerate = Process(flatcCmd, List("--java", "-o", out.getAbsolutePath, schema.toString))
val cmdGenerate = Process(
flatcCmd,
List("--java", "-o", out.getAbsolutePath, schema.toString)
)
// val cmdGenerate =
// s"$flatcCmd --java -o ${out.getAbsolutePath} $schema"
cmdGenerate.!! // Note [flatc Error Reporting]
@ -160,7 +163,16 @@ object GenerateFlatbuffers {
): Set[File] = {
val affectedSources =
schemas.flatMap { schema =>
val cmdMakeRules = Process(flatcCmd, List("-M", "--java", "-o", out.getAbsolutePath, schema.getAbsolutePath))
val cmdMakeRules = Process(
flatcCmd,
List(
"-M",
"--java",
"-o",
out.getAbsolutePath,
schema.getAbsolutePath
)
)
// val cmdMakeRules =
// s"$flatcCmd -M --java -o ${out.getAbsolutePath} ${schema.getAbsolutePath}"
val makeRules =

View File

@ -11,7 +11,7 @@ object LauncherShimsForTest {
val log = state.value.log
Cargo.run(
Seq("build", "-p", "launcher-shims"),
log = log
log = log
)
}
}

View File

@ -128,7 +128,7 @@ object NativeImage {
val cpf = new File(cp.get).getAbsoluteFile()
if (!cpf.exists()) throw new IllegalStateException("Cannot find " + cpf)
val joinCp = pathToJAR.toString + File.pathSeparator + cpf
System.out.println("Class-path: "+ joinCp);
System.out.println("Class-path: " + joinCp);
cmd = cmd ++
Seq("-cp", joinCp) ++

View File

@ -30,7 +30,8 @@ object StdBits {
val baseFilter: NameFilter = new ExactFilter(Configurations.Runtime.name)
val validConfig =
if (ignoreScalaLibrary) baseFilter - new ExactFilter(Configurations.ScalaTool.name)
if (ignoreScalaLibrary)
baseFilter - new ExactFilter(Configurations.ScalaTool.name)
else baseFilter
val configFilter: ConfigurationFilter =
DependencyFilter.configurationFilter(name = validConfig)
@ -49,9 +50,7 @@ object StdBits {
Tracked.diffInputs(dependencyStore, FileInfo.hash)(relevantFiles.toSet) {
report =>
val expectedFileNames =
report.checked.map(
file => file.getName
) ++ baseJarName.toSeq
report.checked.map(file => file.getName) ++ baseJarName.toSeq
for (existing <- IO.listFiles(destination)) {
if (!expectedFileNames.contains(existing.getName)) {
log.info(
@ -85,16 +84,15 @@ object StdBits {
IO.copyFile(jar, destination)
}
/**
* Builds a single standard library package `name`. Should only be used
* in tasks used in local development.
*
* @param name name of the package, see `stdBitsProjects` in build.sbt
* @param root top directory where distribution is being built
* @param cache used for persisting the cached information
* @param log logger used in the task
* @param defaultDevEnsoVersion default `dev` version
*/
/** Builds a single standard library package `name`. Should only be used
* in tasks used in local development.
*
* @param name name of the package, see `stdBitsProjects` in build.sbt
* @param root top directory where distribution is being built
* @param cache used for persisting the cached information
* @param log logger used in the task
* @param defaultDevEnsoVersion default `dev` version
*/
def buildStdLibPackage(
name: String,
root: File,
@ -103,16 +101,18 @@ object StdBits {
defaultDevEnsoVersion: String
) = {
log.info(s"Building standard library package for '$name'")
val prefix = "Standard"
val prefix = "Standard"
val targetPkgRoot = root / "lib" / prefix / name / defaultDevEnsoVersion
val sourceDir = file(s"distribution/lib/$prefix/$name/$defaultDevEnsoVersion")
val sourceDir = file(
s"distribution/lib/$prefix/$name/$defaultDevEnsoVersion"
)
if (!sourceDir.exists) {
throw new RuntimeException("Invalid standard library package " + name)
}
val result = DistributionPackage.copyDirectoryIncremental(
source = file(s"distribution/lib/$prefix/$name/$defaultDevEnsoVersion"),
destination = targetPkgRoot,
cache = cacheFactory.sub("engine-libraries").make(s"$prefix.$name"),
cache = cacheFactory.sub("engine-libraries").make(s"$prefix.$name")
)
if (result) {
log.info(s"Package '$name' has been updated")

View File

@ -7,5 +7,5 @@ addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.7.0")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3")
libraryDependencies += "io.circe" %% "circe-yaml" % "0.14.1"
libraryDependencies += "io.circe" %% "circe-yaml" % "0.14.1"
libraryDependencies += "commons-io" % "commons-io" % "2.11.0"

View File

@ -4,7 +4,11 @@ import java.security.MessageDigest
import sbt.{File, IO, Logger}
import src.main.scala.licenses.frontend.DependencyFilter
import src.main.scala.licenses.{DistributionDescription, FilesHelper, PortablePath}
import src.main.scala.licenses.{
DistributionDescription,
FilesHelper,
PortablePath
}
import scala.util.control.NonFatal
@ -84,7 +88,9 @@ object ReportState {
digest.update(sbtComponent.name.getBytes)
val dependencies =
sbtComponent.licenseReport.licenses.sortBy(_.module.toString)
for (dep <- dependencies.filter(d => DependencyFilter.shouldKeep(d.module))) {
for (
dep <- dependencies.filter(d => DependencyFilter.shouldKeep(d.module))
) {
digest.update(dep.module.toString.getBytes)
digest.update(dep.license.name.getBytes)
}