Replace Jackson library with simple data I/O streams (#8693)

Jaroslav Tulach 2024-01-08 13:17:37 +01:00 committed by GitHub
parent b30f558585
commit d86c6c472c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 91 additions and 83 deletions
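
The pattern adopted across the caches below: instead of mapping each metadata record to JSON through Jackson's ObjectMapper, the record writes its fields with a DataOutputStream and reads them back in the same order with a DataInputStream. A minimal, self-contained sketch of that idea, using an illustrative record rather than the exact classes changed in this commit:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Illustrative stand-in for the cache metadata records touched by this commit.
    record ExampleMetadata(String sourceHash, String blobHash) {
      byte[] toBytes() throws IOException {
        try (var os = new ByteArrayOutputStream();
            var dos = new DataOutputStream(os)) {
          dos.writeUTF(sourceHash()); // fields are written in a fixed order...
          dos.writeUTF(blobHash());
          return os.toByteArray();
        }
      }

      static ExampleMetadata read(byte[] bytes) throws IOException {
        try (var is = new ByteArrayInputStream(bytes);
            var dis = new DataInputStream(is)) {
          // ...and must be read back in exactly the same order
          return new ExampleMetadata(dis.readUTF(), dis.readUTF());
        }
      }
    }

A round trip is then simply ExampleMetadata.read(meta.toBytes()); the bytes carry no field names or schema, only the values in declaration order.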

build.sbt

@@ -1557,7 +1557,7 @@ lazy val runtime = (project in file("engine/runtime"))
     ), // show timings for individual tests
     scalacOptions += "-Ymacro-annotations",
     scalacOptions ++= Seq("-Ypatmat-exhaust-depth", "off"),
-    libraryDependencies ++= jmh ++ jaxb ++ circe ++ GraalVM.langsPkgs ++ Seq(
+    libraryDependencies ++= jmh ++ jaxb ++ GraalVM.langsPkgs ++ Seq(
       "org.apache.commons" % "commons-lang3" % commonsLangVersion,
       "org.apache.tika" % "tika-core" % tikaVersion,
       "org.graalvm.polyglot" % "polyglot" % graalMavenPackagesVersion % "provided",
@@ -1571,7 +1571,6 @@ lazy val runtime = (project in file("engine/runtime"))
       "org.scalactic" %% "scalactic" % scalacticVersion % Test,
       "org.scalatest" %% "scalatest" % scalatestVersion % Test,
       "org.graalvm.truffle" % "truffle-api" % graalMavenPackagesVersion % Benchmark,
-      "org.typelevel" %% "cats-core" % catsVersion,
       "junit" % "junit" % junitVersion % Test,
       "com.github.sbt" % "junit-interface" % junitIfVersion % Test,
       "org.hamcrest" % "hamcrest-all" % hamcrestVersion % Test,

Cache.java

@@ -4,8 +4,6 @@ import com.oracle.truffle.api.TruffleFile;
 import com.oracle.truffle.api.TruffleLogger;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
@@ -175,8 +173,10 @@ public abstract class Cache<T, M extends Cache.Metadata> {
    * @param blobDigest digest of serialized data
    * @param entry data to serialize
    * @return raw bytes representing serialized metadata
+   * @throws java.io.IOException in case of I/O error
    */
-  protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry);
+  protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry)
+      throws IOException;

   /**
    * Loads cache for this data, if possible.
@@ -333,9 +333,12 @@ public abstract class Cache<T, M extends Cache.Metadata> {
    * De-serializes raw bytes to data's metadata.
    *
    * @param bytes raw bytes representing metadata
+   * @param logger logger to use
    * @return non-empty metadata, if de-serialization was successful
+   * @throws IOException in case of I/O error
    */
-  protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger);
+  protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger)
+      throws IOException;

   /**
    * Compute digest of cache's data
@@ -509,8 +512,6 @@ public abstract class Cache<T, M extends Cache.Metadata> {
     }
   }

-  protected static final Charset metadataCharset = StandardCharsets.UTF_8;
-
   /**
    * Roots encapsulates two possible locations where caches can be stored.
    *

ImportExportCache.java

@@ -1,11 +1,12 @@
 package org.enso.interpreter.caches;

 import buildinfo.Info;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.oracle.truffle.api.TruffleFile;
 import com.oracle.truffle.api.TruffleLogger;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
@@ -40,14 +41,9 @@ public final class ImportExportCache
   }

   @Override
-  protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
-    try {
-      return objectMapper
-          .writeValueAsString(new Metadata(sourceDigest, blobDigest))
-          .getBytes(metadataCharset);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
+  protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry)
+      throws IOException {
+    return new Metadata(sourceDigest, blobDigest).toBytes();
   }

   @Override
@@ -60,15 +56,9 @@ public final class ImportExportCache
   }

   @Override
-  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
-    var maybeJsonString = new String(bytes, Cache.metadataCharset);
-    var mapper = new ObjectMapper();
-    try {
-      return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
-    } catch (JsonProcessingException e) {
-      logger.log(logLevel, "Failed to deserialize library's metadata.", e);
-      return Optional.empty();
-    }
+  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
+      throws IOException {
+    return Optional.of(Metadata.read(bytes));
   }

   @Override
@@ -172,16 +162,28 @@ public final class ImportExportCache
       MapToBindings bindings,
       Optional<List<SourceFile<TruffleFile>>> sources) {}

-  public record Metadata(
-      @JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
-      implements Cache.Metadata {}
+  public record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
+    byte[] toBytes() throws IOException {
+      try (var os = new ByteArrayOutputStream();
+          var dos = new DataOutputStream(os)) {
+        dos.writeUTF(sourceHash());
+        dos.writeUTF(blobHash());
+        return os.toByteArray();
+      }
+    }
+
+    static Metadata read(byte[] arr) throws IOException {
+      try (var is = new ByteArrayInputStream(arr);
+          var dis = new DataInputStream(is)) {
+        return new Metadata(dis.readUTF(), dis.readUTF());
+      }
+    }
+  }

   private static final String bindingsCacheDataExtension = ".bindings";
   private static final String bindingsCacheMetadataExtension = ".bindings.meta";
-  private static final ObjectMapper objectMapper = new ObjectMapper();

   @Persistable(clazz = BindingsMap.PolyglotSymbol.class, id = 33006)
   @Persistable(
       clazz = org.enso.compiler.data.BindingsMap$ModuleReference$Abstract.class,
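
Because the metadata is now a hand-rolled binary layout rather than self-describing JSON, a same-package round-trip test is a cheap safety net. A hypothetical sketch, not part of this commit, assuming a JUnit 4 test placed in org.enso.interpreter.caches so the package-private toBytes()/read() members are visible:

    package org.enso.interpreter.caches;

    import static org.junit.Assert.assertEquals;

    import java.io.IOException;
    import org.junit.Test;

    // Hypothetical test: checks that Metadata survives the binary round trip.
    public class ImportExportCacheMetadataRoundTripTest {
      @Test
      public void metadataSurvivesRoundTrip() throws IOException {
        var original = new ImportExportCache.Metadata("source-digest", "blob-digest");
        var restored = ImportExportCache.Metadata.read(original.toBytes());
        assertEquals(original, restored); // records compare by component values
      }
    }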

ModuleCache.java

@@ -1,12 +1,14 @@
 package org.enso.interpreter.caches;

 import buildinfo.Info;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.oracle.truffle.api.TruffleLogger;
 import com.oracle.truffle.api.source.Source;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Optional;
@@ -33,13 +35,9 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
   }

   @Override
-  protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
-    try {
-      return objectMapper.writeValueAsBytes(
-          new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
+  protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry)
+      throws IOException {
+    return new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()).toBytes();
   }

   @Override
@@ -69,14 +67,9 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
   }

   @Override
-  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
-    var maybeJsonString = new String(bytes, Cache.metadataCharset);
-    try {
-      return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
-    } catch (JsonProcessingException e) {
-      logger.log(logLevel, "Failed to deserialize module's metadata.", e);
-      return Optional.empty();
-    }
+  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
+      throws IOException {
+    return Optional.of(Metadata.read(bytes));
   }

   private Optional<String> computeDigestOfModuleSources(Source source) {
@@ -85,7 +78,7 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
       if (source.hasBytes()) {
         sourceBytes = source.getBytes().toByteArray();
       } else {
-        sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
+        sourceBytes = source.getCharacters().toString().getBytes(StandardCharsets.UTF_8);
       }
       return Optional.of(computeDigestFromBytes(sourceBytes));
     } else {
@@ -171,18 +164,30 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
   public record CachedModule(Module moduleIR, CompilationStage compilationStage, Source source) {}

-  public record Metadata(
-      @JsonProperty("source_hash") String sourceHash,
-      @JsonProperty("blob_hash") String blobHash,
-      @JsonProperty("compilation_stage") String compilationStage)
-      implements Cache.Metadata {}
+  public record Metadata(String sourceHash, String blobHash, String compilationStage)
+      implements Cache.Metadata {
+    byte[] toBytes() throws IOException {
+      try (var os = new ByteArrayOutputStream();
+          var dos = new DataOutputStream(os)) {
+        dos.writeUTF(sourceHash());
+        dos.writeUTF(blobHash());
+        dos.writeUTF(compilationStage());
+        return os.toByteArray();
+      }
+    }
+
+    static Metadata read(byte[] arr) throws IOException {
+      try (var is = new ByteArrayInputStream(arr);
+          var dis = new DataInputStream(is)) {
+        return new Metadata(dis.readUTF(), dis.readUTF(), dis.readUTF());
+      }
+    }
+  }

   private static final String irCacheDataExtension = ".ir";
   private static final String irCacheMetadataExtension = ".meta";
-  private static final ObjectMapper objectMapper = new ObjectMapper();

   @SuppressWarnings("unchecked")
   private static <T extends Exception> T raise(Class<T> cls, Exception e) throws T {
     throw (T) e;
SuggestionsCache.java

@@ -1,13 +1,12 @@
 package org.enso.interpreter.caches;

 import buildinfo.Info;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.oracle.truffle.api.TruffleFile;
 import com.oracle.truffle.api.TruffleLogger;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
@@ -27,8 +26,6 @@ public final class SuggestionsCache
   private static final String SUGGESTIONS_CACHE_DATA_EXTENSION = ".suggestions";
   private static final String SUGGESTIONS_CACHE_METADATA_EXTENSION = ".suggestions.meta";

-  private static final ObjectMapper objectMapper = new ObjectMapper();
-
   final LibraryName libraryName;

   public SuggestionsCache(LibraryName libraryName) {
@@ -40,14 +37,9 @@ public final class SuggestionsCache
   }

   @Override
-  protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry) {
-    try {
-      return objectMapper
-          .writeValueAsString(new Metadata(sourceDigest, blobDigest))
-          .getBytes(metadataCharset);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
+  protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry)
+      throws IOException {
+    return new Metadata(sourceDigest, blobDigest).toBytes();
   }

   @Override
@@ -65,14 +57,9 @@ public final class SuggestionsCache
   }

   @Override
-  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
-    var maybeJsonString = new String(bytes, Cache.metadataCharset);
-    try {
-      return Optional.of(objectMapper.readValue(maybeJsonString, SuggestionsCache.Metadata.class));
-    } catch (JsonProcessingException e) {
-      logger.log(logLevel, "Failed to deserialize suggestions' metadata.", e);
-      return Optional.empty();
-    }
+  protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
+      throws IOException {
+    return Optional.of(Metadata.read(bytes));
   }

   @Override
@@ -173,7 +160,21 @@ public final class SuggestionsCache
     }
   }

-  record Metadata(
-      @JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
-      implements Cache.Metadata {}
+  record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
+    byte[] toBytes() throws IOException {
+      try (var os = new ByteArrayOutputStream();
+          var dos = new DataOutputStream(os)) {
+        dos.writeUTF(sourceHash());
+        dos.writeUTF(blobHash());
+        return os.toByteArray();
+      }
+    }
+
+    static Metadata read(byte[] arr) throws IOException {
+      try (var is = new ByteArrayInputStream(arr);
+          var dis = new DataInputStream(is)) {
+        return new Metadata(dis.readUTF(), dis.readUTF());
+      }
+    }
+  }
 }
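
A note on the resulting on-disk layout: DataOutputStream.writeUTF prefixes each string with a two-byte big-endian length and then writes it in modified UTF-8 (so a single field may not exceed 65535 encoded bytes), and DataInputStream.readUTF consumes exactly one such length-prefixed field. A throwaway sketch (values made up) that prints the bytes this produces:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;

    public class WriteUtfLayout {
      public static void main(String[] args) throws Exception {
        var os = new ByteArrayOutputStream();
        try (var dos = new DataOutputStream(os)) {
          dos.writeUTF("abc"); // 2-byte length prefix followed by the encoded characters
        }
        for (byte b : os.toByteArray()) {
          System.out.printf("%02x ", b); // prints: 00 03 61 62 63
        }
        System.out.println();
      }
    }

Stale metadata files written in the old JSON format will therefore generally fail to parse under the new readers (typically with an EOFException or UTFDataFormatException from readUTF), and the metadataFromBytes overrides now propagate that as an IOException instead of logging and returning Optional.empty().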