Replace Jackson library with simple data I/O streams (#8693)

This commit is contained in:
Jaroslav Tulach 2024-01-08 13:17:37 +01:00 committed by GitHub
parent b30f558585
commit d86c6c472c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 91 additions and 83 deletions

View File

@ -1557,7 +1557,7 @@ lazy val runtime = (project in file("engine/runtime"))
), // show timings for individual tests
scalacOptions += "-Ymacro-annotations",
scalacOptions ++= Seq("-Ypatmat-exhaust-depth", "off"),
libraryDependencies ++= jmh ++ jaxb ++ circe ++ GraalVM.langsPkgs ++ Seq(
libraryDependencies ++= jmh ++ jaxb ++ GraalVM.langsPkgs ++ Seq(
"org.apache.commons" % "commons-lang3" % commonsLangVersion,
"org.apache.tika" % "tika-core" % tikaVersion,
"org.graalvm.polyglot" % "polyglot" % graalMavenPackagesVersion % "provided",
@ -1571,7 +1571,6 @@ lazy val runtime = (project in file("engine/runtime"))
"org.scalactic" %% "scalactic" % scalacticVersion % Test,
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"org.graalvm.truffle" % "truffle-api" % graalMavenPackagesVersion % Benchmark,
"org.typelevel" %% "cats-core" % catsVersion,
"junit" % "junit" % junitVersion % Test,
"com.github.sbt" % "junit-interface" % junitIfVersion % Test,
"org.hamcrest" % "hamcrest-all" % hamcrestVersion % Test,

View File

@ -4,8 +4,6 @@ import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
@ -175,8 +173,10 @@ public abstract class Cache<T, M extends Cache.Metadata> {
* @param blobDigest digest of serialized data
* @param entry data to serialize
* @return raw bytes representing serialized metadata
* @throws java.io.IOException in case of I/O error
*/
protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry);
protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry)
throws IOException;
/**
* Loads cache for this data, if possible.
@ -333,9 +333,12 @@ public abstract class Cache<T, M extends Cache.Metadata> {
* De-serializes raw bytes to data's metadata.
*
* @param bytes raw bytes representing metadata
* @param logger logger to use
* @return non-empty metadata, if de-serialization was successful
* @throws IOException in case of I/O error
*/
protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger);
protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger)
throws IOException;
/**
* Compute digest of cache's data
@ -509,8 +512,6 @@ public abstract class Cache<T, M extends Cache.Metadata> {
}
}
protected static final Charset metadataCharset = StandardCharsets.UTF_8;
/**
* Roots encapsulates two possible locations where caches can be stored.
*

View File

@ -1,11 +1,12 @@
package org.enso.interpreter.caches;
import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
@ -40,14 +41,9 @@ public final class ImportExportCache
}
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
try {
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry)
    throws IOException {
  // Only the two digests participate in the metadata encoding; the cached
  // bindings entry itself is serialized separately.
  var meta = new Metadata(sourceDigest, blobDigest);
  return meta.toBytes();
}
@Override
@ -60,15 +56,9 @@ public final class ImportExportCache
}
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize library's metadata.", e);
return Optional.empty();
}
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
    throws IOException {
  // Decode via the Metadata data-stream format; I/O failures now propagate to
  // the caller instead of being logged here (logger kept for the interface).
  var parsed = Metadata.read(bytes);
  return Optional.of(parsed);
}
@Override
@ -172,16 +162,28 @@ public final class ImportExportCache
MapToBindings bindings,
Optional<List<SourceFile<TruffleFile>>> sources) {}
public record Metadata(
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}
public record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
  /**
   * Serializes this metadata as two modified-UTF-8 strings: source hash followed by blob hash.
   *
   * @return raw bytes of the encoded metadata, readable by {@link #read(byte[])}
   * @throws IOException in case of I/O error
   */
  byte[] toBytes() throws IOException {
    var os = new ByteArrayOutputStream();
    // Close (and thereby flush) the DataOutputStream BEFORE snapshotting the
    // byte array; taking toByteArray() while the writer is still open only
    // works because DataOutputStream happens to be unbuffered. Closing a
    // ByteArrayOutputStream is a no-op, so no resource is leaked.
    try (var dos = new DataOutputStream(os)) {
      dos.writeUTF(sourceHash());
      dos.writeUTF(blobHash());
    }
    return os.toByteArray();
  }

  /**
   * De-serializes metadata previously produced by {@link #toBytes()}.
   *
   * @param arr raw bytes of the encoded metadata
   * @return the decoded metadata
   * @throws IOException in case of I/O error or malformed input
   */
  static Metadata read(byte[] arr) throws IOException {
    try (var is = new ByteArrayInputStream(arr);
        var dis = new DataInputStream(is)) {
      return new Metadata(dis.readUTF(), dis.readUTF());
    }
  }
}
private static final String bindingsCacheDataExtension = ".bindings";
private static final String bindingsCacheMetadataExtension = ".bindings.meta";
private static final ObjectMapper objectMapper = new ObjectMapper();
@Persistable(clazz = BindingsMap.PolyglotSymbol.class, id = 33006)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$ModuleReference$Abstract.class,

View File

@ -1,12 +1,14 @@
package org.enso.interpreter.caches;
import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleLogger;
import com.oracle.truffle.api.source.Source;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
@ -33,13 +35,9 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
}
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
try {
return objectMapper.writeValueAsBytes(
new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry)
    throws IOException {
  // Record the compilation stage next to the digests so the cache can be
  // validated against the expected stage on reload.
  var stage = entry.compilationStage().toString();
  var meta = new Metadata(sourceDigest, blobDigest, stage);
  return meta.toBytes();
}
@Override
@ -69,14 +67,9 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
}
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize module's metadata.", e);
return Optional.empty();
}
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
    throws IOException {
  // Decode via the Metadata data-stream format; I/O failures now propagate to
  // the caller instead of being logged here (logger kept for the interface).
  var parsed = Metadata.read(bytes);
  return Optional.of(parsed);
}
private Optional<String> computeDigestOfModuleSources(Source source) {
@ -85,7 +78,7 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
if (source.hasBytes()) {
sourceBytes = source.getBytes().toByteArray();
} else {
sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
sourceBytes = source.getCharacters().toString().getBytes(StandardCharsets.UTF_8);
}
return Optional.of(computeDigestFromBytes(sourceBytes));
} else {
@ -171,18 +164,30 @@ public final class ModuleCache extends Cache<ModuleCache.CachedModule, ModuleCac
public record CachedModule(Module moduleIR, CompilationStage compilationStage, Source source) {}
public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash,
@JsonProperty("compilation_stage") String compilationStage)
implements Cache.Metadata {}
public record Metadata(String sourceHash, String blobHash, String compilationStage)
    implements Cache.Metadata {
  /**
   * Serializes this metadata as three modified-UTF-8 strings: source hash, blob hash, and
   * compilation stage, in that order.
   *
   * @return raw bytes of the encoded metadata, readable by {@link #read(byte[])}
   * @throws IOException in case of I/O error
   */
  byte[] toBytes() throws IOException {
    var os = new ByteArrayOutputStream();
    // Close (and thereby flush) the DataOutputStream BEFORE snapshotting the
    // byte array; taking toByteArray() while the writer is still open only
    // works because DataOutputStream happens to be unbuffered. Closing a
    // ByteArrayOutputStream is a no-op, so no resource is leaked.
    try (var dos = new DataOutputStream(os)) {
      dos.writeUTF(sourceHash());
      dos.writeUTF(blobHash());
      dos.writeUTF(compilationStage());
    }
    return os.toByteArray();
  }

  /**
   * De-serializes metadata previously produced by {@link #toBytes()}.
   *
   * @param arr raw bytes of the encoded metadata
   * @return the decoded metadata
   * @throws IOException in case of I/O error or malformed input
   */
  static Metadata read(byte[] arr) throws IOException {
    try (var is = new ByteArrayInputStream(arr);
        var dis = new DataInputStream(is)) {
      return new Metadata(dis.readUTF(), dis.readUTF(), dis.readUTF());
    }
  }
}
private static final String irCacheDataExtension = ".ir";
private static final String irCacheMetadataExtension = ".meta";
private static final ObjectMapper objectMapper = new ObjectMapper();
@SuppressWarnings("unchecked")
private static <T extends Exception> T raise(Class<T> cls, Exception e) throws T {
throw (T) e;

View File

@ -1,13 +1,12 @@
package org.enso.interpreter.caches;
import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
@ -27,8 +26,6 @@ public final class SuggestionsCache
private static final String SUGGESTIONS_CACHE_DATA_EXTENSION = ".suggestions";
private static final String SUGGESTIONS_CACHE_METADATA_EXTENSION = ".suggestions.meta";
private static final ObjectMapper objectMapper = new ObjectMapper();
final LibraryName libraryName;
public SuggestionsCache(LibraryName libraryName) {
@ -40,14 +37,9 @@ public final class SuggestionsCache
}
@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry) {
try {
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry)
    throws IOException {
  // Only the two digests participate in the metadata encoding; the cached
  // suggestions entry itself is serialized separately.
  var meta = new Metadata(sourceDigest, blobDigest);
  return meta.toBytes();
}
@Override
@ -65,14 +57,9 @@ public final class SuggestionsCache
}
@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, SuggestionsCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize suggestions' metadata.", e);
return Optional.empty();
}
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
    throws IOException {
  // Decode via the Metadata data-stream format; I/O failures now propagate to
  // the caller instead of being logged here (logger kept for the interface).
  var parsed = Metadata.read(bytes);
  return Optional.of(parsed);
}
@Override
@ -173,7 +160,21 @@ public final class SuggestionsCache
}
}
record Metadata(
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}
record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
  /**
   * Serializes this metadata as two modified-UTF-8 strings: source hash followed by blob hash.
   *
   * @return raw bytes of the encoded metadata, readable by {@link #read(byte[])}
   * @throws IOException in case of I/O error
   */
  byte[] toBytes() throws IOException {
    var os = new ByteArrayOutputStream();
    // Close (and thereby flush) the DataOutputStream BEFORE snapshotting the
    // byte array; taking toByteArray() while the writer is still open only
    // works because DataOutputStream happens to be unbuffered. Closing a
    // ByteArrayOutputStream is a no-op, so no resource is leaked.
    try (var dos = new DataOutputStream(os)) {
      dos.writeUTF(sourceHash());
      dos.writeUTF(blobHash());
    }
    return os.toByteArray();
  }

  /**
   * De-serializes metadata previously produced by {@link #toBytes()}.
   *
   * @param arr raw bytes of the encoded metadata
   * @return the decoded metadata
   * @throws IOException in case of I/O error or malformed input
   */
  static Metadata read(byte[] arr) throws IOException {
    try (var is = new ByteArrayInputStream(arr);
        var dis = new DataInputStream(is)) {
      return new Metadata(dis.readUTF(), dis.readUTF());
    }
  }
}
}