Make EnsoOutputStream fast again! (#10559)

Will fix #10550 by properly using `@TruffleBoundary` annotation
This commit is contained in:
Jaroslav Tulach 2024-07-16 19:30:56 +02:00 committed by GitHub
parent 2442ebc52e
commit a33f632a73
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 43 additions and 7 deletions

View File

@@ -103,9 +103,9 @@ public final class EnsoFile implements EnsoObject {
return ArrayLikeHelpers.wrapStrings(MEMBERS);
}
@TruffleBoundary
@ExportMessage
Object invokeMember(
static Object invokeMember(
EnsoOutputStream os,
String name,
Object[] args,
@Cached ArrayLikeLengthNode lengthNode,
@@ -130,20 +130,28 @@ public final class EnsoFile implements EnsoObject {
throw ArityException.create(1, 3, args.length);
}
}
var buf = new byte[8192];
var at = 0;
for (long i = from; i < to; i++) {
var elem = atNode.executeAt(args[0], i);
var byt = iop.asInt(elem);
os.write(byt);
buf[at++] = iop.asByte(elem);
if (at == buf.length) {
os.write(buf, 0, buf.length);
at = 0;
}
}
yield this;
if (at > 0) {
os.write(buf, 0, at);
}
yield os;
}
case "flush" -> {
os.flush();
yield this;
yield os;
}
case "close" -> {
os.close();
yield this;
yield os;
}
default -> throw UnknownIdentifierException.create(name);
};
@@ -155,6 +163,21 @@ public final class EnsoFile implements EnsoObject {
}
}
/**
 * Writes {@code length} bytes of {@code buf}, starting at {@code offset}, to the
 * wrapped stream (field {@code os}, declared elsewhere in this class).
 *
 * <p>{@code @TruffleBoundary} keeps this blocking I/O call out of Truffle's
 * partial evaluation — the point of this change (#10550).
 *
 * @throws IOException if the underlying stream fails
 */
@TruffleBoundary
final void write(byte[] buf, int offset, int length) throws IOException {
os.write(buf, offset, length);
}
/**
 * Flushes the wrapped stream, behind a {@code @TruffleBoundary} so the
 * blocking call is not partially evaluated.
 *
 * @throws IOException if the underlying stream fails
 */
@TruffleBoundary
final void flush() throws IOException {
os.flush();
}
/**
 * Closes the wrapped stream, behind a {@code @TruffleBoundary} so the
 * blocking call is not partially evaluated.
 *
 * @throws IOException if the underlying stream fails
 */
@TruffleBoundary
final void close() throws IOException {
os.close();
}
@Override
public String toString() {
return "EnsoOutputStream";

View File

@@ -6,12 +6,25 @@ import Standard.Examples
# Benchmark configuration: 2 warm-up and 2 measurement phases, 5 iterations
# each — NOTE(review): confirm phase_conf argument order against the Bench API.
options = Bench.options . set_warmup (Bench.phase_conf 2 5) . set_measure (Bench.phase_conf 2 5)
# Holder for the benchmark fixture; the `~` on the field presumably suspends
# evaluation so the CSV is only read when the table is first accessed —
# keeping the read out of benchmark setup time. TODO confirm `~` semantics.
type Lazy_Data
private Write ~table:Table
# Builds two benchmark groups over the 2500-row example CSV:
#   * Read_csv_file  — parses the file and checks the row count.
#   * Write_csv_file — writes the (lazily read) table to a temporary file
#     and checks the file grew past a size floor; this exercises the
#     EnsoOutputStream fast path introduced by this commit.
# NOTE(review): indentation of this block was flattened in this capture —
# restore the original nesting before compiling.
collect_benches = Bench.build builder->
assert Examples.csv_2500_rows.exists "Expecting the file to exist at "+Examples.csv_2500_rows.path
write_data = Lazy_Data.Write (Examples.csv_2500_rows . read)
builder.group ("Read_csv_file") options group_builder->
group_builder.specify "data_csv" <|
table = Examples.csv_2500_rows . read
assert (table.row_count == 2500) "Expecting two and half thousand rows, but got "+table.row_count.to_text
builder.group ("Write_csv_file") options group_builder->
group_builder.specify "data_csv" <|
file = File.create_temporary_file "data_csv"
Panic.with_finalizer file.delete <|
assert (file.size == 0) "File "+file.to_text+" shall be empty, size: "+file.size.to_text
write_data.table . write file (..Delimited delimiter="," headers=False)
assert (file.size > 111111) "File "+file.to_text+" exists now, size: "+file.size.to_text
# Entry point: run all benchmark groups defined above.
main = collect_benches . run_main