400x faster with linear hashing of the hash map entries (#8425)
Fixes #5233 by removing `EconomicMap` & co. and using good old plain _linear hashing_. Fixes #8090 by introducing `StorageEntry.removed()` rather than copying the builder on each removal.
This commit is contained in:
parent
27072622f7
commit
81f06456bf
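
The two sentences of the description carry the whole design: all entries live in a single flat array addressed by hash with linear probing, and removal merely stamps an entry with the generation at which it disappeared, so snapshots taken earlier keep reading the same array. A minimal, hypothetical sketch of that technique in plain Java (LinearHashSketch is invented for illustration; the actual builder, in the diff below, routes hashing and equality through Truffle nodes):

// Sketch only: open addressing + linear probing; remove() stamps a generation
// instead of deleting, so older readers still see the entry.
final class LinearHashSketch {
  record Entry(Object key, Object value, int added, int removed) {
    boolean isVisible(int generation) {
      return added <= generation && generation < removed;
    }
  }

  private final Entry[] byHash = new Entry[16];
  private int generation;

  void put(Object key, Object value) {
    int gen = ++generation;
    int at = Math.abs(key.hashCode()) % byHash.length;
    for (int i = 0; i < byHash.length; i++) {
      Entry e = byHash[at];
      if (e == null) {
        byHash[at] = new Entry(key, value, gen, Integer.MAX_VALUE);
        return;
      }
      if (gen < e.removed() && e.key().equals(key)) {
        // "non-destructive" removal: keep the slot, stamp the generation
        byHash[at] = new Entry(e.key(), e.value(), e.added(), gen);
      }
      at = (at + 1) % byHash.length; // linear probing: advance to the next slot
    }
    throw new IllegalStateException("full; a real implementation rehashes at 75%");
  }

  Object get(Object key, int atGeneration) {
    int at = Math.abs(key.hashCode()) % byHash.length;
    for (int i = 0; i < byHash.length; i++) {
      Entry e = byHash[at];
      if (e == null) {
        return null;
      }
      if (e.isVisible(atGeneration) && e.key().equals(key)) {
        return e.value();
      }
      at = (at + 1) % byHash.length;
    }
    return null;
  }
}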
[HashCodeNode.java]
@@ -55,6 +55,8 @@ import org.enso.interpreter.runtime.scope.ModuleScope;
 import org.enso.interpreter.runtime.state.State;
+import org.enso.polyglot.common_utils.Core_Text_Utils;
 
+import com.oracle.truffle.api.dsl.Fallback;
 
 /**
  * Implements {@code hash_code} functionality.
  *
@@ -630,6 +632,11 @@ public abstract class HashCodeNode extends Node {
     return hashCodeNode.execute(interop.toDisplayString(hostFunction));
   }
 
+  @Fallback
+  long fallbackConstant(Object any) {
+    return 5343210;
+  }
+
   static boolean isAtom(Object object) {
     return object instanceof Atom;
   }
[EnsoHashMap.java]
@@ -2,6 +2,7 @@ package org.enso.interpreter.runtime.data.hash;
 
 import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
 import com.oracle.truffle.api.dsl.Cached;
+import com.oracle.truffle.api.dsl.Cached.Shared;
 import com.oracle.truffle.api.interop.InteropLibrary;
 import com.oracle.truffle.api.interop.UnknownKeyException;
 import com.oracle.truffle.api.interop.UnsupportedMessageException;
@@ -16,7 +17,9 @@ import org.enso.interpreter.runtime.EnsoContext;
 import org.enso.interpreter.runtime.data.EnsoObject;
 import org.enso.interpreter.runtime.data.Type;
 import org.enso.interpreter.runtime.data.hash.EnsoHashMapBuilder.StorageEntry;
+import org.enso.interpreter.runtime.data.text.Text;
 import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
+import org.enso.interpreter.runtime.error.PanicException;
 import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
 
 /**
@@ -34,36 +37,32 @@ import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
 @Builtin(stdlibName = "Standard.Base.Data.Map.Map", name = "Map")
 public final class EnsoHashMap implements EnsoObject {
   private final EnsoHashMapBuilder mapBuilder;
-  /**
-   * Size of this Map. Basically an index into {@link EnsoHashMapBuilder}'s storage. See {@link
-   * #isEntryInThisMap(StorageEntry)}.
-   */
-  private final int snapshotSize;
-  /**
-   * True iff {@code insert} method was already called. If insert was already called, and we are
-   * calling {@code insert} again, the {@link #mapBuilder} should be duplicated for the newly
-   * created Map.
-   */
-  private boolean insertCalled;
+  private final int generation;
+  private final int size;
 
   private Object cachedVectorRepresentation;
 
-  private EnsoHashMap(EnsoHashMapBuilder mapBuilder, int snapshotSize) {
+  private EnsoHashMap(EnsoHashMapBuilder mapBuilder) {
     this.mapBuilder = mapBuilder;
-    this.snapshotSize = snapshotSize;
-    assert snapshotSize <= mapBuilder.getSize();
+    this.generation = mapBuilder.generation();
+    this.size = mapBuilder.size();
   }
 
-  static EnsoHashMap createWithBuilder(EnsoHashMapBuilder mapBuilder, int snapshotSize) {
-    return new EnsoHashMap(mapBuilder, snapshotSize);
+  static EnsoHashMap createWithBuilder(EnsoHashMapBuilder mapBuilder) {
+    return new EnsoHashMap(mapBuilder);
   }
 
-  static EnsoHashMap createEmpty(HashCodeNode hashCodeNode, EqualsNode equalsNode) {
-    return new EnsoHashMap(EnsoHashMapBuilder.create(hashCodeNode, equalsNode), 0);
+  static EnsoHashMap createEmpty() {
+    return new EnsoHashMap(EnsoHashMapBuilder.create());
   }
 
-  EnsoHashMapBuilder getMapBuilder() {
-    return mapBuilder;
+  EnsoHashMapBuilder getMapBuilder(
+      boolean readOnly, HashCodeNode hashCodeNode, EqualsNode equalsNode) {
+    if (readOnly) {
+      return mapBuilder;
+    } else {
+      return mapBuilder.asModifiable(generation, hashCodeNode, equalsNode);
+    }
   }
 
   Object getCachedVectorRepresentation() {
@@ -72,37 +71,24 @@ public final class EnsoHashMap implements EnsoObject {
 
   Object getCachedVectorRepresentation(ConditionProfile isNotCachedProfile) {
     if (isNotCachedProfile.profile(cachedVectorRepresentation == null)) {
-      Object[] keys = new Object[snapshotSize];
-      Object[] values = new Object[snapshotSize];
-      int arrIdx = 0;
-      for (StorageEntry entry : mapBuilder.getStorage().getValues()) {
-        if (entry.index() < snapshotSize) {
-          keys[arrIdx] = entry.key();
-          values[arrIdx] = entry.value();
-          arrIdx++;
-        }
+      var keys = new Object[size];
+      var values = new Object[size];
+      var at = 0;
+      for (var entry : mapBuilder.getEntries(generation, size)) {
+        keys[at] = entry.key();
+        values[at] = entry.value();
+        at++;
       }
-      cachedVectorRepresentation =
-          ArrayLikeHelpers.asVectorFromArray(
-              HashEntriesVector.createFromKeysAndValues(keys, values));
+      var pairs = HashEntriesVector.createFromKeysAndValues(keys, values);
+      cachedVectorRepresentation = ArrayLikeHelpers.asVectorFromArray(pairs);
     }
     return cachedVectorRepresentation;
   }
 
-  public boolean isInsertCalled() {
-    return insertCalled;
-  }
-
-  public void setInsertCalled() {
-    assert !insertCalled : "setInsertCalled should be called at most once";
-    insertCalled = true;
-  }
-
   @Builtin.Method
   @Builtin.Specialize
-  public static EnsoHashMap empty(
-      @Cached HashCodeNode hashCodeNode, @Cached EqualsNode equalsNode) {
-    return createEmpty(hashCodeNode, equalsNode);
+  public static EnsoHashMap empty() {
+    return createEmpty();
   }
 
   @ExportMessage
@@ -112,23 +98,34 @@ public final class EnsoHashMap implements EnsoObject {
 
   @ExportMessage
   int getHashSize() {
-    return snapshotSize;
+    return size;
   }
 
   @ExportMessage
-  boolean isHashEntryExisting(Object key) {
-    return isEntryInThisMap(mapBuilder.get(key));
+  boolean isHashEntryExisting(
+      Object key,
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode) {
+    var entry = mapBuilder.get(key, generation, hashCodeNode, equalsNode);
+    return entry != null;
   }
 
   @ExportMessage
-  boolean isHashEntryReadable(Object key) {
-    return isHashEntryExisting(key);
+  boolean isHashEntryReadable(
+      Object key,
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode) {
+    return isHashEntryExisting(key, hashCodeNode, equalsNode);
   }
 
   @ExportMessage
-  Object readHashValue(Object key) throws UnknownKeyException {
-    StorageEntry entry = mapBuilder.get(key);
-    if (isEntryInThisMap(entry)) {
+  Object readHashValue(
+      Object key,
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode)
+      throws UnknownKeyException {
+    StorageEntry entry = mapBuilder.get(key, generation, hashCodeNode, equalsNode);
+    if (entry != null) {
       return entry.value();
     } else {
       throw UnknownKeyException.create(key);
@@ -140,7 +137,7 @@ public final class EnsoHashMap implements EnsoObject {
     try {
       return interop.getIterator(getCachedVectorRepresentation());
     } catch (UnsupportedMessageException e) {
-      throw new IllegalStateException(e);
+      throw new PanicException(Text.create(e.getMessage()), interop);
     }
   }
 
@@ -181,12 +178,10 @@ public final class EnsoHashMap implements EnsoObject {
     var sb = new StringBuilder();
     sb.append("{");
     boolean empty = true;
-    for (StorageEntry entry : mapBuilder.getStorage().getValues()) {
-      if (isEntryInThisMap(entry)) {
-        empty = false;
-        sb.append(entryToString(entry, useInterop)).append(", ");
-      }
+    for (StorageEntry entry : mapBuilder.getEntries(generation, size)) {
+      empty = false;
+      sb.append(entryToString(entry, useInterop)).append(", ");
     }
     if (!empty) {
       // Delete last comma
       sb.delete(sb.length() - 2, sb.length());
@@ -204,7 +199,7 @@ public final class EnsoHashMap implements EnsoObject {
         keyStr = interop.asString(interop.toDisplayString(entry.key()));
         valStr = interop.asString(interop.toDisplayString(entry.value()));
       } catch (UnsupportedMessageException e) {
-        throw new IllegalStateException("Unreachable", e);
+        throw new PanicException(Text.create(e.getMessage()), interop);
       }
     } else {
       keyStr = entry.key().toString();
@@ -212,8 +207,4 @@ public final class EnsoHashMap implements EnsoObject {
     }
     return keyStr + "=" + valStr;
   }
-
-  private boolean isEntryInThisMap(StorageEntry entry) {
-    return entry != null && entry.index() < snapshotSize;
-  }
 }
[EnsoHashMapBuilder.java]
@@ -1,193 +1,264 @@
 package org.enso.interpreter.runtime.data.hash;
 
-import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
-import java.util.ArrayList;
-import java.util.List;
+import java.util.Arrays;
 
 import org.enso.interpreter.node.expression.builtin.meta.EqualsNode;
 import org.enso.interpreter.node.expression.builtin.meta.HashCodeNode;
-import org.graalvm.collections.EconomicMap;
-import org.graalvm.collections.Equivalence;
 
+import com.oracle.truffle.api.CompilerDirectives;
+
 /**
- * A storage for a {@link EnsoHashMap}. For one builder, there may be many snapshots ({@link
- * EnsoHashMap}). There should be at most one snapshot for a given size. All the snapshots should
- * have size smaller than this builder size.
+ * A storage for a {@link EnsoHashMap}. For one builder, there may be many
+ * {@link EnsoHashMap} instances that serve as a snapshot.
+ *
+ * There should be at most one snapshot for a given generation. All the snapshots should
+ * have generation smaller than this builder generation.
  */
-public final class EnsoHashMapBuilder {
-  private final EconomicMap<Object, StorageEntry> storage;
-  /** All entries stored by their sequential index. */
-  private final List<StorageEntry> sequentialEntries;
+final class EnsoHashMapBuilder {
+  /**
+   * Array of entries. It is only being added into. Both {@code put} and {@code remove}
+   * operations just add new entries into it using <em>linear hashing</em>.
+   */
+  private final StorageEntry[] byHash;
+  /** number of entries in the {@code byHash} array. With every change to the builder,
+   * the generation increases by one. Once the generation reaches 75% of {@code byHash.length}
+   * it is time to <em>rehash</em> into new builder.
+   */
+  private int generation;
+  /** the actual number of entries in the builder at the latest {@code generation}.
+   * <ul>
+   * <li>{@code put} of new key increases it</li>
+   * <li>{@code put} over existing key doesn't change it</li>
+   * <li>{@code remove} of a key decreases it</li>
+   * </ul>
+   */
+  private int actualSize;
 
-  private final HashCodeNode hashCodeNode;
-  private final EqualsNode equalsNode;
-  private int size;
-
-  private EnsoHashMapBuilder(HashCodeNode hashCodeNode, EqualsNode equalsNode) {
-    this.storage = EconomicMap.create(new StorageStrategy(equalsNode, hashCodeNode));
-    this.sequentialEntries = new ArrayList<>();
-    this.hashCodeNode = hashCodeNode;
-    this.equalsNode = equalsNode;
-  }
-
-  private EnsoHashMapBuilder(EnsoHashMapBuilder other, int numEntries) {
-    assert 0 < numEntries && numEntries <= other.size;
-    this.storage = EconomicMap.create(new StorageStrategy(other.equalsNode, other.hashCodeNode));
-    var entriesToBeDuplicated = other.sequentialEntries.subList(0, numEntries);
-    this.sequentialEntries = new ArrayList<>(entriesToBeDuplicated);
-    entriesToBeDuplicated.forEach(entry -> this.storage.put(entry.key, entry));
-    this.hashCodeNode = other.hashCodeNode;
-    this.equalsNode = other.equalsNode;
-    this.size = numEntries;
-  }
-
-  private EnsoHashMapBuilder(EnsoHashMapBuilder other) {
-    this.storage =
-        EconomicMap.create(
-            new StorageStrategy(other.equalsNode, other.hashCodeNode), other.storage);
-    this.sequentialEntries = new ArrayList<>(other.sequentialEntries);
-    this.hashCodeNode = other.hashCodeNode;
-    this.equalsNode = other.equalsNode;
-    this.size = other.size;
+  /** Creates an empty builder with given capacity. The capacity specifies
+   * the size of array of {@link StorageEntry} instances. The {@code put}
+   * and {@code remove} operations add entries into the array until it is
+   * 75% full.
+   */
+  private EnsoHashMapBuilder(int initialCapacity) {
+    this.byHash = new StorageEntry[initialCapacity];
   }
 
   /**
-   * Create a new builder with stored nodes.
-   *
-   * @param hashCodeNode Node that will be stored in the storage for invoking `hash_code` on keys.
-   * @param equalsNode Node that will be stored in the storage for invoking `==` on keys.
+   * Create a new builder with default size being {@code 11}.
    */
-  public static EnsoHashMapBuilder create(HashCodeNode hashCodeNode, EqualsNode equalsNode) {
-    return new EnsoHashMapBuilder(hashCodeNode, equalsNode);
+  public static EnsoHashMapBuilder create() {
+    return new EnsoHashMapBuilder(11);
   }
 
-  /** Returns count of elements in the storage. */
-  public int getSize() {
-    return size;
+  public int generation() {
+    return generation;
   }
 
-  public EconomicMap<Object, StorageEntry> getStorage() {
-    return storage;
+  /** Returns the actual number of visible elements in current generation. */
+  public int size() {
+    return actualSize;
   }
 
-  /**
-   * Duplicates the MapBuilder with just first {@code numEntries} number of entries.
-   *
-   * @param numEntries Number of entries to take from this MapBuilder.
+  /** Provides access to all {@code StorageEntry} in this builder
+   * at given {@code atGeneration}.
+   * Classical usage is to {@code for (var e : this) if (e.isVisible(atGeneration) operation(e))}.
    */
-  public EnsoHashMapBuilder duplicatePartial(int numEntries) {
-    return new EnsoHashMapBuilder(this, numEntries);
-  }
-
-  /** Duplicates this builder with all its entries. */
-  @TruffleBoundary
-  public EnsoHashMapBuilder duplicate() {
-    return new EnsoHashMapBuilder(this);
-  }
-
-  /** Adds a key-value mapping, overriding any existing value. */
-  @TruffleBoundary(allowInlining = true)
-  public void add(Object key, Object value) {
-    var oldEntry = storage.get(key);
-    int newEntryIndex = oldEntry != null ? oldEntry.index : size;
-    var newEntry = new StorageEntry(key, value, newEntryIndex);
-    storage.put(key, newEntry);
-    if (oldEntry == null) {
-      assert newEntry.index == size;
-      sequentialEntries.add(newEntry);
-      size++;
-    } else {
-      sequentialEntries.set(newEntryIndex, newEntry);
-    }
-  }
-
-  @TruffleBoundary(allowInlining = true)
-  public StorageEntry get(Object key) {
-    return storage.get(key);
+  public StorageEntry[] getEntries(int atGeneration, int size) {
+    var arr = new StorageEntry[size];
+    var at = 0;
+    for (var i = 0; i < byHash.length; i++) {
+      var e = byHash[i];
+      if (e != null && e.isVisible(atGeneration)) {
+        arr[at++] = e;
+      }
+    }
+    if (at != arr.length) {
+      return Arrays.copyOf(arr, at);
+    } else {
+      return arr;
+    }
   }
 
   /**
-   * Removes an entry denoted by the given key.
-   *
-   * @return true if the removal was successful, i.e., the key was in the map and was removed, false
-   *     otherwise.
+   * Prepares a builder ready for modification at given generation.
+   * It may return {@code this} if the {@code atGeneration == this.generation}
+   * and the {@code byHash} array is less than 75% full. Otherwise
+   * it may return new builder suitable for additions.
    */
-  @TruffleBoundary
-  public boolean remove(Object key) {
-    var oldEntry = storage.removeKey(key);
-    if (oldEntry == null) {
-      return false;
-    } else {
-      sequentialEntries.remove(oldEntry.index);
-      // Rewrite rest of the sequentialEntries list and repair indexes in storage
-      for (int i = oldEntry.index; i < sequentialEntries.size(); i++) {
-        var entry = sequentialEntries.get(i);
-        StorageEntry newEntry = new StorageEntry(entry.key, entry.value, i);
-        sequentialEntries.set(i, newEntry);
-        storage.put(newEntry.key, newEntry);
-      }
-      size--;
-      return true;
-    }
-  }
-
-  @TruffleBoundary(allowInlining = true)
-  public boolean containsKey(Object key) {
-    return storage.containsKey(key);
+  public EnsoHashMapBuilder asModifiable(int atGeneration, HashCodeNode hashCodeNode, EqualsNode equalsNode) {
+    if (atGeneration != generation || generation * 4 > byHash.length * 3) {
+      var newSize = Math.max(actualSize * 2, byHash.length);
+      return rehash(newSize, atGeneration, hashCodeNode, equalsNode);
+    } else {
+      return this;
+    }
   }
 
+  /** Adds a key-value mapping. Uses {@code hashCodeNode} to compute
+   * hash and based on it location in the array. Then it searches for
+   * first empty slot after the identified location. If it finds an
+   * equal key, it marks it as removed, if it hasn't been removed yet.
+   * Once it finds an empty slot, it puts there a new entry with
+   * the next generation.
+   */
+  public void put(
+      Object key, Object value,
+      HashCodeNode hashCodeNode, EqualsNode equalsNode
+  ) {
+    var at = findWhereToStart(key, hashCodeNode);
+    var nextGeneration = ++generation;
+    var replacingExistingKey = false;
+    for (var i = 0; i < byHash.length; i++) {
+      if (byHash[at] == null) {
+        if (!replacingExistingKey) {
+          actualSize++;
+        }
+        byHash[at] = new StorageEntry(key, value, nextGeneration);
+        return;
+      }
+      if (compare(equalsNode, byHash[at].key(), key)) {
+        var invalidatedEntry = byHash[at].markRemoved(nextGeneration);
+        if (invalidatedEntry != byHash[at]) {
+          byHash[at] = invalidatedEntry;
+          replacingExistingKey = true;
+        }
+      }
+      if (++at == byHash.length) {
+        at = 0;
+      }
+    }
+    throw CompilerDirectives.shouldNotReachHere("byHash array is full!");
+  }
+
+  /** Finds storage entry for given key or {@code null}.
+   * Searches only entries that are visible for given {@code generation}.
+   */
+  public StorageEntry get(
+      Object key, int generation,
+      HashCodeNode hashCodeNode, EqualsNode equalsNode
+  ) {
+    var at = findWhereToStart(key, hashCodeNode);
+    for (var i = 0; i < byHash.length; i++) {
+      if (byHash[at] == null) {
+        return null;
+      }
+      if (byHash[at].isVisible(generation)) {
+        if (compare(equalsNode, key, byHash[at].key())) {
+          return byHash[at];
+        }
+      }
+      if (++at == byHash.length) {
+        at = 0;
+      }
+    }
+    throw CompilerDirectives.shouldNotReachHere("byHash array is full!");
+  }
+
+  private int findWhereToStart(Object key, HashCodeNode hashCodeNode) {
+    var hash = Math.abs(hashCodeNode.execute(key));
+    var at = (int) (hash % byHash.length);
+    return at;
+  }
+
+  /**
+   * Removes an entry denoted by the given key. Removal is "non-destructive" - the
+   * "removed" entry stays in the array - only its {@link StorageEntry#removed()}
+   * value is set to the next generation.
+   *
+   * @return true if the removal was successful, false otherwise.
+   */
+  public boolean remove(
+      Object key,
+      HashCodeNode hashCodeNode, EqualsNode equalsNode
+  ) {
+    var at = findWhereToStart(key, hashCodeNode);
+    var nextGeneration = ++generation;
+    for (var i = 0; i < byHash.length; i++) {
+      if (byHash[at] == null) {
+        return false;
+      }
+      if (compare(equalsNode, key, byHash[at].key())) {
+        var invalidatedEntry = byHash[at].markRemoved(nextGeneration);
+        if (invalidatedEntry != byHash[at]) {
+          byHash[at] = invalidatedEntry;
+          actualSize--;
+          return true;
+        }
+      }
+      if (++at == byHash.length) {
+        at = 0;
+      }
+    }
+    throw CompilerDirectives.shouldNotReachHere("byHash array is full!");
+  }
+
+  /** Builds a new builder with given array size and puts into it all entries
+   * that are valid {@code atGeneration}.
+   */
+  private EnsoHashMapBuilder rehash(int size, int atGeneration, HashCodeNode hashCodeNode, EqualsNode equalsNode) {
+    var newBuilder = new EnsoHashMapBuilder(size);
+    for (var i = 0; i < byHash.length; i++) {
+      var entry = byHash[i];
+      if (entry != null && entry.isVisible(atGeneration)) {
+        newBuilder.put(entry.key(), entry.value(), hashCodeNode, equalsNode);
+      }
+    }
+    return newBuilder;
+  }
+
   /**
    * Creates a snapshot with the current size. The created snapshot contains all the entries that
    * are in the storage as of this moment, i.e., all the entries with their indexes lesser than
-   * {@code size}.
+   * {@code generation}.
    *
-   * <p>Should be called at most once for a particular {@code size}.
+   * <p>Should be called at most once for a particular {@code generation}.
    *
    * @return A new hash map snapshot.
    */
   public EnsoHashMap build() {
-    return EnsoHashMap.createWithBuilder(this, size);
+    return EnsoHashMap.createWithBuilder(this);
   }
 
   @Override
   public String toString() {
-    return "EnsoHashMapBuilder{size = " + size + ", storage = " + storage + "}";
+    return "EnsoHashMapBuilder{size = " + generation + ", storage = " + Arrays.toString(byHash) + "}";
   }
 
+  private static boolean compare(EqualsNode equalsNode, Object a, Object b) {
+    if (a instanceof Double aDbl && b instanceof Double bDbl && aDbl.isNaN() && bDbl.isNaN()) {
+      return true;
+    } else {
+      return equalsNode.execute(a, b);
+    }
+  }
+
   record StorageEntry(
       Object key,
       Object value,
       /**
-       * A sequential index of the entry within this map. {@link EnsoHashMap} uses it for checking
+       * A generation the entry got into this map. {@link EnsoHashMap} uses it for checking
        * whether a certain key belongs in that map.
        */
-      int index) {}
-
-  /**
-   * Custom {@link Equivalence} used for the {@link EconomicMap} that delegates {@code equals} to
-   * {@link EqualsNode} and {@code hash_code} to {@link HashCodeNode}.
-   */
-  private static final class StorageStrategy extends Equivalence {
-    private final EqualsNode equalsNode;
-    private final HashCodeNode hashCodeNode;
-
-    private StorageStrategy(EqualsNode equalsNode, HashCodeNode hashCodeNode) {
-      this.equalsNode = equalsNode;
-      this.hashCodeNode = hashCodeNode;
+      int added,
+      /** Remove at a generation. */
+      int removed
+  ) {
+    StorageEntry(Object key, Object value, int added) {
+      this(key, value, added, Integer.MAX_VALUE);
     }
 
-    @Override
-    public boolean equals(Object a, Object b) {
-      // Special handling for NaN added as a key inside a map.
-      if (a instanceof Double aDbl && b instanceof Double bDbl && aDbl.isNaN() && bDbl.isNaN()) {
-        return true;
+    boolean isVisible(int generation) {
+      return added() <= generation && generation < removed();
+    }
+
+    StorageEntry markRemoved(int when) {
+      if (removed() <= when) {
+        return this;
       } else {
-        return Boolean.TRUE.equals(equalsNode.execute(a, b));
+        return new StorageEntry(key(), value(), added(), when);
       }
     }
-
-    @Override
-    public int hashCode(Object o) {
-      return (int) hashCodeNode.execute(o);
-    }
-  }
 }
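
To see why one builder can serve many EnsoHashMap snapshots after this rewrite, it helps to run StorageEntry's visibility rule in isolation. A runnable toy under simplifying assumptions (String keys, no hashing; SnapshotDemo is invented for illustration, only isVisible mirrors the diff above):

public class SnapshotDemo {
  record Entry(String key, int value, int added, int removed) {
    boolean isVisible(int generation) {
      return added <= generation && generation < removed;
    }
  }

  public static void main(String[] args) {
    Entry[] shared = {
      new Entry("a", 1, 1, 2), // put at generation 1, removed at generation 2
      new Entry("b", 2, 2, Integer.MAX_VALUE) // put at generation 2, still live
    };
    print(shared, 1); // the generation-1 snapshot sees: a=1
    print(shared, 2); // the generation-2 snapshot sees: b=2
  }

  static void print(Entry[] entries, int generation) {
    StringBuilder sb = new StringBuilder("generation " + generation + ":");
    for (Entry e : entries) {
      if (e.isVisible(generation)) {
        sb.append(' ').append(e.key()).append('=').append(e.value());
      }
    }
    System.out.println(sb);
  }
}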
[HashMapGetNode.java]
@@ -11,10 +11,13 @@ import com.oracle.truffle.api.interop.UnknownKeyException;
 import com.oracle.truffle.api.interop.UnsupportedMessageException;
 import com.oracle.truffle.api.library.CachedLibrary;
 import com.oracle.truffle.api.nodes.Node;
+
 import org.enso.interpreter.dsl.BuiltinMethod;
 import org.enso.interpreter.dsl.Suspend;
 import org.enso.interpreter.node.BaseNode.TailStatus;
 import org.enso.interpreter.node.callable.thunk.ThunkExecutorNode;
+import org.enso.interpreter.runtime.data.text.Text;
+import org.enso.interpreter.runtime.error.PanicException;
 import org.enso.interpreter.runtime.state.State;
 
 @BuiltinMethod(
@@ -40,15 +43,14 @@ public abstract class HashMapGetNode extends Node {
       VirtualFrame frame,
       State state, Object self, Object key, Object defaultValue,
       @CachedLibrary("self") InteropLibrary interop,
-      @Shared @Cached("build()") ThunkExecutorNode thunkExecutorNode) {
-    if (interop.isHashEntryReadable(self, key)) {
+      @Shared @Cached("build()") ThunkExecutorNode thunkExecutorNode
+  ) {
     try {
       return interop.readHashValue(self, key);
-    } catch (UnsupportedMessageException | UnknownKeyException e) {
-      throw new IllegalStateException(e);
-    }
-    } else {
+    } catch (UnknownKeyException e) {
       return thunkExecutorNode.executeThunk(frame, defaultValue, state, TailStatus.NOT_TAIL);
+    } catch (UnsupportedMessageException e) {
+      throw new PanicException(Text.create(e.getMessage()), this);
     }
   }
 
[HashMapInsertNode.java]
@@ -1,7 +1,14 @@
 package org.enso.interpreter.runtime.data.hash;
 
-import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
+import org.enso.interpreter.dsl.BuiltinMethod;
+import org.enso.interpreter.node.expression.builtin.meta.EqualsNode;
+import org.enso.interpreter.node.expression.builtin.meta.HashCodeNode;
+import org.enso.interpreter.runtime.data.text.Text;
+import org.enso.interpreter.runtime.error.PanicException;
+
+import com.oracle.truffle.api.CompilerDirectives;
 import com.oracle.truffle.api.dsl.Cached;
+import com.oracle.truffle.api.dsl.Cached.Shared;
 import com.oracle.truffle.api.dsl.Specialization;
 import com.oracle.truffle.api.interop.InteropLibrary;
 import com.oracle.truffle.api.interop.InvalidArrayIndexException;
@@ -9,9 +16,6 @@ import com.oracle.truffle.api.interop.StopIterationException;
 import com.oracle.truffle.api.interop.UnsupportedMessageException;
 import com.oracle.truffle.api.library.CachedLibrary;
 import com.oracle.truffle.api.nodes.Node;
-import org.enso.interpreter.dsl.BuiltinMethod;
-import org.enso.interpreter.node.expression.builtin.meta.EqualsNode;
-import org.enso.interpreter.node.expression.builtin.meta.HashCodeNode;
 
 @BuiltinMethod(
     type = "Map",
@@ -30,28 +34,16 @@ public abstract class HashMapInsertNode extends Node {
   public abstract EnsoHashMap execute(Object self, Object key, Object value);
 
   @Specialization
-  @TruffleBoundary
-  EnsoHashMap doEnsoHashMap(EnsoHashMap hashMap, Object key, Object value) {
-    EnsoHashMapBuilder mapBuilder = hashMap.getMapBuilder();
-    boolean containsKey = mapBuilder.get(key) != null;
-    boolean insertCalledOnMap = hashMap.isInsertCalled();
-    if (insertCalledOnMap || containsKey) {
-      // insert was already called on this map => We need to duplicate MapBuilder
-      // If a key is already contained in the Map there is no way telling whether there is another
-      // binding pointing to the Map, and we do not want to mutate this older binding.
-      var newMapBuilder = hashMap.getHashSize() < mapBuilder.getSize() ?
-          mapBuilder.duplicatePartial(hashMap.getHashSize()) :
-          mapBuilder.duplicate();
-      newMapBuilder.add(key, value);
-      return newMapBuilder.build();
-    } else {
-      // Do not duplicate the builder, just create a snapshot.
-      mapBuilder.add(key, value);
+  EnsoHashMap doEnsoHashMap(
+      EnsoHashMap hashMap, Object key, Object value,
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode
+  ) {
+    var mapBuilder = hashMap.getMapBuilder(false, hashCodeNode, equalsNode);
+    mapBuilder.put(key, value, hashCodeNode, equalsNode);
     var newMap = mapBuilder.build();
-    hashMap.setInsertCalled();
     return newMap;
-    }
   }
 
   /**
    * Creates a new {@link EnsoHashMapBuilder} for the given {@code foreignMap} - iterates through
@@ -61,24 +53,25 @@ public abstract class HashMapInsertNode extends Node {
   EnsoHashMap doForeign(Object foreignMap, Object keyToInsert, Object valueToInsert,
       @CachedLibrary("foreignMap") InteropLibrary mapInterop,
       @CachedLibrary(limit = "3") InteropLibrary iteratorInterop,
-      @Cached HashCodeNode hashCodeNode,
-      @Cached EqualsNode equalsNode) {
-    var mapBuilder = EnsoHashMapBuilder.create(hashCodeNode, equalsNode);
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode) {
+    var mapBuilder = EnsoHashMapBuilder.create();
     try {
       Object entriesIterator = mapInterop.getHashEntriesIterator(foreignMap);
       while (iteratorInterop.hasIteratorNextElement(entriesIterator)) {
         Object keyValueArr = iteratorInterop.getIteratorNextElement(entriesIterator);
         Object key = iteratorInterop.readArrayElement(keyValueArr, 0);
         Object value = iteratorInterop.readArrayElement(keyValueArr, 1);
-        mapBuilder.add(key, value);
+        mapBuilder = mapBuilder.asModifiable(mapBuilder.generation(), hashCodeNode, equalsNode);
+        mapBuilder.put(key, value, hashCodeNode, equalsNode);
       }
     } catch (UnsupportedMessageException | StopIterationException | InvalidArrayIndexException e) {
-      throw new IllegalStateException(
-          "Polyglot hash map " + foreignMap + " has wrongly specified Interop API (hash entries iterator)",
-          e
-      );
+      CompilerDirectives.transferToInterpreter();
+      var msg = "Polyglot hash map " + foreignMap + " has wrongly specified Interop API (hash entries iterator)";
+      throw new PanicException(Text.create(msg), this);
     }
-    mapBuilder.add(keyToInsert, valueToInsert);
-    return EnsoHashMap.createWithBuilder(mapBuilder, mapBuilder.getSize());
+    mapBuilder = mapBuilder.asModifiable(mapBuilder.generation(), hashCodeNode, equalsNode);
+    mapBuilder.put(keyToInsert, valueToInsert, hashCodeNode, equalsNode);
+    return EnsoHashMap.createWithBuilder(mapBuilder);
   }
 }
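
Note the cost model behind getMapBuilder(false, ...) and asModifiable(...) used above: every put or remove advances the generation, and once generation * 4 > byHash.length * 3 the builder rehashes, copying only the entries still visible. A small arithmetic check of that threshold (RehashTrigger is invented for illustration; 11 is the default capacity from the builder diff above):

public class RehashTrigger {
  public static void main(String[] args) {
    int capacity = 11; // threshold is 75% of capacity: 11 * 3 / 4
    for (int generation = 1; generation <= capacity; generation++) {
      boolean rehash = generation * 4 > capacity * 3;
      System.out.println("generation " + generation + " -> rehash: " + rehash);
      // prints false up to generation 8 (32 <= 33), true from generation 9 on
    }
  }
}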
[HashMapRemoveNode.java]
@@ -1,7 +1,15 @@
 package org.enso.interpreter.runtime.data.hash;
 
+import org.enso.interpreter.dsl.BuiltinMethod;
+import org.enso.interpreter.node.expression.builtin.meta.EqualsNode;
+import org.enso.interpreter.node.expression.builtin.meta.HashCodeNode;
+import org.enso.interpreter.runtime.data.text.Text;
+import org.enso.interpreter.runtime.error.DataflowError;
+import org.enso.interpreter.runtime.error.PanicException;
+
 import com.oracle.truffle.api.CompilerDirectives;
 import com.oracle.truffle.api.dsl.Cached;
+import com.oracle.truffle.api.dsl.Cached.Shared;
 import com.oracle.truffle.api.dsl.GenerateUncached;
 import com.oracle.truffle.api.dsl.Specialization;
 import com.oracle.truffle.api.interop.InteropLibrary;
@@ -10,10 +18,6 @@ import com.oracle.truffle.api.interop.StopIterationException;
 import com.oracle.truffle.api.interop.UnsupportedMessageException;
 import com.oracle.truffle.api.library.CachedLibrary;
 import com.oracle.truffle.api.nodes.Node;
-import org.enso.interpreter.dsl.BuiltinMethod;
-import org.enso.interpreter.node.expression.builtin.meta.EqualsNode;
-import org.enso.interpreter.node.expression.builtin.meta.HashCodeNode;
-import org.enso.interpreter.runtime.error.DataflowError;
 
 @BuiltinMethod(
     type = "Map",
@@ -31,16 +35,16 @@ public abstract class HashMapRemoveNode extends Node {
   public abstract EnsoHashMap execute(Object self, Object key);
 
   @Specialization
-  EnsoHashMap removeFromEnsoMap(EnsoHashMap ensoMap, Object key) {
-    var oldEntry = ensoMap.getMapBuilder().get(key);
-    if (oldEntry == null) {
-      throw DataflowError.withoutTrace("No such key", null);
+  EnsoHashMap removeFromEnsoMap(
+      EnsoHashMap ensoMap, Object key,
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode
+  ) {
+    var mapBuilder = ensoMap.getMapBuilder(false, hashCodeNode, equalsNode);
+    if (mapBuilder.remove(key, hashCodeNode, equalsNode)) {
+      return mapBuilder.build();
     } else {
-      var newBuilder = ensoMap.getMapBuilder().duplicate();
-      if (!newBuilder.remove(key)) {
-        throw new IllegalStateException("Key '" + key + "' should be in the map");
-      }
-      return EnsoHashMap.createWithBuilder(newBuilder, newBuilder.getSize());
+      throw DataflowError.withoutTrace("No such key", null);
    }
   }
 
@@ -49,13 +53,13 @@ public abstract class HashMapRemoveNode extends Node {
   )
   EnsoHashMap removeFromInteropMap(Object map, Object keyToRemove,
       @CachedLibrary(limit = "5") InteropLibrary interop,
-      @Cached HashCodeNode hashCodeNode,
-      @Cached EqualsNode equalsNode) {
+      @Shared("hash") @Cached HashCodeNode hashCodeNode,
+      @Shared("equals") @Cached EqualsNode equalsNode) {
     // We cannot simply call interop.isHashEntryExisting, because it would, most likely
     // use the default `hashCode` and `equals` Java methods. But we need to use our
     // EqualsNode, so we do the check for non-existing key inside the while loop.
     boolean keyToRemoveFound = false;
-    var mapBuilder = EnsoHashMapBuilder.create(hashCodeNode, equalsNode);
+    var mapBuilder = EnsoHashMapBuilder.create();
     try {
       Object entriesIterator = interop.getHashEntriesIterator(map);
       while (interop.hasIteratorNextElement(entriesIterator)) {
@@ -63,23 +67,24 @@ public abstract class HashMapRemoveNode extends Node {
         Object key = interop.readArrayElement(keyValueArr, 0);
         if ((boolean) equalsNode.execute(keyToRemove, key)) {
           if (keyToRemoveFound) {
-            throw new IllegalStateException("Key " + key + " found twice");
+            CompilerDirectives.transferToInterpreter();
+            throw new PanicException(Text.create("Key " + key + " found twice"), this);
           } else {
             keyToRemoveFound = true;
           }
         } else {
           Object value = interop.readArrayElement(keyValueArr, 1);
-          mapBuilder.add(key, value);
+          mapBuilder = mapBuilder.asModifiable(mapBuilder.generation(), hashCodeNode, equalsNode);
+          mapBuilder.put(key, value, hashCodeNode, equalsNode);
         }
       }
     } catch (UnsupportedMessageException | StopIterationException | InvalidArrayIndexException e) {
-      throw new IllegalStateException(
-          "Polyglot hash map " + map + " has wrongly specified Interop API (hash entries iterator)",
-          e
-      );
+      CompilerDirectives.transferToInterpreter();
+      var msg = "Polyglot hash map " + map + " has wrongly specified Interop API (hash entries iterator)";
+      throw new PanicException(Text.create(msg), this);
     }
     if (keyToRemoveFound) {
-      return EnsoHashMap.createWithBuilder(mapBuilder, mapBuilder.getSize());
+      return EnsoHashMap.createWithBuilder(mapBuilder);
     } else {
       CompilerDirectives.transferToInterpreter();
       throw DataflowError.withoutTrace("No such key " + keyToRemove, interop);
[HashMapSizeNode.java]
@@ -8,6 +8,8 @@ import com.oracle.truffle.api.interop.UnsupportedMessageException;
 import com.oracle.truffle.api.library.CachedLibrary;
 import com.oracle.truffle.api.nodes.Node;
 import org.enso.interpreter.dsl.BuiltinMethod;
+import org.enso.interpreter.runtime.data.text.Text;
+import org.enso.interpreter.runtime.error.PanicException;
 
 @BuiltinMethod(
     type = "Map",
@@ -28,7 +30,7 @@ public abstract class HashMapSizeNode extends Node {
     try {
       return interop.getHashSize(hashMap);
     } catch (UnsupportedMessageException e) {
-      throw new IllegalStateException(e);
+      throw new PanicException(Text.create(e.getMessage()), this);
     }
   }
 
[HashMapToTextNode.java]
@@ -8,7 +8,12 @@ import com.oracle.truffle.api.interop.StopIterationException;
 import com.oracle.truffle.api.interop.UnsupportedMessageException;
 import com.oracle.truffle.api.library.CachedLibrary;
 import com.oracle.truffle.api.nodes.Node;
+
 import org.enso.interpreter.dsl.BuiltinMethod;
+import org.enso.interpreter.runtime.data.text.Text;
+import org.enso.interpreter.runtime.error.PanicException;
+
+import com.oracle.truffle.api.CompilerDirectives;
 
 @BuiltinMethod(
     type = "Map",
@@ -45,10 +50,9 @@ public abstract class HashMapToTextNode extends Node {
         sb.delete(sb.length() - 2, sb.length());
       }
     } catch (UnsupportedMessageException | StopIterationException | InvalidArrayIndexException e) {
-      throw new IllegalStateException(
-          "hashMap " + hashMap + " probably implements interop API incorrectly",
-          e
-      );
+      CompilerDirectives.transferToInterpreter();
+      var msg = "hashMap " + hashMap + " probably implements interop API incorrectly";
+      throw new PanicException(Text.create(msg), this);
     }
     sb.append("}");
     return sb.toString();
[HashMapToVectorNode.java]
@@ -1,8 +1,11 @@
 package org.enso.interpreter.runtime.data.hash;
 
 import org.enso.interpreter.dsl.BuiltinMethod;
+import org.enso.interpreter.runtime.data.text.Text;
 import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
+import org.enso.interpreter.runtime.error.PanicException;
 
+import com.oracle.truffle.api.CompilerDirectives;
 import com.oracle.truffle.api.dsl.Cached;
 import com.oracle.truffle.api.dsl.Fallback;
 import com.oracle.truffle.api.dsl.GenerateUncached;
@@ -71,7 +74,9 @@ public abstract class HashMapToVectorNode extends Node {
           HashEntriesVector.createFromKeysAndValues(keys, values)
       );
     } catch (UnsupportedMessageException | StopIterationException | InvalidArrayIndexException e) {
-      throw new IllegalStateException("hashMap: " + hashMap + " has probably wrong hash interop API", e);
+      CompilerDirectives.transferToInterpreter();
+      var msg = "hashMap: " + hashMap + " has probably wrong hash interop API";
+      throw new PanicException(Text.create(msg), mapInterop);
     }
   }
 
@@ -425,7 +425,7 @@ spec setup =
         t2.should_fail_with Ambiguous_Column_Rename
         err = t2.catch
         err.column_name . should_equal "alpha"
-        err.new_names . should_equal ["StartsWithA", "EndsWithA"]
+        err.new_names . sort . should_equal ["EndsWithA", "StartsWithA"]
 
         t3 = table_builder [["aaa", [1]], ["bbb", [2]]]
         ## The rename patterns are deliberately prepared so that both will
@@ -459,7 +459,7 @@ spec setup =
     Test.specify "should correctly handle problems: duplicate names" <|
         map = ["Test", "Test", "Test", "Test"]
         action = table.rename_columns map on_problems=_
-        tester = expect_column_names ["Test", "Test 1", "Test 2", "Test 3"]
+        tester = expect_column_names ["Test 1", "Test 2", "Test 3", "Test"]
        problems = [Duplicate_Output_Column_Names.Error ["Test", "Test", "Test"]]
        Problems.test_problem_handling action problems tester
 
@@ -264,7 +264,7 @@ spec =
 
     Test.specify "should convert the whole map to a vector" <|
         m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
-        m.to_vector.should_equal [[0, 0], [3, -5], [1, 2]]
+        m.to_vector.sort on=_.first . should_equal [[0, 0], [1, 2], [3, -5]]
 
     Test.specify "should allow building the map from two vectors" <|
         expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
@@ -300,7 +300,9 @@ spec =
 
     Test.specify "should define a well-defined text conversion" <|
         m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
-        m.to_text . should_equal "{0=0, 3=-5, 1=2}"
+        m.to_text . should_contain "0=0"
+        m.to_text . should_contain "3=-5"
+        m.to_text . should_contain "1=2"
 
     Test.specify "should define structural equality" <|
         map_1 = Map.empty . insert "1" 2 . insert "2" "1"
@@ -564,7 +566,7 @@ spec =
         js_dict = js_dict_from_vec ["A", 1, "B", 2]
         map = js_dict.insert "C" 3
         js_dict.to_vector.should_equal [["A", 1], ["B", 2]]
-        map.to_vector.should_equal [["A", 1], ["B", 2], ["C", 3]]
+        map.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
 
     Test.specify "should treat Java Map as Enso map" <|
         sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first
@@ -107,7 +107,7 @@ spec =
         data = json.get 'data'
        data.should_be_a Vector
        data.length . should_equal 10
-        (data.take (First 3)).to_text . should_equal '[{"x":0,"y":225}, {"x":29,"y":196}, {"x":15,"y":0}]'
+        (data.take (First 3) . sort on=(_.get "x")).to_text . should_equal '[{"x":0,"y":225}, {"x":15,"y":0}, {"x":29,"y":196}]'
 
     Test.specify "filter the elements" <|
         vector = [0,10,20,30]
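
A pattern worth noting in the test updates above: because entries are now stored at hash-determined positions rather than in insertion order, Map.to_vector and Map.to_text no longer guarantee a stable ordering, so the assertions were loosened to sort the results or check containment instead of comparing exact sequences.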