From fc7e9312d00e60171ed77c477ed91231d3dbfff9 Mon Sep 17 00:00:00 2001 From: Kristóf Marussy Date: Sun, 12 Dec 2021 17:48:47 +0100 Subject: build: move modules into subproject directory --- subprojects/store/build.gradle | 9 + .../map/benchmarks/ImmutablePutBenchmark.java | 77 ++++ .../map/benchmarks/ImmutablePutExecutionPlan.java | 57 +++ .../refinery/store/map/ContinousHashProvider.java | 69 ++++ .../main/java/tools/refinery/store/map/Cursor.java | 14 + .../tools/refinery/store/map/CursorAsIterator.java | 57 +++ .../java/tools/refinery/store/map/DiffCursor.java | 6 + .../tools/refinery/store/map/MapAsIterable.java | 26 ++ .../java/tools/refinery/store/map/Versioned.java | 7 + .../tools/refinery/store/map/VersionedMap.java | 13 + .../refinery/store/map/VersionedMapStore.java | 14 + .../store/map/VersionedMapStoreConfiguration.java | 48 +++ .../refinery/store/map/VersionedMapStoreImpl.java | 135 ++++++ .../refinery/store/map/internal/HashClash.java | 18 + .../refinery/store/map/internal/ImmutableNode.java | 378 +++++++++++++++++ .../refinery/store/map/internal/MapCursor.java | 131 ++++++ .../refinery/store/map/internal/MapDiffCursor.java | 221 ++++++++++ .../refinery/store/map/internal/MutableNode.java | 454 +++++++++++++++++++++ .../tools/refinery/store/map/internal/Node.java | 85 ++++ .../refinery/store/map/internal/OldValueBox.java | 19 + .../store/map/internal/VersionedMapImpl.java | 171 ++++++++ .../java/tools/refinery/store/model/Model.java | 20 + .../tools/refinery/store/model/ModelCursor.java | 25 ++ .../refinery/store/model/ModelDiffCursor.java | 26 ++ .../tools/refinery/store/model/ModelStore.java | 16 + .../tools/refinery/store/model/ModelStoreImpl.java | 122 ++++++ .../java/tools/refinery/store/model/Tuple.java | 148 +++++++ .../refinery/store/model/TupleHashProvider.java | 65 +++ .../store/model/TupleHashProviderBitMagic.java | 28 ++ .../refinery/store/model/internal/ModelImpl.java | 124 ++++++ .../internal/SimilarRelationEquivalenceClass.java | 33 ++ .../store/model/representation/AuxilaryData.java | 22 + .../model/representation/DataRepresentation.java | 24 ++ .../store/model/representation/Relation.java | 31 ++ .../store/model/representation/TruthValue.java | 51 +++ .../tools/refinery/store/query/QueriableModel.java | 30 ++ .../refinery/store/query/QueriableModelStore.java | 23 ++ .../store/query/QueriableModelStoreImpl.java | 127 ++++++ .../refinery/store/query/building/DNFAnd.java | 37 ++ .../refinery/store/query/building/DNFAtom.java | 33 ++ .../store/query/building/DNFPredicate.java | 72 ++++ .../store/query/building/EquivalenceAtom.java | 44 ++ .../store/query/building/PredicateAtom.java | 66 +++ .../store/query/building/RelationAtom.java | 49 +++ .../refinery/store/query/building/Variable.java | 22 + .../refinery/store/query/internal/DNF2PQuery.java | 189 +++++++++ .../store/query/internal/DummyBaseIndexer.java | 59 +++ .../store/query/internal/ModelUpdateListener.java | 103 +++++ .../store/query/internal/PredicateResult.java | 24 ++ .../store/query/internal/QueriableModelImpl.java | 212 ++++++++++ .../store/query/internal/RawPatternMatcher.java | 57 +++ .../query/internal/RelationalEngineContext.java | 33 ++ .../query/internal/RelationalQueryMetaContext.java | 58 +++ .../query/internal/RelationalRuntimeContext.java | 178 ++++++++ .../store/query/internal/RelationalScope.java | 43 ++ .../refinery/store/query/internal/ViewUpdate.java | 34 ++ .../store/query/internal/ViewUpdateBuffer.java | 46 +++ .../store/query/internal/ViewUpdateTranslator.java | 57 +++ 
.../store/query/view/FilteredRelationView.java | 48 +++ .../store/query/view/FunctionalRelationView.java | 50 +++ .../store/query/view/KeyOnlyRelationView.java | 16 + .../refinery/store/query/view/RelationView.java | 85 ++++ .../tools/refinery/store/util/CollectionsUtil.java | 72 ++++ .../refinery/store/map/tests/MapUnitTests.java | 22 + .../store/map/tests/fuzz/CommitFuzzTest.java | 96 +++++ .../map/tests/fuzz/ContentEqualsFuzzTest.java | 143 +++++++ .../store/map/tests/fuzz/DiffCursorFuzzTest.java | 117 ++++++ .../store/map/tests/fuzz/MultiThreadFuzzTest.java | 97 +++++ .../map/tests/fuzz/MultiThreadTestRunnable.java | 101 +++++ .../store/map/tests/fuzz/MutableFuzzTest.java | 92 +++++ .../fuzz/MutableImmutableCompareFuzzTest.java | 89 ++++ .../store/map/tests/fuzz/RestoreFuzzTest.java | 109 +++++ .../store/map/tests/fuzz/SharedStoreFuzzTest.java | 113 +++++ .../store/map/tests/fuzz/utils/FuzzTestUtils.java | 64 +++ .../map/tests/fuzz/utils/FuzzTestUtilsTest.java | 33 ++ .../store/map/tests/utils/MapTestEnvironment.java | 214 ++++++++++ .../store/model/hashTests/HashEfficiencyTest.java | 161 ++++++++ .../refinery/store/model/tests/ModelTest.java | 148 +++++++ .../tools/refinery/store/query/test/QueryTest.java | 445 ++++++++++++++++++++ .../store/query/test/QueryTransactionTest.java | 58 +++ .../refinery/store/util/CollectionsUtilTests.java | 78 ++++ 81 files changed, 6791 insertions(+) create mode 100644 subprojects/store/build.gradle create mode 100644 subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java create mode 100644 subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/Cursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/DiffCursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/MapAsIterable.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/Versioned.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/VersionedMap.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/HashClash.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/Node.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java create mode 100644 
subprojects/store/src/main/java/tools/refinery/store/model/Model.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/ModelCursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/ModelStore.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/Tuple.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/TupleHashProvider.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/representation/Relation.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/QueriableModel.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStore.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStoreImpl.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/building/Variable.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/DNF2PQuery.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/ModelUpdateListener.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/PredicateResult.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/QueriableModelImpl.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/RawPatternMatcher.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java create mode 100644 
subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalScope.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdate.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateBuffer.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateTranslator.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/view/FunctionalRelationView.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/view/KeyOnlyRelationView.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/query/view/RelationView.java create mode 100644 subprojects/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/MapUnitTests.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/model/tests/ModelTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTransactionTest.java create mode 100644 subprojects/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java (limited to 'subprojects/store') diff --git a/subprojects/store/build.gradle b/subprojects/store/build.gradle new file mode 100644 index 00000000..8d091a81 --- /dev/null +++ b/subprojects/store/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'refinery-java-library' + id 'refinery-jmh' +} + +dependencies { + implementation libs.ecore + implementation libs.viatra +} diff --git a/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java b/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java new file mode 100644 index 00000000..cdf3d3c8 --- /dev/null +++ 
b/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java @@ -0,0 +1,77 @@ +package tools.refinery.store.map.benchmarks; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.concurrent.TimeUnit; + +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +@Fork(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Measurement(time = 1, timeUnit = TimeUnit.SECONDS) +@Warmup(time = 1, timeUnit = TimeUnit.SECONDS) +public class ImmutablePutBenchmark { + @Benchmark + public void immutablePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = executionPlan.createSut(); + for (int i = 0; i < executionPlan.nPut; i++) { + sut.put(executionPlan.nextKey(), executionPlan.nextValue()); + } + blackhole.consume(sut); + } + + @Benchmark + public void immutablePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = executionPlan.createSut(); + for (int i = 0; i < executionPlan.nPut; i++) { + sut.put(executionPlan.nextKey(), executionPlan.nextValue()); + if (i % 10 == 0) { + blackhole.consume(sut.commit()); + } + } + blackhole.consume(sut); + } + + @Benchmark + public void baselinePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = new HashMap(); + for (int i = 0; i < executionPlan.nPut; i++) { + var key = executionPlan.nextKey(); + var value = executionPlan.nextValue(); + if (executionPlan.isDefault(value)) { + sut.remove(key); + } else { + sut.put(key, value); + } + } + blackhole.consume(sut); + } + + @Benchmark + public void baselinePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = new HashMap(); + var store = new ArrayList>(); + for (int i = 0; i < executionPlan.nPut; i++) { + var key = executionPlan.nextKey(); + var value = executionPlan.nextValue(); + if (executionPlan.isDefault(value)) { + sut.remove(key); + } else { + sut.put(key, value); + } + if (i % 10 == 0) { + store.add(new HashMap<>(sut)); + } + } + blackhole.consume(sut); + blackhole.consume(store); + } +} diff --git a/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java b/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java new file mode 100644 index 00000000..756d504e --- /dev/null +++ b/subprojects/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java @@ -0,0 +1,57 @@ +package tools.refinery.store.map.benchmarks; + +import java.util.Random; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; + +@State(Scope.Benchmark) +public class ImmutablePutExecutionPlan { + + @Param({ "100", "10000" }) + public int nPut; + + @Param({ 
"32", "1000", "100000" }) + public int nKeys; + + @Param({ "2", "3" }) + public int nValues; + + private Random random; + + private String[] values; + + private ContinousHashProvider hashProvider = MapTestEnvironment.prepareHashProvider(false); + + @Setup(Level.Trial) + public void setUpTrial() { + random = new Random(); + values = MapTestEnvironment.prepareValues(nValues); + } + + public VersionedMapImpl createSut() { + VersionedMapStore store = new VersionedMapStoreImpl(hashProvider, values[0]); + return (VersionedMapImpl) store.createMap(); + } + + public Integer nextKey() { + return random.nextInt(nKeys); + } + + public boolean isDefault(String value) { + return value == values[0]; + } + + public String nextValue() { + return values[random.nextInt(nValues)]; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java b/subprojects/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java new file mode 100644 index 00000000..75f1e2ab --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java @@ -0,0 +1,69 @@ +package tools.refinery.store.map; + +import tools.refinery.store.map.internal.Node; + +/** + * A class representing an equivalence relation for a type {@code K} with a + * continuous hash function. + * + * @author Oszkar Semerath + * + * @param Target java type. + */ +public interface ContinousHashProvider { + public static final int EFFECTIVE_BITS = Node.EFFECTIVE_BITS; + public static final int EFFECTIVE_BIT_MASK = (1 << (EFFECTIVE_BITS)) - 1; + + /** + * Maximal practical depth for differentiating keys. If two keys have the same + * hash code until that depth, the algorithm can stop. + */ + public static final int MAX_PRACTICAL_DEPTH = 500; + + /** + * Provides a hash code for a object {@code key} with a given {@code index}. It + * has the following contracts: + *
+ * <ul>
+ * <li>If {@link #equals}{@code (key1,key2)}, then
+ * {@code getHash(key1, index) == getHash(key2, index)} for all values of
+ * {@code index}.</li>
+ * <li>If {@code getHash(key1,index) == getHash(key2, index)} for all values of
+ * {@code index}, then {@link #equals}{@code (key1, key2)}.</li>
+ * <li>In the current implementation, only the least significant
+ * {@link #EFFECTIVE_BITS} bits are used.</li>
+ * </ul>
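Illustrative sketch (not part of the patch): a minimal ContinousHashProvider for Integer keys that honors the contract above, assuming the generic signature ContinousHashProvider<K> implied by the interface (the collapsed rendering has stripped the type parameters); the class name IntegerHashProvider and the mixing constants are hypothetical. Because the hash is a deterministic function of the key and the index, and bijective for each fixed index, equal keys agree at every index and distinct keys differ.

	import tools.refinery.store.map.ContinousHashProvider;

	public class IntegerHashProvider implements ContinousHashProvider<Integer> {
		@Override
		public int getHash(Integer key, int index) {
			// Mix the key with the index so that each depth sees fresh but
			// deterministic hash bits for the same key.
			int h = key ^ (0x9E3779B9 * (index + 1));
			h ^= h >>> 16;
			h *= 0x85EBCA6B;
			h ^= h >>> 13;
			return h;
		}
	}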
+ * Check {@link #equals} for further details. + * + * @param key The target data object. + * @param index The depth of the the hash code. Needs to be non-negative. + * @return A hash code. + */ + public int getHash(K key, int index); + + public default int getEffectiveHash(K key, int index) { + return getHash(key, index) & EFFECTIVE_BIT_MASK; + } + + public default int compare(K key1, K key2) { + if (key1.equals(key2)) { + return 0; + } else { + for (int i = 0; i < ContinousHashProvider.MAX_PRACTICAL_DEPTH; i++) { + int hash1 = getEffectiveHash(key1, i); + int hash2 = getEffectiveHash(key2, i); + for(int j = 0; j>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; + int hashFragment2 = (hash2>>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; + var result = Integer.compare(hashFragment1, hashFragment2); + if (result != 0) { + return result; + } + } + } + throw new IllegalArgumentException("Two different keys (" + key1 + " and " + key2 + + ") have the same hashcode over the practical depth limitation (" + + ContinousHashProvider.MAX_PRACTICAL_DEPTH + ")!"); + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/Cursor.java b/subprojects/store/src/main/java/tools/refinery/store/map/Cursor.java new file mode 100644 index 00000000..9c465ddc --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/Cursor.java @@ -0,0 +1,14 @@ +package tools.refinery.store.map; + +import java.util.List; + +public interface Cursor { + public K getKey(); + public V getValue(); + public boolean isTerminated(); + public boolean move(); + public boolean isDirty(); + + @SuppressWarnings("squid:S1452") + public List> getDependingMaps(); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java b/subprojects/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java new file mode 100644 index 00000000..65ae6648 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java @@ -0,0 +1,57 @@ +package tools.refinery.store.map; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; + +public class CursorAsIterator implements Iterator { + private final Cursor internal; + private final BiFunction entryTransformation; + private final BiPredicate filtering; + + D lastValidElement; + + public CursorAsIterator(Cursor internal, BiFunction entryTransformation, BiPredicate filtering) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = filtering; + + moveToNext(); + } + public CursorAsIterator(Cursor internal, BiFunction entryTransformation) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = ((k,v)->true); + + moveToNext(); + } + + private void moveToNext() { + internal.move(); + while(!internal.isTerminated() && !filtering.test(internal.getKey(), internal.getValue())) { + internal.move(); + } + if(!internal.isTerminated()) { + lastValidElement = entryTransformation.apply(internal.getKey(), internal.getValue()); + } + } + + + @Override + public boolean hasNext() { + return !internal.isTerminated(); + } + @Override + public D next() { + if(hasNext()) { + D last = lastValidElement; + moveToNext(); + return last; + } else { + throw new NoSuchElementException(); + } + + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/DiffCursor.java b/subprojects/store/src/main/java/tools/refinery/store/map/DiffCursor.java 
new file mode 100644 index 00000000..701f3ec8 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/DiffCursor.java @@ -0,0 +1,6 @@ +package tools.refinery.store.map; + +public interface DiffCursor extends Cursor { + public V getFromValue(); + public V getToValue(); +} \ No newline at end of file diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/MapAsIterable.java b/subprojects/store/src/main/java/tools/refinery/store/map/MapAsIterable.java new file mode 100644 index 00000000..6b986732 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/MapAsIterable.java @@ -0,0 +1,26 @@ +package tools.refinery.store.map; + +import java.util.Iterator; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; + +public class MapAsIterable implements Iterable { + private final VersionedMap internal; + private final BiFunction entryTransformation; + private final BiPredicate filtering; + + public MapAsIterable(VersionedMap internal, BiFunction entryTransformation, BiPredicate filtering) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = filtering; + } + public MapAsIterable(VersionedMap internal, BiFunction entryTransformation) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = ((k,v)->true); + } + @Override + public Iterator iterator() { + return new CursorAsIterator<>(internal.getAll(), entryTransformation, filtering); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/Versioned.java b/subprojects/store/src/main/java/tools/refinery/store/map/Versioned.java new file mode 100644 index 00000000..6a23e9d5 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/Versioned.java @@ -0,0 +1,7 @@ +package tools.refinery.store.map; + +public interface Versioned { + public long commit(); + //maybe revert()? 
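Illustrative sketch (not part of the patch): the commit/restore contract of the Versioned interface in a toy implementation, where commit() returns an opaque version handle and restore(handle) brings the object back to exactly that state. The class VersionedCounter is hypothetical and assumes nothing beyond the two methods declared here.

	import java.util.ArrayList;
	import java.util.List;

	import tools.refinery.store.map.Versioned;

	public class VersionedCounter implements Versioned {
		private int value;
		private final List<Integer> versions = new ArrayList<>();

		public void increment() {
			value++;
		}

		@Override
		public long commit() {
			versions.add(value);         // snapshot the current state
			return versions.size() - 1L; // the handle is the snapshot index
		}

		@Override
		public void restore(long state) {
			value = versions.get((int) state); // roll back to that snapshot
		}
	}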
+ public void restore(long state); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMap.java b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMap.java new file mode 100644 index 00000000..a8a64d08 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMap.java @@ -0,0 +1,13 @@ +package tools.refinery.store.map; + +public interface VersionedMap extends Versioned{ + public V get(K key); + public Cursor getAll(); + + public V put(K key, V value); + public void putAll(Cursor cursor); + + public long getSize(); + + public DiffCursor getDiffCursor(long state); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java new file mode 100644 index 00000000..a8d7fb1a --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java @@ -0,0 +1,14 @@ +package tools.refinery.store.map; + +import java.util.Set; + +public interface VersionedMapStore { + + public VersionedMap createMap(); + + public VersionedMap createMap(long state); + + public Set getStates(); + + public DiffCursor getDiffCursor(long fromState, long toState); +} \ No newline at end of file diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java new file mode 100644 index 00000000..723e5ec4 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java @@ -0,0 +1,48 @@ +package tools.refinery.store.map; + +public class VersionedMapStoreConfiguration { + + public VersionedMapStoreConfiguration() { + + } + public VersionedMapStoreConfiguration(boolean immutableWhenCommiting, boolean sharedNodeCacheInStore, + boolean sharedNodeCacheInStoreGroups) { + super(); + this.immutableWhenCommiting = immutableWhenCommiting; + this.sharedNodeCacheInStore = sharedNodeCacheInStore; + this.sharedNodeCacheInStoreGroups = sharedNodeCacheInStoreGroups; + } + + /** + * If true root is replaced with immutable node when committed. Frees up memory + * by releasing immutable nodes, but it may decrease performance by recreating + * immutable nodes upon changes (some evidence). + */ + private boolean immutableWhenCommiting = true; + public boolean isImmutableWhenCommiting() { + return immutableWhenCommiting; + } + + /** + * If true, all subnodes are cached within a {@link VersionedMapStore}. It + * decreases the memory requirements. It may increase performance by discovering + * existing immutable copy of a node (some evidence). Additional overhead may + * decrease performance (no example found). The option permits the efficient + * implementation of version deletion. + */ + private boolean sharedNodeCacheInStore = true; + public boolean isSharedNodeCacheInStore() { + return sharedNodeCacheInStore; + } + + /** + * If true, all subnodes are cached within a group of + * {@link VersionedMapStoreImpl#createSharedVersionedMapStores(int, ContinousHashProvider, Object, VersionedMapStoreConfiguration)}. + * If {@link VersionedMapStoreConfiguration#sharedNodeCacheInStore} is + * false, then it has currently no impact. 
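Illustrative sketch (not part of the patch): how the three configuration flags documented in this class might be combined with the store API introduced in the next hunk. IntegerHashProvider is the hypothetical provider sketched earlier; the constructor, factory method, and map operations are the ones declared in this patch.

	import java.util.List;

	import tools.refinery.store.map.VersionedMap;
	import tools.refinery.store.map.VersionedMapStore;
	import tools.refinery.store.map.VersionedMapStoreConfiguration;
	import tools.refinery.store.map.VersionedMapStoreImpl;

	public class StoreConfigurationExample {
		public static void main(String[] args) {
			String defaultValue = "missing";
			var config = new VersionedMapStoreConfiguration(
					true,  // immutableWhenCommiting: swap the root to an immutable node on commit
					true,  // sharedNodeCacheInStore: deduplicate immutable nodes within one store
					true); // sharedNodeCacheInStoreGroups: deduplicate across a whole store group
			VersionedMapStore<Integer, String> store =
					new VersionedMapStoreImpl<>(new IntegerHashProvider(), defaultValue, config);
			VersionedMap<Integer, String> map = store.createMap();
			map.put(1, "a");
			map.put(1, defaultValue); // writing the default value back removes the entry

			// A group of four stores that share one immutable-node cache:
			List<VersionedMapStore<Integer, String>> group = VersionedMapStoreImpl
					.createSharedVersionedMapStores(4, new IntegerHashProvider(), defaultValue, config);
			System.out.println(group.size() + " stores created");
		}
	}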
+ */ + private boolean sharedNodeCacheInStoreGroups = true; + public boolean isSharedNodeCacheInStoreGroups() { + return sharedNodeCacheInStoreGroups; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java new file mode 100644 index 00000000..a626a5e8 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java @@ -0,0 +1,135 @@ +package tools.refinery.store.map; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.map.internal.ImmutableNode; +import tools.refinery.store.map.internal.MapDiffCursor; +import tools.refinery.store.map.internal.Node; +import tools.refinery.store.map.internal.VersionedMapImpl; + +public class VersionedMapStoreImpl implements VersionedMapStore { + // Configuration + private final boolean immutableWhenCommiting; + + // Static data + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + + // Dynamic data + protected final Map> states = new HashMap<>(); + protected final Map, ImmutableNode> nodeCache; + protected long nextID = 0; + + public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, + VersionedMapStoreConfiguration config) { + this.immutableWhenCommiting = config.isImmutableWhenCommiting(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + if (config.isSharedNodeCacheInStore()) { + nodeCache = new HashMap<>(); + } else { + nodeCache = null; + } + } + + private VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, + Map, ImmutableNode> nodeCache, VersionedMapStoreConfiguration config) { + this.immutableWhenCommiting = config.isImmutableWhenCommiting(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.nodeCache = nodeCache; + } + + public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue) { + this(hashProvider, defaultValue, new VersionedMapStoreConfiguration()); + } + + public static List> createSharedVersionedMapStores(int amount, + ContinousHashProvider hashProvider, V defaultValue, + VersionedMapStoreConfiguration config) { + List> result = new ArrayList<>(amount); + if (config.isSharedNodeCacheInStoreGroups()) { + Map, ImmutableNode> nodeCache; + if (config.isSharedNodeCacheInStore()) { + nodeCache = new HashMap<>(); + } else { + nodeCache = null; + } + for (int i = 0; i < amount; i++) { + result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, nodeCache, config)); + } + } else { + for (int i = 0; i < amount; i++) { + result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, config)); + } + } + return result; + } + + public static List> createSharedVersionedMapStores(int amount, + ContinousHashProvider hashProvider, V defaultValue) { + return createSharedVersionedMapStores(amount, hashProvider, defaultValue, new VersionedMapStoreConfiguration()); + } + + @Override + public synchronized Set getStates() { + return new HashSet<>(states.keySet()); + } + + @Override + public VersionedMap createMap() { + return new VersionedMapImpl<>(this, hashProvider, defaultValue); + } + + @Override + public VersionedMap createMap(long state) { + ImmutableNode data = revert(state); + return new VersionedMapImpl<>(this, hashProvider, 
defaultValue, data); + } + + + public synchronized ImmutableNode revert(long state) { + if (states.containsKey(state)) { + return states.get(state); + } else { + ArrayList existingKeys = new ArrayList<>(states.keySet()); + Collections.sort(existingKeys); + throw new IllegalArgumentException("Store does not contain state " + state + "! Avaliable states: " + + Arrays.toString(existingKeys.toArray())); + } + } + + public synchronized long commit(Node data, VersionedMapImpl mapToUpdateRoot) { + ImmutableNode immutable; + if (data != null) { + immutable = data.toImmutable(this.nodeCache); + } else { + immutable = null; + } + + if (nextID == Long.MAX_VALUE) + throw new IllegalStateException("Map store run out of Id-s"); + long id = nextID++; + this.states.put(id, immutable); + if (this.immutableWhenCommiting) { + mapToUpdateRoot.setRoot(immutable); + } + return id; + } + + @Override + public DiffCursor getDiffCursor(long fromState, long toState) { + VersionedMap map1 = createMap(fromState); + VersionedMap map2 = createMap(toState); + Cursor cursor1 = map1.getAll(); + Cursor cursor2 = map2.getAll(); + return new MapDiffCursor<>(this.hashProvider, this.defaultValue, cursor1, cursor2); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/HashClash.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/HashClash.java new file mode 100644 index 00000000..5402ed4a --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/HashClash.java @@ -0,0 +1,18 @@ +package tools.refinery.store.map.internal; + +enum HashClash { + /** + * Not stuck. + */ + NONE, + + /** + * Clashed, next we should return the key of cursor 1. + */ + STUCK_CURSOR_1, + + /** + * Clashed, next we should return the key of cursor 2. + */ + STUCK_CURSOR_2 +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java new file mode 100644 index 00000000..f68734ab --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java @@ -0,0 +1,378 @@ +package tools.refinery.store.map.internal; + +import java.util.Arrays; +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public class ImmutableNode extends Node { + /** + * Bitmap defining the stored key and values. + */ + final int dataMap; + /** + * Bitmap defining the positions of further nodes. + */ + final int nodeMap; + /** + * Stores Keys, Values, and subnodes. Structure: (K,V)*,NODE; NODES are stored + * backwards. + */ + final Object[] content; + + /** + * Hash code derived from immutable hash code + */ + final int precalculatedHash; + + private ImmutableNode(int dataMap, int nodeMap, Object[] content, int precalculatedHash) { + super(); + this.dataMap = dataMap; + this.nodeMap = nodeMap; + this.content = content; + this.precalculatedHash = precalculatedHash; + } + + /** + * Constructor that copies a mutable node to an immutable. + * + * @param node A mutable node. + * @param cache A cache of existing immutable nodes. It can be used to search + * and place reference immutable nodes. It can be null, if no cache + * available. + * @return an immutable version of the input node. + */ + static ImmutableNode constructImmutable(MutableNode node, + Map, ImmutableNode> cache) { + // 1. 
try to return from cache + if (cache != null) { + ImmutableNode cachedResult = cache.get(node); + if (cachedResult != null) { + // 1.1 Already cached, return from cache. + return cachedResult; + } + } + + // 2. otherwise construct a new ImmutableNode + int size = 0; + for (int i = 0; i < node.content.length; i++) { + if (node.content[i] != null) { + size++; + } + } + + int datas = 0; + int nodes = 0; + int resultDataMap = 0; + int resultNodeMap = 0; + final Object[] resultContent = new Object[size]; + int bitposition = 1; + for (int i = 0; i < FACTOR; i++) { + Object key = node.content[i * 2]; + if (key != null) { + resultDataMap |= bitposition; + resultContent[datas * 2] = key; + resultContent[datas * 2 + 1] = node.content[i * 2 + 1]; + datas++; + } else { + @SuppressWarnings("unchecked") + var subnode = (Node) node.content[i * 2 + 1]; + if (subnode != null) { + ImmutableNode immutableSubnode = subnode.toImmutable(cache); + resultNodeMap |= bitposition; + resultContent[size - 1 - nodes] = immutableSubnode; + nodes++; + } + } + bitposition <<= 1; + } + final int resultHash = node.hashCode(); + var newImmutable = new ImmutableNode(resultDataMap, resultNodeMap, resultContent, resultHash); + + // 3. save new immutable. + if (cache != null) { + cache.put(newImmutable, newImmutable); + } + return newImmutable; + } + + private int index(int bitmap, int bitpos) { + return Integer.bitCount(bitmap & (bitpos - 1)); + } + + @Override + public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + int bitposition = 1 << selectedHashFragment; + // If the key is stored as a data + if ((dataMap & bitposition) != 0) { + int keyIndex = 2 * index(dataMap, bitposition); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[keyIndex]; + if (keyCandidate.equals(key)) { + @SuppressWarnings("unchecked") + V value = (V) content[keyIndex + 1]; + return value; + } else { + return defaultValue; + } + } + // the key is stored as a node + else if ((nodeMap & bitposition) != 0) { + int keyIndex = content.length - 1 - index(nodeMap, bitposition); + @SuppressWarnings("unchecked") + var subNode = (ImmutableNode) content[keyIndex]; + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + return subNode.getValue(key, hashProvider, defaultValue, newHash, newDepth); + } + // the key is not stored at all + else { + return defaultValue; + } + } + + @Override + public Node putValue(K key, V value, OldValueBox oldValue, ContinousHashProvider hashProvider, + V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + int bitposition = 1 << selectedHashFragment; + if ((dataMap & bitposition) != 0) { + int keyIndex = 2 * index(dataMap, bitposition); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[keyIndex]; + if (keyCandidate.equals(key)) { + if (value == defaultValue) { + // delete + MutableNode mutable = this.toMutable(); + return mutable.removeEntry(selectedHashFragment, oldValue); + } else if (value == content[keyIndex + 1]) { + // dont change + oldValue.setOldValue(value); + return this; + } else { + // update existing value + MutableNode mutable = this.toMutable(); + return mutable.updateValue(value, oldValue, selectedHashFragment); + } + } else { + if (value == defaultValue) { + // dont change + oldValue.setOldValue(defaultValue); + return this; + } else { + // add new key + value + MutableNode mutable = 
this.toMutable(); + return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); + } + } + } else if ((nodeMap & bitposition) != 0) { + int keyIndex = content.length - 1 - index(nodeMap, bitposition); + @SuppressWarnings("unchecked") + var subNode = (ImmutableNode) content[keyIndex]; + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + var newsubNode = subNode.putValue(key, value, oldValue, hashProvider, defaultValue, newHash, newDepth); + + if (subNode == newsubNode) { + // nothing changed + return this; + } else { + MutableNode mutable = toMutable(); + return mutable.updateWithSubNode(selectedHashFragment, newsubNode, value.equals(defaultValue)); + } + } else { + // add new key + value + MutableNode mutable = this.toMutable(); + return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); + } + } + + @Override + public long getSize() { + int result = Integer.bitCount(this.dataMap); + for (int subnodeIndex = 0; subnodeIndex < Integer.bitCount(this.nodeMap); subnodeIndex++) { + @SuppressWarnings("unchecked") + var subnode = (ImmutableNode) this.content[this.content.length - 1 - subnodeIndex]; + result += subnode.getSize(); + } + return result; + } + + @Override + protected MutableNode toMutable() { + return new MutableNode<>(this); + } + + @Override + public ImmutableNode toImmutable(Map, ImmutableNode> cache) { + return this; + } + + @Override + protected MutableNode isMutable() { + return null; + } + + @SuppressWarnings("unchecked") + @Override + boolean moveToNext(MapCursor cursor) { + // 1. try to move to data + int datas = Integer.bitCount(this.dataMap); + if (cursor.dataIndex != MapCursor.INDEX_FINISH) { + int newDataIndex = cursor.dataIndex + 1; + if (newDataIndex < datas) { + cursor.dataIndex = newDataIndex; + cursor.key = (K) this.content[newDataIndex * 2]; + cursor.value = (V) this.content[newDataIndex * 2 + 1]; + return true; + } else { + cursor.dataIndex = MapCursor.INDEX_FINISH; + } + } + + // 2. look inside the subnodes + int nodes = Integer.bitCount(this.nodeMap); + int newNodeIndex = cursor.nodeIndexStack.peek() + 1; + if (newNodeIndex < nodes) { + // 2.1 found next subnode, move down to the subnode + Node subnode = (Node) this.content[this.content.length - 1 - newNodeIndex]; + cursor.dataIndex = MapCursor.INDEX_START; + cursor.nodeIndexStack.pop(); + cursor.nodeIndexStack.push(newNodeIndex); + cursor.nodeIndexStack.push(MapCursor.INDEX_START); + cursor.nodeStack.push(subnode); + return subnode.moveToNext(cursor); + } else { + // 3. 
no subnode found, move up + cursor.nodeStack.pop(); + cursor.nodeIndexStack.pop(); + if (!cursor.nodeStack.isEmpty()) { + Node supernode = cursor.nodeStack.peek(); + return supernode.moveToNext(cursor); + } else { + cursor.key = null; + cursor.value = null; + return false; + } + } + } + + @Override + public void prettyPrint(StringBuilder builder, int depth, int code) { + for (int i = 0; i < depth; i++) { + builder.append("\t"); + } + if (code >= 0) { + builder.append(code); + builder.append(":"); + } + builder.append("Immutable("); + boolean hadContent = false; + int dataMask = 1; + for (int i = 0; i < FACTOR; i++) { + if ((dataMask & dataMap) != 0) { + if (hadContent) { + builder.append(","); + } + builder.append(i); + builder.append(":["); + builder.append(content[2 * index(dataMap, dataMask)].toString()); + builder.append("]->["); + builder.append(content[2 * index(dataMap, dataMask) + 1].toString()); + builder.append("]"); + hadContent = true; + } + dataMask <<= 1; + } + builder.append(")"); + int nodeMask = 1; + for (int i = 0; i < FACTOR; i++) { + if ((nodeMask & nodeMap) != 0) { + @SuppressWarnings("unchecked") + Node subNode = (Node) content[content.length - 1 - index(nodeMap, nodeMask)]; + builder.append("\n"); + subNode.prettyPrint(builder, depth + 1, i); + } + nodeMask <<= 1; + } + } + + @Override + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { + if (depth > 0) { + boolean orphaned = Integer.bitCount(dataMap) == 1 && nodeMap == 0; + if (orphaned) { + throw new IllegalStateException("Orphaned node! " + dataMap + ": " + content[0]); + } + } + // check the place of data + + // check subnodes + for (int i = 0; i < Integer.bitCount(nodeMap); i++) { + @SuppressWarnings("unchecked") + var subnode = (Node) this.content[this.content.length - 1 - i]; + if (!(subnode instanceof ImmutableNode)) { + throw new IllegalStateException("Immutable node contains mutable subnodes!"); + } else { + subnode.checkIntegrity(hashProvider, defaultValue, depth + 1); + } + } + } + + @Override + public int hashCode() { + return this.precalculatedHash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (obj instanceof ImmutableNode other) { + return precalculatedHash == other.precalculatedHash && dataMap == other.dataMap && nodeMap == other.nodeMap + && Arrays.deepEquals(content, other.content); + } else if (obj instanceof MutableNode mutableObj) { + return ImmutableNode.compareImmutableMutable(this, mutableObj); + } else { + return false; + } + } + + public static boolean compareImmutableMutable(ImmutableNode immutable, MutableNode mutable) { + int datas = 0; + int nodes = 0; + final int immutableLength = immutable.content.length; + for (int i = 0; i < FACTOR; i++) { + Object key = mutable.content[i * 2]; + // For each key candidate + if (key != null) { + // Check whether a new Key-Value pair can fit into the immutable container + if (datas * 2 + nodes + 2 <= immutableLength) { + if (!immutable.content[datas * 2].equals(key) + || !immutable.content[datas * 2 + 1].equals(mutable.content[i * 2 + 1])) { + return false; + } + } else + return false; + datas++; + } else { + var mutableSubnode = (Node) mutable.content[i * 2 + 1]; + if (mutableSubnode != null) { + if (datas * 2 + nodes + 1 <= immutableLength) { + Object immutableSubnode = immutable.content[immutableLength - 1 - nodes]; + if (!mutableSubnode.equals(immutableSubnode)) { + return false; + } + nodes++; + } else { + return false; + 
} + } + } + } + return true; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java new file mode 100644 index 00000000..b90f5b71 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java @@ -0,0 +1,131 @@ +package tools.refinery.store.map.internal; + +import java.util.ArrayDeque; +import java.util.ConcurrentModificationException; +import java.util.Iterator; +import java.util.List; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; + +public class MapCursor implements Cursor { + // Constants + static final int INDEX_START = -1; + static final int INDEX_FINISH = -2; + + // Tree stack + ArrayDeque> nodeStack; + ArrayDeque nodeIndexStack; + int dataIndex; + + // Values + K key; + V value; + + // Hash code for checking concurrent modifications + final VersionedMap map; + final int creationHash; + + public MapCursor(Node root, VersionedMap map) { + // Initializing tree stack + super(); + this.nodeStack = new ArrayDeque<>(); + this.nodeIndexStack = new ArrayDeque<>(); + if(root != null) { + this.nodeStack.add(root); + this.nodeIndexStack.push(INDEX_START); + } + + this.dataIndex = INDEX_START; + + // Initializing cache + this.key = null; + this.value = null; + + // Initializing state + this.map=map; + this.creationHash = map.hashCode(); + } + + public K getKey() { + return key; + } + + public V getValue() { + return value; + } + + public boolean isTerminated() { + return this.nodeStack.isEmpty(); + } + + public boolean move() { + if(isDirty()) { + throw new ConcurrentModificationException(); + } + if(!isTerminated()) { + boolean result = this.nodeStack.peek().moveToNext(this); + if(this.nodeIndexStack.size() != this.nodeStack.size()) { + throw new IllegalArgumentException("Node stack is corrupted by illegal moves!"); + } + return result; + } + return false; + } + public boolean skipCurrentNode() { + nodeStack.pop(); + nodeIndexStack.pop(); + dataIndex = INDEX_FINISH; + return move(); + } + @Override + public boolean isDirty() { + return this.map.hashCode() != this.creationHash; + } + @Override + public List> getDependingMaps() { + return List.of(this.map); + } + + public static boolean sameSubnode(MapCursor cursor1, MapCursor cursor2) { + Node nodeOfCursor1 = cursor1.nodeStack.peek(); + Node nodeOfCursor2 = cursor2.nodeStack.peek(); + if(nodeOfCursor1 != null && nodeOfCursor2 != null) { + return nodeOfCursor1.equals(nodeOfCursor2); + } else { + return false; + } + } + + /** + * + * @param + * @param + * @param cursor1 + * @param cursor2 + * @return Positive number if cursor 1 is behind, negative number if cursor 2 is behind, and 0 if they are at the same position. 
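Illustrative sketch (not part of the patch): the cursor ordering defined by compare above is what lets the MapDiffCursor in the next hunk walk two versions in lockstep. Here is how such a diff is consumed through the store-level API declared earlier, again using the hypothetical IntegerHashProvider; the intent is that each changed key is reported once with its value in the from-state and the to-state, the default value standing in for an absent entry.

	import tools.refinery.store.map.DiffCursor;
	import tools.refinery.store.map.VersionedMap;
	import tools.refinery.store.map.VersionedMapStore;
	import tools.refinery.store.map.VersionedMapStoreImpl;

	public class DiffCursorExample {
		public static void main(String[] args) {
			VersionedMapStore<Integer, String> store =
					new VersionedMapStoreImpl<>(new IntegerHashProvider(), "missing");
			VersionedMap<Integer, String> map = store.createMap();
			map.put(1, "a");
			long v1 = map.commit();
			map.put(1, "b");
			map.put(2, "c");
			long v2 = map.commit();

			DiffCursor<Integer, String> diff = store.getDiffCursor(v1, v2);
			while (diff.move()) {
				// Each changed key is reported with its value in v1 and in v2.
				System.out.println(diff.getKey() + ": " + diff.getFromValue() + " -> " + diff.getToValue());
			}
		}
	}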
+ */ + public static int compare(MapCursor cursor1, MapCursor cursor2) { + // two cursors are equally deep + Iterator stack1 = cursor1.nodeIndexStack.descendingIterator(); + Iterator stack2 = cursor2.nodeIndexStack.descendingIterator(); + if(stack1.hasNext()) { + if(!stack2.hasNext()) { + // stack 2 has no more element, thus stack 1 is deeper + return 1; + } + int val1 = stack1.next(); + int val2 = stack2.next(); + if(val1 < val2) { + return -1; + } else if(val2 < val1) { + return 1; + } + } + if(stack2.hasNext()) { + // stack 2 has more element, thus stack 2 is deeper + return 1; + } + return Integer.compare(cursor1.dataIndex, cursor2.dataIndex); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java new file mode 100644 index 00000000..42333635 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java @@ -0,0 +1,221 @@ +package tools.refinery.store.map.internal; + +import java.util.List; +import java.util.stream.Stream; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; + +/** + * A cursor representing the difference between two states of a map. + * + * @author Oszkar Semerath + * + */ +public class MapDiffCursor implements DiffCursor, Cursor { + /** + * Default value representing missing elements. + */ + private V defaultValue; + private MapCursor cursor1; + private MapCursor cursor2; + private ContinousHashProvider hashProvider; + + // Values + private K key; + private V fromValue; + private V toValue; + + // State + /** + * Positive number if cursor 1 is behind, negative number if cursor 2 is behind, + * and 0 if they are at the same position. 
+ */ + private int cursorRelation; + private HashClash hashClash = HashClash.NONE; + + public MapDiffCursor(ContinousHashProvider hashProvider, V defaultValue, Cursor cursor1, + Cursor cursor2) { + super(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.cursor1 = (MapCursor) cursor1; + this.cursor2 = (MapCursor) cursor2; + } + + @Override + public K getKey() { + return key; + } + + @Override + public V getFromValue() { + return fromValue; + } + + @Override + public V getToValue() { + return toValue; + } + + @Override + public V getValue() { + return getToValue(); + } + + public boolean isTerminated() { + return cursor1.isTerminated() && cursor2.isTerminated(); + } + + @Override + public boolean isDirty() { + return this.cursor1.isDirty() || this.cursor2.isDirty(); + } + + @Override + public List> getDependingMaps() { + return Stream.concat(cursor1.getDependingMaps().stream(), cursor2.getDependingMaps().stream()).toList(); + } + + protected void updateState() { + if (!isTerminated()) { + this.cursorRelation = MapCursor.compare(cursor1, cursor2); + if (cursorRelation > 0 || cursor2.isTerminated()) { + this.key = cursor1.getKey(); + this.fromValue = cursor1.getValue(); + this.toValue = defaultValue; + } else if (cursorRelation < 0 || cursor1.isTerminated()) { + this.key = cursor2.getKey(); + this.fromValue = defaultValue; + this.toValue = cursor1.getValue(); + } else { + // cursor1 = cursor2 + if (cursor1.getKey().equals(cursor2.getKey())) { + this.key = cursor1.getKey(); + this.fromValue = cursor1.getValue(); + this.toValue = defaultValue; + } else { + resolveHashClashWithFirstEntry(); + } + } + } + } + + protected void resolveHashClashWithFirstEntry() { + int compareResult = this.hashProvider.compare(cursor1.key, cursor2.key); + if (compareResult < 0) { + this.hashClash = HashClash.STUCK_CURSOR_2; + this.cursorRelation = 0; + this.key = cursor1.key; + this.fromValue = cursor1.value; + this.toValue = defaultValue; + } else if (compareResult > 0) { + this.hashClash = HashClash.STUCK_CURSOR_1; + this.cursorRelation = 0; + this.key = cursor2.key; + this.fromValue = defaultValue; + this.toValue = cursor2.value; + } else { + throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); + } + } + + protected boolean isInHashClash() { + return this.hashClash != HashClash.NONE; + } + + protected void resolveHashClashWithSecondEntry() { + switch (this.hashClash) { + case STUCK_CURSOR_1: + this.hashClash = HashClash.NONE; + this.cursorRelation = 0; + this.key = cursor1.key; + this.fromValue = cursor1.value; + this.toValue = defaultValue; + break; + case STUCK_CURSOR_2: + this.hashClash = HashClash.NONE; + this.cursorRelation = 0; + this.key = cursor2.key; + this.fromValue = defaultValue; + this.toValue = cursor2.value; + break; + default: + throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); + } + } + + protected boolean sameValues() { + if (this.fromValue == null) { + return this.toValue == null; + } else { + return this.fromValue.equals(this.toValue); + } + } + + protected boolean moveOne() { + if (isTerminated()) { + return false; + } + if (this.cursorRelation > 0 || cursor2.isTerminated()) { + return cursor1.move(); + } else if (this.cursorRelation < 0 || cursor1.isTerminated()) { + return cursor2.move(); + } else { + boolean moved1 = cursor1.move(); + boolean moved2 = cursor2.move(); + return moved1 && moved2; + } + } + + private boolean skipNode() { + if (isTerminated()) { + throw new 
IllegalStateException("DiffCursor tries to skip when terminated!"); + } + boolean update1 = cursor1.skipCurrentNode(); + boolean update2 = cursor2.skipCurrentNode(); + updateState(); + return update1 && update2; + } + + protected boolean moveToConsistentState() { + if (!isTerminated()) { + boolean changed; + boolean lastResult = true; + do { + changed = false; + if (MapCursor.sameSubnode(cursor1, cursor2)) { + lastResult = skipNode(); + changed = true; + } + if (sameValues()) { + lastResult = moveOne(); + changed = true; + } + updateState(); + } while (changed && !isTerminated()); + return lastResult; + } else { + return false; + } + } + + public boolean move() { + if (!isTerminated()) { + if (isInHashClash()) { + this.resolveHashClashWithSecondEntry(); + return true; + } else { + if (moveOne()) { + return moveToConsistentState(); + } else { + return false; + } + } + + } else + return false; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java new file mode 100644 index 00000000..54853010 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java @@ -0,0 +1,454 @@ +package tools.refinery.store.map.internal; + +import java.util.Arrays; +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public class MutableNode extends Node { + int cachedHash; + protected Object[] content; + + protected MutableNode() { + this.content = new Object[2 * FACTOR]; + updateHash(); + } + + public static MutableNode initialize(K key, V value, ContinousHashProvider hashProvider, + V defaultValue) { + if (value == defaultValue) { + return null; + } else { + int hash = hashProvider.getHash(key, 0); + int fragment = hashFragment(hash, 0); + MutableNode res = new MutableNode<>(); + res.content[2 * fragment] = key; + res.content[2 * fragment + 1] = value; + res.updateHash(); + return res; + } + } + + /** + * Constructs a {@link MutableNode} as a copy of an {@link ImmutableNode} + * + * @param node + */ + protected MutableNode(ImmutableNode node) { + this.content = new Object[2 * FACTOR]; + int dataUsed = 0; + int nodeUsed = 0; + for (int i = 0; i < FACTOR; i++) { + int bitposition = 1 << i; + if ((node.dataMap & bitposition) != 0) { + content[2 * i] = node.content[dataUsed * 2]; + content[2 * i + 1] = node.content[dataUsed * 2 + 1]; + dataUsed++; + } else if ((node.nodeMap & bitposition) != 0) { + content[2 * i + 1] = node.content[node.content.length - 1 - nodeUsed]; + nodeUsed++; + } + } + this.cachedHash = node.hashCode(); + } + + @Override + public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + @SuppressWarnings("unchecked") + K keyCandidate = (K) this.content[2 * selectedHashFragment]; + if (keyCandidate != null) { + if (keyCandidate.equals(key)) { + @SuppressWarnings("unchecked") + V value = (V) this.content[2 * selectedHashFragment + 1]; + return value; + } else { + return defaultValue; + } + } else { + @SuppressWarnings("unchecked") + var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; + if (nodeCandidate != null) { + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + return nodeCandidate.getValue(key, hashProvider, defaultValue, newHash, newDepth); + } else { + return defaultValue; + } + } + } + + @Override + public Node putValue(K key, V 
value, OldValueBox oldValueBox, ContinousHashProvider hashProvider, + V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[2 * selectedHashFragment]; + if (keyCandidate != null) { + // If has key + if (keyCandidate.equals(key)) { + // The key is equals to an existing key -> update entry + if (value == defaultValue) { + return removeEntry(selectedHashFragment, oldValueBox); + } else { + return updateValue(value, oldValueBox, selectedHashFragment); + } + } else { + // The key is not equivalent to an existing key on the same hash bin + // -> split entry if it is necessary + if (value == defaultValue) { + // Value is default -> do not need to add new node + oldValueBox.setOldValue(defaultValue); + return this; + } else { + // Value is not default -> Split entry data to a new node + oldValueBox.setOldValue(defaultValue); + return moveDownAndSplit(hashProvider, key, value, keyCandidate, hash, depth, selectedHashFragment); + } + } + } else { + // If it does not have key, check for value + @SuppressWarnings("unchecked") + var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; + if (nodeCandidate != null) { + // If it has value, it is a subnode -> upate that + var newNode = nodeCandidate.putValue(key, value, oldValueBox, hashProvider, defaultValue, + newHash(hashProvider, key, hash, depth + 1), depth + 1); + return updateWithSubNode(selectedHashFragment, newNode, value.equals(defaultValue)); + } else { + // If it does not have value, put it in the empty place + if (value == defaultValue) { + // dont need to add new key-value pair + oldValueBox.setOldValue(defaultValue); + return this; + } else { + return addEntry(key, value, oldValueBox, selectedHashFragment, defaultValue); + } + + } + } + } + + private Node addEntry(K key, V value, OldValueBox oldValueBox, int selectedHashFragment, V defaultValue) { + content[2 * selectedHashFragment] = key; + oldValueBox.setOldValue(defaultValue); + content[2 * selectedHashFragment + 1] = value; + updateHash(); + return this; + } + + /** + * Updates an entry in a selected hash-fragment to a non-default value. + * + * @param value + * @param selectedHashFragment + * @return + */ + @SuppressWarnings("unchecked") + Node updateValue(V value, OldValueBox oldValue, int selectedHashFragment) { + oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); + content[2 * selectedHashFragment + 1] = value; + updateHash(); + return this; + } + + /** + * + * @param selectedHashFragment + * @param newNode + * @return + */ + Node updateWithSubNode(int selectedHashFragment, Node newNode, boolean deletionHappened) { + if (deletionHappened) { + if (newNode == null) { + // Check whether this node become empty + content[2 * selectedHashFragment + 1] = null; // i.e. 
the new node + if (hasContent()) { + updateHash(); + return this; + } else { + return null; + } + } else { + // check whether newNode is orphan + MutableNode immutableNewNode = newNode.isMutable(); + if (immutableNewNode != null) { + int orphaned = immutableNewNode.isOrphaned(); + if (orphaned >= 0) { + // orphan subnode data is replaced with data + content[2 * selectedHashFragment] = immutableNewNode.content[orphaned * 2]; + content[2 * selectedHashFragment + 1] = immutableNewNode.content[orphaned * 2 + 1]; + updateHash(); + return this; + } + } + } + } + // normal behaviour + content[2 * selectedHashFragment + 1] = newNode; + updateHash(); + return this; + + } + + private boolean hasContent() { + for (Object element : this.content) { + if (element != null) + return true; + } + return false; + } + + @Override + protected MutableNode isMutable() { + return this; + } + + protected int isOrphaned() { + int dataFound = -2; + for (int i = 0; i < FACTOR; i++) { + if (content[i * 2] != null) { + if (dataFound >= 0) { + return -1; + } else { + dataFound = i; + } + } else if (content[i * 2 + 1] != null) { + return -3; + } + } + return dataFound; + } + + @SuppressWarnings("unchecked") + private Node moveDownAndSplit(ContinousHashProvider hashProvider, K newKey, V newValue, + K previousKey, int hashOfNewKey, int depth, int selectedHashFragmentOfCurrentDepth) { + V previousValue = (V) content[2 * selectedHashFragmentOfCurrentDepth + 1]; + + MutableNode newSubNode = newNodeWithTwoEntries(hashProvider, previousKey, previousValue, + hashProvider.getHash(previousKey, hashDepth(depth)), newKey, newValue, hashOfNewKey, depth + 1); + + content[2 * selectedHashFragmentOfCurrentDepth] = null; + content[2 * selectedHashFragmentOfCurrentDepth + 1] = newSubNode; + updateHash(); + return this; + } + + // Pass everything as parameters for performance. 
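The recursion in moveDownAndSplit only terminates because each level of the trie looks at a different 5-bit slice of the key's continuous hash. The self-contained sketch below illustrates that index arithmetic; it is not part of this commit, and it assumes 5-bit fragments (BRANCHING_FACTOR_BITS = 5, as declared in Node later in this patch) and roughly 30 useful bits per ContinousHashProvider value (as in TupleHashProvider), so the real hashFragment/shiftDepth/newHash helpers of Node may differ in detail.

public class HashFragmentSketch {
	static final int BRANCHING_FACTOR_BITS = 5;           // 32-way nodes, matching Node.FACTOR
	static final int FACTOR = 1 << BRANCHING_FACTOR_BITS;
	static final int BITS_PER_HASH = 30;                   // assumed useful bits per ContinousHashProvider value
	static final int LEVELS_PER_HASH = BITS_PER_HASH / BRANCHING_FACTOR_BITS;

	// Which of the 32 slots a key occupies at the given shift.
	static int hashFragment(int hash, int shift) {
		return (hash >>> shift) & (FACTOR - 1);
	}

	// Within one 30-bit hash value, the shift grows by 5 bits per level.
	static int shiftDepth(int depth) {
		return (depth % LEVELS_PER_HASH) * BRANCHING_FACTOR_BITS;
	}

	public static void main(String[] args) {
		int hash1 = 0b01010_00011, hash2 = 0b11111_00011;  // same lowest 5 bits, so they clash at depth 0
		System.out.println(hashFragment(hash1, shiftDepth(0)) == hashFragment(hash2, shiftDepth(0))); // true
		System.out.println(hashFragment(hash1, shiftDepth(1)) == hashFragment(hash2, shiftDepth(1))); // false: the clash is resolved one level down
	}
}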
+ @SuppressWarnings("squid:S107") + private MutableNode newNodeWithTwoEntries(ContinousHashProvider hashProvider, K key1, V value1, + int oldHash1, K key2, V value2, int oldHash2, int newdepth) { + int newHash1 = newHash(hashProvider, key1, oldHash1, newdepth); + int newHash2 = newHash(hashProvider, key2, oldHash2, newdepth); + int newFragment1 = hashFragment(newHash1, shiftDepth(newdepth)); + int newFragment2 = hashFragment(newHash2, shiftDepth(newdepth)); + + MutableNode subNode = new MutableNode<>(); + if (newFragment1 != newFragment2) { + subNode.content[newFragment1 * 2] = key1; + subNode.content[newFragment1 * 2 + 1] = value1; + + subNode.content[newFragment2 * 2] = key2; + subNode.content[newFragment2 * 2 + 1] = value2; + } else { + MutableNode subSubNode = newNodeWithTwoEntries(hashProvider, key1, value1, newHash1, key2, value2, + newHash2, newdepth + 1); + subNode.content[newFragment1 * 2 + 1] = subSubNode; + } + subNode.updateHash(); + return subNode; + } + + @SuppressWarnings("unchecked") + Node removeEntry(int selectedHashFragment, OldValueBox oldValue) { + content[2 * selectedHashFragment] = null; + oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); + content[2 * selectedHashFragment + 1] = null; + if (hasContent()) { + updateHash(); + return this; + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + @Override + public long getSize() { + int size = 0; + for (int i = 0; i < FACTOR; i++) { + if (content[i * 2] != null) { + size++; + } else { + Node nodeCandidate = (Node) content[i * 2 + 1]; + if (nodeCandidate != null) { + size += nodeCandidate.getSize(); + } + } + } + return size; + } + + @Override + protected MutableNode toMutable() { + return this; + } + + @Override + public ImmutableNode toImmutable(Map, ImmutableNode> cache) { + return ImmutableNode.constructImmutable(this, cache); + } + + @SuppressWarnings("unchecked") + @Override + boolean moveToNext(MapCursor cursor) { + // 1. try to move to data + if (cursor.dataIndex != MapCursor.INDEX_FINISH) { + for (int index = cursor.dataIndex + 1; index < FACTOR; index++) { + if (this.content[index * 2] != null) { + // 1.1 found next data + cursor.dataIndex = index; + cursor.key = (K) this.content[index * 2]; + cursor.value = (V) this.content[index * 2 + 1]; + return true; + } + } + cursor.dataIndex = MapCursor.INDEX_FINISH; + } + + // 2. look inside the subnodes + for (int index = cursor.nodeIndexStack.peek() + 1; index < FACTOR; index++) { + if (this.content[index * 2] == null && this.content[index * 2 + 1] != null) { + // 2.1 found next subnode, move down to the subnode + Node subnode = (Node) this.content[index * 2 + 1]; + + cursor.dataIndex = MapCursor.INDEX_START; + cursor.nodeIndexStack.pop(); + cursor.nodeIndexStack.push(index); + cursor.nodeIndexStack.push(MapCursor.INDEX_START); + cursor.nodeStack.push(subnode); + + return subnode.moveToNext(cursor); + } + } + // 3. 
no subnode found, move up + cursor.nodeStack.pop(); + cursor.nodeIndexStack.pop(); + if (!cursor.nodeStack.isEmpty()) { + Node supernode = cursor.nodeStack.peek(); + return supernode.moveToNext(cursor); + } else { + cursor.key = null; + cursor.value = null; + return false; + } + } + + @Override + public void prettyPrint(StringBuilder builder, int depth, int code) { + for (int i = 0; i < depth; i++) { + builder.append("\t"); + } + if (code >= 0) { + builder.append(code); + builder.append(":"); + } + builder.append("Mutable("); + // print content + boolean hadContent = false; + for (int i = 0; i < FACTOR; i++) { + if (content[2 * i] != null) { + if (hadContent) { + builder.append(","); + } + builder.append(i); + builder.append(":["); + builder.append(content[2 * i].toString()); + builder.append("]->["); + builder.append(content[2 * i + 1].toString()); + builder.append("]"); + hadContent = true; + } + } + builder.append(")"); + // print subnodes + for (int i = 0; i < FACTOR; i++) { + if (content[2 * i] == null && content[2 * i + 1] != null) { + @SuppressWarnings("unchecked") + Node subNode = (Node) content[2 * i + 1]; + builder.append("\n"); + subNode.prettyPrint(builder, depth + 1, i); + } + } + } + + @Override + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { + // check for orphan nodes + if (depth > 0) { + int orphaned = isOrphaned(); + if (orphaned >= 0) { + throw new IllegalStateException("Orphaned node! " + orphaned + ": " + content[2 * orphaned]); + } + } + // check the place of data + for (int i = 0; i < FACTOR; i++) { + if (this.content[2 * i] != null) { + @SuppressWarnings("unchecked") + K key = (K) this.content[2 * i]; + @SuppressWarnings("unchecked") + V value = (V) this.content[2 * i + 1]; + + if (value == defaultValue) { + throw new IllegalStateException("Node contains default value!"); + } + int hashCode = hashProvider.getHash(key, hashDepth(depth)); + int shiftDepth = shiftDepth(depth); + int selectedHashFragment = hashFragment(hashCode, shiftDepth); + if (i != selectedHashFragment) { + throw new IllegalStateException("Key " + key + " with hash code " + hashCode + + " is in bad place! Fragment=" + selectedHashFragment + ", Place=" + i); + } + } + } + // check subnodes + for (int i = 0; i < FACTOR; i++) { + if (this.content[2 * i + 1] != null && this.content[2 * i] == null) { + @SuppressWarnings("unchecked") + var subNode = (Node) this.content[2 * i + 1]; + subNode.checkIntegrity(hashProvider, defaultValue, depth + 1); + } + } + // check the hash + int oldHash = this.cachedHash; + updateHash(); + int newHash = this.cachedHash; + if (oldHash != newHash) { + throw new IllegalStateException("Hash code was not up to date! 
(old=" + oldHash + ",new=" + newHash + ")"); + } + } + + protected void updateHash() { + this.cachedHash = Arrays.hashCode(content); + } + + @Override + public int hashCode() { + return this.cachedHash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (obj instanceof MutableNode mutableObj) { + return Arrays.deepEquals(this.content, mutableObj.content); + } else if (obj instanceof ImmutableNode immutableObj) { + return ImmutableNode.compareImmutableMutable(immutableObj, this); + } else { + return false; + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/Node.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/Node.java new file mode 100644 index 00000000..234a4ff3 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/Node.java @@ -0,0 +1,85 @@ +package tools.refinery.store.map.internal; + +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public abstract class Node{ + public static final int BRANCHING_FACTOR_BITS = 5; + public static final int FACTOR = 1< hashProvider, V defaultValue, int hash, int depth); + public abstract Node putValue(K key, V value, OldValueBox old, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth); + public abstract long getSize(); + + abstract MutableNode toMutable(); + public abstract ImmutableNode toImmutable( + Map,ImmutableNode> cache); + protected abstract MutableNode isMutable(); + /** + * Moves a {@link MapCursor} to its next position. + * @param cursor the cursor + * @return Whether there was a next value to move on. + */ + abstract boolean moveToNext(MapCursor cursor); + + ///////// FOR printing + public abstract void prettyPrint(StringBuilder builder, int depth, int code); + @Override + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + prettyPrint(stringBuilder, 0, -1); + return stringBuilder.toString(); + } + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) {} + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java new file mode 100644 index 00000000..5534c703 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java @@ -0,0 +1,19 @@ +package tools.refinery.store.map.internal; + +public class OldValueBox{ + V oldValue; + boolean isSet = false; + + public V getOldValue() { + if(!isSet) throw new IllegalStateException(); + isSet = false; + return oldValue; + } + + public void setOldValue(V ouldValue) { + if(isSet) throw new IllegalStateException(); + this.oldValue = ouldValue; + isSet = true; + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java b/subprojects/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java new file mode 100644 index 00000000..346fe596 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java @@ -0,0 +1,171 @@ +package tools.refinery.store.map.internal; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import 
tools.refinery.store.map.VersionedMapStoreImpl; + +/** + * Not threadSafe in itself + * @author Oszkar Semerath + * + * @param + * @param + */ +public class VersionedMapImpl implements VersionedMap{ + protected final VersionedMapStoreImpl store; + + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + protected Node root; + + private OldValueBox oldValueBox = new OldValueBox<>(); + + public VersionedMapImpl( + VersionedMapStoreImpl store, + ContinousHashProvider hashProvider, + V defaultValue) + { + this.store = store; + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.root = null; + } + public VersionedMapImpl( + VersionedMapStoreImpl store, + ContinousHashProvider hashProvider, + V defaultValue, Node data) + { + this.store = store; + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.root = data; + } + + public V getDefaultValue() { + return defaultValue; + } + public ContinousHashProvider getHashProvider() { + return hashProvider; + } + @Override + public V put(K key, V value) { + if(root!=null) { + root = root.putValue(key, value, oldValueBox, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); + return oldValueBox.getOldValue(); + } else { + root = MutableNode.initialize(key, value, hashProvider, defaultValue); + return defaultValue; + } + } + + @Override + public void putAll(Cursor cursor) { + if(cursor.getDependingMaps().contains(this)) { + List keys = new LinkedList<>(); + List values = new LinkedList<>(); + while(cursor.move()) { + keys.add(cursor.getKey()); + values.add(cursor.getValue()); + } + Iterator keyIterator = keys.iterator(); + Iterator valueIterator = values.iterator(); + while(keyIterator.hasNext()) { + this.put(keyIterator.next(), valueIterator.next()); + } + } else { + while(cursor.move()) { + this.put(cursor.getKey(), cursor.getValue()); + } + } + } + + @Override + public V get(K key) { + if(root!=null) { + return root.getValue(key, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); + } else { + return defaultValue; + } + } + @Override + public long getSize() { + if(root == null) { + return 0; + } else { + return root.getSize(); + } + } + + @Override + public Cursor getAll() { + return new MapCursor<>(this.root,this); + } + @Override + public DiffCursor getDiffCursor(long toVersion) { + Cursor fromCursor = this.getAll(); + VersionedMap toMap = this.store.createMap(toVersion); + Cursor toCursor = toMap.getAll(); + return new MapDiffCursor<>(this.hashProvider,this.defaultValue, fromCursor, toCursor); + + } + + + @Override + public long commit() { + return this.store.commit(root,this); + } + public void setRoot(Node root) { + this.root = root; + } + + @Override + public void restore(long state) { + root = this.store.revert(state); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((root == null) ? 
0 : root.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + VersionedMapImpl other = (VersionedMapImpl) obj; + if (root == null) { + if (other.root != null) + return false; + } else if (!root.equals(other.root)) + return false; + return true; + } + public void prettyPrint() { + StringBuilder s = new StringBuilder(); + if(this.root != null) { + this.root.prettyPrint(s, 0, -1); + System.out.println(s.toString()); + } else { + System.out.println("empty tree"); + } + } + public void checkIntegrity() { + if(this.root != null) { + this.root.checkIntegrity(hashProvider, defaultValue, 0); + } + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/Model.java b/subprojects/store/src/main/java/tools/refinery/store/model/Model.java new file mode 100644 index 00000000..a42d711a --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/Model.java @@ -0,0 +1,20 @@ +package tools.refinery.store.model; + +import java.util.Set; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.Versioned; +import tools.refinery.store.model.representation.DataRepresentation; + +public interface Model extends Versioned{ + @SuppressWarnings("squid:S1452") + Set> getDataRepresentations(); + + V get(DataRepresentation representation, K key); + Cursor getAll(DataRepresentation representation); + V put(DataRepresentation representation, K key, V value); + void putAll(DataRepresentation representation, Cursor cursor); + long getSize(DataRepresentation representation); + + ModelDiffCursor getDiffCursor(long to); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/ModelCursor.java b/subprojects/store/src/main/java/tools/refinery/store/model/ModelCursor.java new file mode 100644 index 00000000..a835cf69 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/ModelCursor.java @@ -0,0 +1,25 @@ +package tools.refinery.store.model; + +import java.util.Map; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelCursor { + final Map,Cursor> cursors; + + public ModelCursor(Map, Cursor> cursors) { + super(); + this.cursors = cursors; + } + + @SuppressWarnings("unchecked") + public Cursor getCursor(DataRepresentation representation) { + Cursor cursor = cursors.get(representation); + if(cursor != null) { + return (Cursor) cursor; + } else { + throw new IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java b/subprojects/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java new file mode 100644 index 00000000..91990fa6 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java @@ -0,0 +1,26 @@ +package tools.refinery.store.model; + +import java.util.Map; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelDiffCursor { + final Map,DiffCursor> diffcursors; + + public ModelDiffCursor(Map, DiffCursor> diffcursors) { + super(); + this.diffcursors = diffcursors; + } + + @SuppressWarnings("unchecked") + public DiffCursor getCursor(DataRepresentation representation) { + Cursor 
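A minimal usage sketch of the versioned map API defined above, using only constructors and methods visible in this patch (VersionedMapStoreImpl, VersionedMap, DiffCursor) together with the Tuple and TupleHashProvider helpers that appear further down; the generic arguments, which the flattened patch text drops, are reconstructed here and are therefore partly an assumption.

import tools.refinery.store.map.DiffCursor;
import tools.refinery.store.map.VersionedMap;
import tools.refinery.store.map.VersionedMapStoreImpl;
import tools.refinery.store.model.Tuple;
import tools.refinery.store.model.TupleHashProvider;

public class VersionedMapUsageSketch {
	public static void main(String[] args) {
		// The store keeps the committed versions; each map is a mutable view that can commit and restore.
		var store = new VersionedMapStoreImpl<Tuple, Boolean>(TupleHashProvider.singleton(), false);
		VersionedMap<Tuple, Boolean> map = store.createMap();

		map.put(Tuple.of(1, 2), true);
		long v1 = map.commit();

		map.put(Tuple.of(1, 2), false);  // writing the default value back removes the entry
		map.put(Tuple.of(3, 4), true);
		long v2 = map.commit();

		// Differences between the map's current contents and version v1, as (key, fromValue, toValue) triples.
		DiffCursor<Tuple, Boolean> diff = map.getDiffCursor(v1);
		while (diff.move()) {
			System.out.println(diff.getKey() + ": " + diff.getFromValue() + " -> " + diff.getToValue());
		}

		map.restore(v1);                 // roll back to the first committed version
	}
}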
cursor = diffcursors.get(representation); + if(cursor != null) { + return (DiffCursor) cursor; + } else { + throw new IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/ModelStore.java b/subprojects/store/src/main/java/tools/refinery/store/model/ModelStore.java new file mode 100644 index 00000000..682a0e78 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/ModelStore.java @@ -0,0 +1,16 @@ +package tools.refinery.store.model; + +import java.util.Set; + +import tools.refinery.store.model.representation.DataRepresentation; + +public interface ModelStore { + @SuppressWarnings("squid:S1452") + Set> getDataRepresentations(); + + Model createModel(); + Model createModel(long state); + + Set getStates(); + ModelDiffCursor getDiffCursor(long from, long to); +} \ No newline at end of file diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java b/subprojects/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java new file mode 100644 index 00000000..97406cbb --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java @@ -0,0 +1,122 @@ +package tools.refinery.store.model; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.model.internal.ModelImpl; +import tools.refinery.store.model.internal.SimilarRelationEquivalenceClass; +import tools.refinery.store.model.representation.AuxilaryData; +import tools.refinery.store.model.representation.DataRepresentation; +import tools.refinery.store.model.representation.Relation; + +import java.util.Set; + +public class ModelStoreImpl implements ModelStore { + + private final Map, VersionedMapStore> stores; + + public ModelStoreImpl(Set> dataRepresentations) { + stores = initStores(dataRepresentations); + } + + private Map, VersionedMapStore> initStores( + Set> dataRepresentations) { + Map, VersionedMapStore> result = new HashMap<>(); + + Map>> symbolRepresentationsPerHashPerArity = new HashMap<>(); + + for (DataRepresentation dataRepresentation : dataRepresentations) { + if (dataRepresentation instanceof Relation symbolRepresentation) { + addOrCreate(symbolRepresentationsPerHashPerArity, + new SimilarRelationEquivalenceClass(symbolRepresentation), symbolRepresentation); + } else if (dataRepresentation instanceof AuxilaryData) { + VersionedMapStoreImpl store = new VersionedMapStoreImpl<>(dataRepresentation.getHashProvider(), + dataRepresentation.getDefaultValue()); + result.put(dataRepresentation, store); + } else { + throw new UnsupportedOperationException( + "Model store does not have strategy to use " + dataRepresentation.getClass() + "!"); + } + } + for (List> symbolGroup : symbolRepresentationsPerHashPerArity.values()) { + initRepresentationGroup(result, symbolGroup); + } + + return result; + } + + private void initRepresentationGroup(Map, VersionedMapStore> result, + List> symbolGroup) { + final ContinousHashProvider hashProvider = symbolGroup.get(0).getHashProvider(); + final Object defaultValue = symbolGroup.get(0).getDefaultValue(); + + List> maps = 
VersionedMapStoreImpl + .createSharedVersionedMapStores(symbolGroup.size(), hashProvider, defaultValue); + + for (int i = 0; i < symbolGroup.size(); i++) { + result.put(symbolGroup.get(i), maps.get(i)); + } + } + + private static void addOrCreate(Map> map, K key, V value) { + List list; + if (map.containsKey(key)) { + list = map.get(key); + } else { + list = new LinkedList<>(); + map.put(key, list); + } + list.add(value); + } + + @Override + public Set> getDataRepresentations() { + return this.stores.keySet(); + } + + @Override + public ModelImpl createModel() { + Map, VersionedMap> maps = new HashMap<>(); + for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { + maps.put(entry.getKey(), entry.getValue().createMap()); + } + return new ModelImpl(this, maps); + } + + @Override + public synchronized ModelImpl createModel(long state) { + Map, VersionedMap> maps = new HashMap<>(); + for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { + maps.put(entry.getKey(), entry.getValue().createMap(state)); + } + return new ModelImpl(this, maps); + } + + @Override + public synchronized Set getStates() { + var iterator = stores.values().iterator(); + if (iterator.hasNext()) { + return Set.copyOf(iterator.next().getStates()); + } + return Set.of(0l); + } + + @Override + public synchronized ModelDiffCursor getDiffCursor(long from, long to) { + Map, DiffCursor> diffcursors = new HashMap<>(); + for (Entry, VersionedMapStore> entry : stores.entrySet()) { + DataRepresentation representation = entry.getKey(); + DiffCursor diffCursor = entry.getValue().getDiffCursor(from, to); + diffcursors.put(representation, diffCursor); + } + return new ModelDiffCursor(diffcursors); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/Tuple.java b/subprojects/store/src/main/java/tools/refinery/store/model/Tuple.java new file mode 100644 index 00000000..0aae3727 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/Tuple.java @@ -0,0 +1,148 @@ +package tools.refinery.store.model; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public abstract class Tuple { + private static final int CUSTOMTUPLESIZE = 2; + protected static final List tuple1Cash = new ArrayList<>(1024); + + public abstract int getSize(); + public abstract int get(int element); + public abstract int[] toArray(); + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + b.append("["); + for(int i = 0; i= tuple1Cash.size()) { + newlyCreated = new Tuple1(tuple1Cash.size()); + tuple1Cash.add(newlyCreated); + } + return newlyCreated; + } + } + + public static Tuple of(int... 
values) { + if(values.length == 0) { + return new Tuple0(); + } else if(values.length == 1) { + return of1(values[0]); + } else if(values.length == 2) { + return new Tuple2(values[0],values[1]); + } else return new TupleN(values); + } + + protected IllegalArgumentException doesNotContain(int element) { + return new IllegalArgumentException("Tuple does not contain element "+element); + } + + public static class Tuple0 extends Tuple{ + protected Tuple0() { } + @Override public int getSize() { return 0; } + @Override public int get(int element) { + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{};} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + return true; + } + } + public static class Tuple1 extends Tuple{ + final int value0; + protected Tuple1(int value0) { this.value0 = value0; } + @Override public int getSize() { return 1; } + @Override public int get(int element) { + if(element == 0) return value0; + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{ value0 };} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Tuple1 other = (Tuple1) obj; + return value0 == other.value0; + } + } + public static class Tuple2 extends Tuple{ + final int value0; + final int value1; + protected Tuple2(int value0, int value1) { this.value0 = value0; this.value1 = value1; } + @Override public int getSize() { return 2; } + @Override public int get(int element) { + if(element == 0) return value0; + else if(element == 1) return value1; + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{ value0,value1 };} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Tuple2 other = (Tuple2) obj; + return value0 == other.value0 && value1 == other.value1; + } + } + public static class TupleN extends Tuple{ + final int[] values; + protected TupleN(int[] values) { + if(values.length { + protected static TupleHashProvider instance; + + public static TupleHashProvider singleton() { + if (instance == null) { + instance = new TupleHashProvider(); + } + return instance; + } + + protected static final int[] primes = new int[] { 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, + 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, + 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, + 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, + 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, + 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, + 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 
977, 983, 991, 997, 1009, 1013, 1019, 1021, + 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, + 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409, 1423, 1427, 1429, + 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523, 1531, 1543, 1549, + 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, 1663, 1667, + 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, + 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, 2063, 2069, 2081, 2083, 2087, 2089, + 2099, 2111, 2113, 2129, 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221, 2237, 2239, 2243, + 2251, 2267, 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, 2371, 2377, + 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, + 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, 2753, 2767, 2777, 2789, 2791, + 2797, 2801, 2803, 2819, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, 3209, 3217, + 3221, 3229, 3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, 3343, 3347, 3359, + 3361, 3371, 3373, 3389, 3391, 3407, 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, + 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, + 3643, 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, 3733, 3739, 3761, 3767, 3769, 3779, 3793, + 3797, 3803, 3821, 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, 3911 }; + + protected static final long LARGESTPRIME30BITS = 1073741789; + + public TupleHashProvider() { + if (primes.length < MAX_PRACTICAL_DEPTH) { + throw new UnsupportedOperationException( + "Not enough prime numbers to support the practical depth of continuous hash!"); + } + } + + @Override + public int getHash(Tuple key, int index) { + if (index >= primes.length) { + throw new IllegalArgumentException("Not enough prime numbers to support index"); + } + long accumulator = 0; + final int prime = primes[index]; + for (int i = 0; i < key.getSize(); i++) { + accumulator = (prime * accumulator + key.get(i)) % LARGESTPRIME30BITS; + } + + return (int) accumulator; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java b/subprojects/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java new file mode 100644 index 00000000..5b053229 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java @@ -0,0 +1,28 @@ +package tools.refinery.store.model; + +import tools.refinery.store.map.ContinousHashProvider; + +public class TupleHashProviderBitMagic implements ContinousHashProvider { + + @Override + public int getHash(Tuple key, int index) { + if(key.getSize() == 1) { + return key.get(0); + } + + int result = 0; + final int startBitIndex = index*30; + final int finalBitIndex = startBitIndex+30; + final int 
arity = key.getSize(); + + for(int i = startBitIndex; i<=finalBitIndex; i++) { + final int selectedKey = key.get(i%arity); + final int selectedPosition = 1<<(i/arity); + if((selectedKey&selectedPosition) != 0) { + result |= 1<<(i%30); + } + } + + return result; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java b/subprojects/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java new file mode 100644 index 00000000..2a5f2925 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java @@ -0,0 +1,124 @@ +package tools.refinery.store.model.internal; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.internal.MapDiffCursor; +import tools.refinery.store.model.Model; +import tools.refinery.store.model.ModelDiffCursor; +import tools.refinery.store.model.ModelStore; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelImpl implements Model { + private final ModelStore store; + private final Map, VersionedMap> maps; + + public ModelImpl(ModelStore store, Map, VersionedMap> maps) { + this.store = store; + this.maps = maps; + } + + @Override + public Set> getDataRepresentations() { + return maps.keySet(); + } + + @SuppressWarnings("unchecked") + private VersionedMap getMap(DataRepresentation representation) { + if (maps.containsKey(representation)) { + return (VersionedMap) maps.get(representation); + } else { + throw new IllegalArgumentException("Model does have representation " + representation); + } + } + + private VersionedMap getMapValidateKey(DataRepresentation representation, K key) { + if (representation.isValidKey(key)) { + return getMap(representation); + } else { + throw new IllegalArgumentException( + "Key is not valid for representation! 
(representation=" + representation + ", key=" + key + ");"); + } + } + + @Override + public V get(DataRepresentation representation, K key) { + return getMapValidateKey(representation, key).get(key); + } + + @Override + public Cursor getAll(DataRepresentation representation) { + return getMap(representation).getAll(); + } + + @Override + public V put(DataRepresentation representation, K key, V value) { + return getMapValidateKey(representation, key).put(key, value); + } + + @Override + public void putAll(DataRepresentation representation, Cursor cursor) { + getMap(representation).putAll(cursor); + } + + @Override + public long getSize(DataRepresentation representation) { + return getMap(representation).getSize(); + } + + @Override + public ModelDiffCursor getDiffCursor(long to) { + Model toModel = store.createModel(to); + Map, DiffCursor> diffCursors = new HashMap<>(); + for (DataRepresentation representation : this.maps.keySet()) { + MapDiffCursor diffCursor = constructDiffCursor(toModel, representation); + diffCursors.put(representation, diffCursor); + } + return new ModelDiffCursor(diffCursors); + } + + private MapDiffCursor constructDiffCursor(Model toModel, DataRepresentation representation) { + @SuppressWarnings("unchecked") + Cursor fromCursor = (Cursor) this.maps.get(representation).getAll(); + Cursor toCursor = toModel.getAll(representation); + + ContinousHashProvider hashProvider = representation.getHashProvider(); + V defaultValue = representation.getDefaultValue(); + return new MapDiffCursor<>(hashProvider, defaultValue, fromCursor, toCursor); + } + + @Override + public long commit() { + long version = 0; + boolean versionSet = false; + for (VersionedMap map : maps.values()) { + long newVersion = map.commit(); + if (versionSet) { + if (version != newVersion) { + throw new IllegalStateException( + "Maps in model have different versions! 
(" + version + " and" + newVersion + ")"); + } + } else { + version = newVersion; + versionSet = true; + } + } + return version; + } + + @Override + public void restore(long state) { + if(store.getStates().contains(state)) { + for (VersionedMap map : maps.values()) { + map.restore(state); + } + } else { + throw new IllegalArgumentException("Map does not contain state "+state+"!"); + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java b/subprojects/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java new file mode 100644 index 00000000..9d1b1dd0 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java @@ -0,0 +1,33 @@ +package tools.refinery.store.model.internal; + +import java.util.Objects; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; + +public class SimilarRelationEquivalenceClass { + final ContinousHashProvider hashProvider; + final Object defaultValue; + final int arity; + public SimilarRelationEquivalenceClass(Relation representation) { + this.hashProvider = representation.getHashProvider(); + this.defaultValue = representation.getDefaultValue(); + this.arity = representation.getArity(); + } + @Override + public int hashCode() { + return Objects.hash(arity, defaultValue, hashProvider); + } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!(obj instanceof SimilarRelationEquivalenceClass)) + return false; + SimilarRelationEquivalenceClass other = (SimilarRelationEquivalenceClass) obj; + return arity == other.arity && Objects.equals(defaultValue, other.defaultValue) + && Objects.equals(hashProvider, other.hashProvider); + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java b/subprojects/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java new file mode 100644 index 00000000..ddd8a5f2 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java @@ -0,0 +1,22 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.map.ContinousHashProvider; + +public class AuxilaryData extends DataRepresentation { + private final String name; + + public AuxilaryData(String name, ContinousHashProvider hashProvider, V defaultValue) { + super(hashProvider, defaultValue); + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public boolean isValidKey(K key) { + return true; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java b/subprojects/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java new file mode 100644 index 00000000..585e7b88 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java @@ -0,0 +1,24 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.map.ContinousHashProvider; + +public abstract class DataRepresentation { + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + + protected DataRepresentation(ContinousHashProvider hashProvider, V defaultValue) { + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + } + + 
public abstract String getName(); + + public ContinousHashProvider getHashProvider() { + return hashProvider; + } + public abstract boolean isValidKey(K key); + + public V getDefaultValue() { + return defaultValue; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/representation/Relation.java b/subprojects/store/src/main/java/tools/refinery/store/model/representation/Relation.java new file mode 100644 index 00000000..fc2a3185 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/representation/Relation.java @@ -0,0 +1,31 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.TupleHashProvider; + +public class Relation extends DataRepresentation { + private final String name; + private final int arity; + + public Relation(String name, int arity, D defaultValue) { + super(TupleHashProvider.singleton(), defaultValue); + this.name = name; + this.arity = arity; + } + + @Override + public String getName() { + return name; + } + + public int getArity() { + return arity; + } + + @Override + public boolean isValidKey(Tuple key) { + if(key == null) { + return false; + } else return key.getSize() == getArity(); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java b/subprojects/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java new file mode 100644 index 00000000..610713f3 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java @@ -0,0 +1,51 @@ +package tools.refinery.store.model.representation; + +public enum TruthValue { + TRUE("true"), + + FALSE("false"), + + UNKNOWN("unknown"), + + ERROR("error"); + + private final String name; + + private TruthValue(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public static TruthValue toTruthValue(boolean value) { + return value ? 
TRUE : FALSE; + } + + public boolean isConsistent() { + return this != ERROR; + } + + public boolean isComplete() { + return this != UNKNOWN; + } + + public boolean must() { + return this == TRUE || this == ERROR; + } + + public boolean may() { + return this == TRUE || this == UNKNOWN; + } + + public TruthValue not() { + if (this == TRUE) { + return FALSE; + } else if (this == FALSE) { + return TRUE; + } else { + return this; + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModel.java b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModel.java new file mode 100644 index 00000000..f669b3ed --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModel.java @@ -0,0 +1,30 @@ +package tools.refinery.store.query; + +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +import tools.refinery.store.model.Model; +import tools.refinery.store.query.building.DNFPredicate; + +public interface QueriableModel extends Model{ + Set getPredicates(); + + void flushChanges(); + + boolean hasResult(DNFPredicate predicate); + + boolean hasResult(DNFPredicate predicate, Object[] parameters); + + Optional oneResult(DNFPredicate predicate); + + Optional oneResult(DNFPredicate predicate, Object[] parameters); + + Stream allResults(DNFPredicate predicate); + + Stream allResults(DNFPredicate predicate, Object[] parameters); + + int countResults(DNFPredicate predicate); + + int countResults(DNFPredicate predicate, Object[] parameters); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStore.java b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStore.java new file mode 100644 index 00000000..3a5b51ff --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStore.java @@ -0,0 +1,23 @@ +package tools.refinery.store.query; + +import java.util.Set; + +import tools.refinery.store.model.ModelDiffCursor; +import tools.refinery.store.model.ModelStore; +import tools.refinery.store.model.representation.DataRepresentation; +import tools.refinery.store.query.building.DNFPredicate; +import tools.refinery.store.query.view.RelationView; + +public interface QueriableModelStore extends ModelStore{ + @SuppressWarnings("squid:S1452") + Set> getDataRepresentations(); + @SuppressWarnings("squid:S1452") + Set> getViews(); + Set getPredicates(); + + QueriableModel createModel(); + QueriableModel createModel(long state); + + Set getStates(); + ModelDiffCursor getDiffCursor(long from, long to); +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStoreImpl.java b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStoreImpl.java new file mode 100644 index 00000000..653783dd --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/QueriableModelStoreImpl.java @@ -0,0 +1,127 @@ +package tools.refinery.store.query; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; + +import tools.refinery.store.model.ModelDiffCursor; +import tools.refinery.store.model.ModelStore; +import tools.refinery.store.model.ModelStoreImpl; +import tools.refinery.store.model.representation.DataRepresentation; +import tools.refinery.store.query.building.DNFAnd; +import tools.refinery.store.query.building.DNFAtom; +import 
tools.refinery.store.query.building.DNFPredicate; +import tools.refinery.store.query.building.PredicateAtom; +import tools.refinery.store.query.building.RelationAtom; +import tools.refinery.store.query.internal.DNF2PQuery; +import tools.refinery.store.query.internal.QueriableModelImpl; +import tools.refinery.store.query.internal.RawPatternMatcher; +import tools.refinery.store.query.internal.DNF2PQuery.SimplePQuery; +import tools.refinery.store.query.view.RelationView; + +public class QueriableModelStoreImpl implements QueriableModelStore { + protected final ModelStore store; + protected final Set> relationViews; + protected final Map> predicates; + + public QueriableModelStoreImpl(Set> dataRepresentations, + Set> relationViews, Set predicates) { + this.store = new ModelStoreImpl(dataRepresentations); + validateViews(dataRepresentations, relationViews); + this.relationViews = Collections.unmodifiableSet(relationViews); + validatePredicates(relationViews, predicates); + this.predicates = initPredicates(predicates); + } + + private void validateViews(Set> dataRepresentations, Set> relationViews) { + for (RelationView relationView : relationViews) { + // TODO: make it work for non-relation representation? + if (!dataRepresentations.contains(relationView.getRepresentation())) { + throw new IllegalArgumentException( + DataRepresentation.class.getSimpleName() + " " + relationView.getStringID() + " added to " + + QueriableModelStore.class.getSimpleName() + " without a referred representation."); + } + } + } + + private void validatePredicates(Set> relationViews, Set predicates) { + for (DNFPredicate dnfPredicate : predicates) { + for (DNFAnd clause : dnfPredicate.getClauses()) { + for (DNFAtom atom : clause.getConstraints()) { + if (atom instanceof RelationAtom relationAtom) { + validateRelationAtom(relationViews, dnfPredicate, relationAtom); + } else if (atom instanceof PredicateAtom predicateAtom) { + validatePredicateAtom(predicates, dnfPredicate, predicateAtom); + } + } + } + } + } + + private void validateRelationAtom(Set> relationViews, DNFPredicate dnfPredicate, + RelationAtom relationAtom) { + if (!relationViews.contains(relationAtom.getView())) { + throw new IllegalArgumentException(DNFPredicate.class.getSimpleName() + " " + + dnfPredicate.getUniqueName() + " contains reference to a view of " + + relationAtom.getView().getRepresentation().getName() + + " that is not in the model."); + } + } + private void validatePredicateAtom(Set predicates, DNFPredicate dnfPredicate, + PredicateAtom predicateAtom) { + if (!predicates.contains(predicateAtom.getReferred())) { + throw new IllegalArgumentException( + DNFPredicate.class.getSimpleName() + " " + dnfPredicate.getUniqueName() + + " contains reference to a predicate " + + predicateAtom.getReferred().getName() + + "that is not in the model."); + } + } + + private Map> initPredicates(Set predicates) { + Map> result = new HashMap<>(); + Map dnf2PQueryMap = new HashMap<>(); + for (DNFPredicate dnfPredicate : predicates) { + GenericQuerySpecification query = DNF2PQuery.translate(dnfPredicate,dnf2PQueryMap).build(); + result.put(dnfPredicate, query); + } + + return result; + } + + @Override + public Set> getDataRepresentations() { + return store.getDataRepresentations(); + } + @Override + public Set> getViews() { + return this.relationViews; + } + @Override + public Set getPredicates() { + return predicates.keySet(); + } + + @Override + public QueriableModel createModel() { + return new QueriableModelImpl(this, this.store.createModel(), predicates); + 
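Assuming a QueriableModel whose store was built with a friend relation and a friendPredicate (a full construction sketch follows DNF2PQuery below), the read side of the interface above is used roughly like this; the Object[] row type of allResults and the exact effect of flushChanges are assumptions, because the flattened signatures do not show them.

import java.util.Arrays;

import tools.refinery.store.model.Tuple;
import tools.refinery.store.model.representation.Relation;
import tools.refinery.store.query.QueriableModel;
import tools.refinery.store.query.building.DNFPredicate;

public class QueryCallSketch {
	static void queryOnce(QueriableModel model, Relation<Boolean> friend, DNFPredicate friendPredicate) {
		model.put(friend, Tuple.of(0, 1), true);
		model.flushChanges(); // assumed to push pending model changes into the query engine
		if (model.hasResult(friendPredicate)) {
			System.out.println(model.countResults(friendPredicate));
			model.allResults(friendPredicate)
					.forEach(match -> System.out.println(Arrays.toString(match)));
		}
	}
}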
} + + @Override + public QueriableModel createModel(long state) { + return new QueriableModelImpl(this, this.store.createModel(state), predicates); + } + + @Override + public synchronized Set getStates() { + return this.store.getStates(); + } + + @Override + public synchronized ModelDiffCursor getDiffCursor(long from, long to) { + return this.store.getDiffCursor(from, to); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java new file mode 100644 index 00000000..48dabce2 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java @@ -0,0 +1,37 @@ +package tools.refinery.store.query.building; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class DNFAnd { + private Set existentiallyQuantified; + private List constraints; + public DNFAnd(Set quantifiedVariables, List constraints) { + super(); + this.existentiallyQuantified = quantifiedVariables; + this.constraints = constraints; + } + public Set getExistentiallyQuantified() { + return existentiallyQuantified; + } + public List getConstraints() { + return constraints; + } + void unifyVariables(Map uniqueVariableMap) { + Map uniqueVariableMapForClause = new HashMap<>(uniqueVariableMap); + for(DNFAtom atom : constraints) { + atom.unifyVariables(uniqueVariableMapForClause); + } + } + void collectQuantifiedVariables(Set parameters) { + Set result = new HashSet<>(); + for(DNFAtom constraint : constraints) { + constraint.collectAllVariables(result); + } + result.removeAll(parameters); + existentiallyQuantified = result; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java new file mode 100644 index 00000000..b047d7c8 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java @@ -0,0 +1,33 @@ +package tools.refinery.store.query.building; + +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +public interface DNFAtom { + void unifyVariables(Map variables); + static Variable unifyVariables(Map unifiedVariables, Variable variable) { + if(variable != null) { + if(variable.isNamed() && unifiedVariables.containsKey(variable.getName())) { + return unifiedVariables.get(variable.getName()); + } + return variable; + } else { + return null; + } + } + void collectAllVariables(Set variables); + static void addToCollection(Set variables, Variable variable) { + if(variable != null) { + variables.add(variable); + } + } + static void addToCollection(Set variables, Collection variableCollection) { + Iterator iterator = variableCollection.iterator(); + while(iterator.hasNext()) { + Variable variable = iterator.next(); + addToCollection(variables, variable); + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java new file mode 100644 index 00000000..f0c9ac42 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java @@ -0,0 +1,72 @@ +package tools.refinery.store.query.building; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.UUID; + 
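The unification pass above hinges on every named variable of a clause being collapsed to a single Variable instance. The small sketch below makes that concrete, using only the DNFAtom.unifyVariables helper shown above and the Variable class defined further down.

import java.util.HashMap;
import java.util.Map;

import tools.refinery.store.query.building.DNFAtom;
import tools.refinery.store.query.building.Variable;

public class VariableUnificationSketch {
	public static void main(String[] args) {
		// Two Variable objects with the same name are distinct until unified against a shared map.
		Variable a1 = new Variable("a");
		Variable a2 = new Variable("a");
		System.out.println(a1 == a2); // false: each instance gets its own uniqueName

		Map<String, Variable> unified = new HashMap<>();
		unified.put(a1.getName(), a1);
		// DNFAtom.unifyVariables resolves named variables through the map, as DNFAnd does per clause.
		System.out.println(DNFAtom.unifyVariables(unified, a2) == a1); // true

		// Unnamed variables pass through untouched and later become existentially quantified.
		Variable anonymous = new Variable(null);
		System.out.println(DNFAtom.unifyVariables(unified, anonymous) == anonymous); // true
	}
}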
+public class DNFPredicate { + private final String name; + private final String uniqueName; + private final List parameters; + private final List clauses; + + public DNFPredicate(String name, List parameters, List clauses) { + this.name = name; + this.uniqueName = generateUniqueName(name,"predicate"); + this.parameters = parameters; + this.clauses = clauses; + + postProcess(); + } + + public static String generateUniqueName(String originalName, String defaultPrefix) { + UUID uuid = UUID.randomUUID(); + String uniqueString = uuid.toString().replace('-', '_'); + if(originalName == null) { + return defaultPrefix+uniqueString; + } else { + return originalName+uniqueString; + } + } + + public String getName() { + return name; + } + public String getUniqueName() { + return uniqueName; + } + public List getVariables() { + return parameters; + } + public List getClauses() { + return clauses; + } + + public void unifyVariables() { + Map uniqueVariableMap = new HashMap<>(); + for(Variable parameter : this.parameters) { + if(parameter.isNamed()) { + String parameterName = parameter.getName(); + if(uniqueVariableMap.containsKey(parameterName)) { + throw new IllegalArgumentException("Multiple parameters has the name "+parameterName); + } else { + uniqueVariableMap.put(parameterName, parameter); + } + } + } + for(DNFAnd clause : this.clauses) { + clause.unifyVariables(uniqueVariableMap); + } + } + public void collectQuantifiedVariables() { + for(DNFAnd clause : this.clauses) { + clause.collectQuantifiedVariables(new HashSet<>(parameters)); + } + } + public void postProcess() { + unifyVariables(); + collectQuantifiedVariables(); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java new file mode 100644 index 00000000..fede2518 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java @@ -0,0 +1,44 @@ +package tools.refinery.store.query.building; + +import java.util.Map; +import java.util.Set; + +public class EquivalenceAtom implements DNFAtom{ + private boolean positive; + private Variable left; + private Variable right; + public EquivalenceAtom(boolean positive, Variable left, Variable right) { + this.positive = positive; + this.left = left; + this.right = right; + } + public boolean isPositive() { + return positive; + } + public void setPositive(boolean positive) { + this.positive = positive; + } + public Variable getLeft() { + return left; + } + public void setLeft(Variable left) { + this.left = left; + } + public Variable getRight() { + return right; + } + public void setRight(Variable right) { + this.right = right; + } + + @Override + public void unifyVariables(Map variables) { + this.left = DNFAtom.unifyVariables(variables,left); + this.right = DNFAtom.unifyVariables(variables,right); + } + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, left); + DNFAtom.addToCollection(variables, right); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java new file mode 100644 index 00000000..42394922 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java @@ -0,0 +1,66 @@ +package tools.refinery.store.query.building; + +import java.util.List; +import java.util.Map; +import 
java.util.Set; + +public class PredicateAtom implements DNFAtom { + private DNFPredicate referred; + private List substitution; + private boolean positive; + private boolean transitive; + + public PredicateAtom(boolean positive, boolean transitive, DNFPredicate referred, List substitution) { + this.positive = positive; + this.referred = referred; + this.substitution = substitution; + this.transitive = transitive; + } + + public DNFPredicate getReferred() { + return referred; + } + + public void setReferred(DNFPredicate referred) { + this.referred = referred; + } + + public List getSubstitution() { + return substitution; + } + + public void setSubstitution(List substitution) { + this.substitution = substitution; + } + + public boolean isPositive() { + return positive; + } + + public void setPositive(boolean positive) { + this.positive = positive; + } + + public boolean isTransitive() { + return transitive; + } + + public void setTransitive(boolean transitive) { + this.transitive = transitive; + } + + @Override + public void unifyVariables(Map variables) { + for (int i = 0; i < this.substitution.size(); i++) { + final Object term = this.substitution.get(i); + if (term instanceof Variable variableReference) { + this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); + } + } + } + + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, substitution); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java new file mode 100644 index 00000000..1238f1d7 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java @@ -0,0 +1,49 @@ +package tools.refinery.store.query.building; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.query.view.FilteredRelationView; +import tools.refinery.store.query.view.RelationView; + +public class RelationAtom implements DNFAtom { + RelationView view; + List substitution; + + public RelationAtom(RelationView view, List substitution) { + this.view = view; + this.substitution = substitution; + } + + public RelationView getView() { + return view; + } + + public void setView(FilteredRelationView view) { + this.view = view; + } + + public List getSubstitution() { + return substitution; + } + + public void setSubstitution(List substitution) { + this.substitution = substitution; + } + + @Override + public void unifyVariables(Map variables) { + for (int i = 0; i < this.substitution.size(); i++) { + final Object term = this.substitution.get(i); + if (term instanceof Variable variableReference) { + this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); + } + } + } + + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, substitution); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/building/Variable.java b/subprojects/store/src/main/java/tools/refinery/store/query/building/Variable.java new file mode 100644 index 00000000..9ea7ce83 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/building/Variable.java @@ -0,0 +1,22 @@ +package tools.refinery.store.query.building; + +public class Variable { + private final String name; + private final String uniqueName; + + public Variable(String name) { + super(); + this.name = name; + this.uniqueName = 
DNFPredicate.generateUniqueName(name, "variable"); + + } + public String getName() { + return name; + } + public String getUniqueName() { + return uniqueName; + } + public boolean isNamed() { + return name != null; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/DNF2PQuery.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/DNF2PQuery.java new file mode 100644 index 00000000..bcc03fb4 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/DNF2PQuery.java @@ -0,0 +1,189 @@ +package tools.refinery.store.query.internal; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.InputMismatchException; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; +import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.scope.QueryScope; +import org.eclipse.viatra.query.runtime.matchers.backend.QueryEvaluationHint; +import org.eclipse.viatra.query.runtime.matchers.psystem.PBody; +import org.eclipse.viatra.query.runtime.matchers.psystem.PVariable; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Equality; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.ExportedParameter; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Inequality; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.NegativePatternCall; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryTransitiveClosure; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.PositivePatternCall; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.TypeConstraint; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.BasePQuery; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PParameter; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PVisibility; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; + +import tools.refinery.store.query.building.DNFAnd; +import tools.refinery.store.query.building.DNFAtom; +import tools.refinery.store.query.building.DNFPredicate; +import tools.refinery.store.query.building.EquivalenceAtom; +import tools.refinery.store.query.building.PredicateAtom; +import tools.refinery.store.query.building.RelationAtom; +import tools.refinery.store.query.building.Variable; + +public class DNF2PQuery { + + public static SimplePQuery translate(DNFPredicate predicate, Map dnf2PQueryMap) { + SimplePQuery query = dnf2PQueryMap.get(predicate); + if (query != null) { + return query; + } + query = new DNF2PQuery().new SimplePQuery(predicate.getName()); + Map parameters = new HashMap<>(); + + predicate.getVariables().forEach(variable -> parameters.put(variable, new PParameter(variable.getName()))); + List parameterList = new ArrayList<>(); + for(var param : predicate.getVariables()) { + parameterList.add(parameters.get(param)); + } + query.setParameter(parameterList); + for (DNFAnd clause : predicate.getClauses()) { + PBody body = new PBody(query); + List symbolicParameters = new ArrayList<>(); + for(var param : predicate.getVariables()) { + PVariable pVar = body.getOrCreateVariableByName(param.getName()); + symbolicParameters.add(new ExportedParameter(body, pVar, parameters.get(param))); + } + body.setSymbolicParameters(symbolicParameters); + 
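+			// Register the finished clause body with the query, then lower each DNF constraint of the clause into the corresponding VIATRA PConstraint (Equality/Inequality, TypeConstraint, or positive/negative/transitive pattern call).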
query.addBody(body); + for (DNFAtom constraint : clause.getConstraints()) { + translateDNFAtom(constraint, body, dnf2PQueryMap); + } + } + dnf2PQueryMap.put(predicate, query); + return query; + } + + private static void translateDNFAtom(DNFAtom constraint, PBody body, Map dnf2PQueryMap) { + if (constraint instanceof EquivalenceAtom equivalence) { + translateEquivalenceAtom(equivalence, body); + } + if (constraint instanceof RelationAtom relation) { + translateRelationAtom(relation, body); + } + if (constraint instanceof PredicateAtom predicate) { + translatePredicateAtom(predicate, body, dnf2PQueryMap); + } + } + + private static void translateEquivalenceAtom(EquivalenceAtom equivalence, PBody body) { + PVariable varSource = body.getOrCreateVariableByName(equivalence.getLeft().getName()); + PVariable varTarget = body.getOrCreateVariableByName(equivalence.getRight().getName()); + if (equivalence.isPositive()) + new Equality(body, varSource, varTarget); + else + new Inequality(body, varSource, varTarget); + } + + private static void translateRelationAtom(RelationAtom relation, PBody body) { + if (relation.getSubstitution().size() != relation.getView().getArity()) { + throw new IllegalArgumentException("Arity (" + relation.getView().getArity() + + ") does not match parameter numbers (" + relation.getSubstitution().size() + ")"); + } + Object[] variables = new Object[relation.getSubstitution().size()]; + for (int i = 0; i < relation.getSubstitution().size(); i++) { + variables[i] = body.getOrCreateVariableByName(relation.getSubstitution().get(i).getName()); + } + new TypeConstraint(body, Tuples.flatTupleOf(variables), relation.getView()); + } + + private static void translatePredicateAtom(PredicateAtom predicate, PBody body, Map dnf2PQueryMap) { + Object[] variables = new Object[predicate.getSubstitution().size()]; + for (int i = 0; i < predicate.getSubstitution().size(); i++) { + variables[i] = body.getOrCreateVariableByName(predicate.getSubstitution().get(i).getName()); + } + if (predicate.isPositive()) { + if (predicate.isTransitive()) { + if (predicate.getSubstitution().size() != 2) { + throw new IllegalArgumentException("Transitive Predicate Atoms must be binary."); + } + new BinaryTransitiveClosure(body, Tuples.flatTupleOf(variables), + DNF2PQuery.translate(predicate.getReferred(), dnf2PQueryMap)); + } else { + new PositivePatternCall(body, Tuples.flatTupleOf(variables), + DNF2PQuery.translate(predicate.getReferred(), dnf2PQueryMap)); + } + } else { + if (predicate.isTransitive()) { + throw new InputMismatchException("Transitive Predicate Atoms cannot be negative."); + } else { + new NegativePatternCall(body, Tuples.flatTupleOf(variables), + DNF2PQuery.translate(predicate.getReferred(), dnf2PQueryMap)); + } + } + } + + public class SimplePQuery extends BasePQuery { + + private String fullyQualifiedName; + private List parameters; + private LinkedHashSet bodies = new LinkedHashSet<>(); + + public SimplePQuery(String name) { + super(PVisibility.PUBLIC); + fullyQualifiedName = name; + } + + @Override + public String getFullyQualifiedName() { + return fullyQualifiedName; + } + + public void setParameter(List parameters) { + this.parameters = parameters; + } + + @Override + public List getParameters() { + return parameters; + } + + public void addBody(PBody body) { + bodies.add(body); + } + + @Override + protected Set doGetContainedBodies() { + setEvaluationHints(new QueryEvaluationHint(null, QueryEvaluationHint.BackendRequirement.UNSPECIFIED)); + return bodies; + } + + public 
GenericQuerySpecification build() { + return new GenericQuerySpecification(this) { + + @Override + public Class getPreferredScopeClass() { + return RelationalScope.class; + } + + @Override + protected RawPatternMatcher instantiate(ViatraQueryEngine engine) { + RawPatternMatcher matcher = engine.getExistingMatcher(this); + if (matcher == null) { + matcher = engine.getMatcher(this); + } + return matcher; + } + + @Override + public RawPatternMatcher instantiate() { + return new RawPatternMatcher(this); + } + + }; + } + } +} \ No newline at end of file diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java new file mode 100644 index 00000000..49637071 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java @@ -0,0 +1,59 @@ +package tools.refinery.store.query.internal; + +import java.lang.reflect.InvocationTargetException; +import java.util.concurrent.Callable; + +import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; +import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; +import org.eclipse.viatra.query.runtime.api.scope.IInstanceObserver; +import org.eclipse.viatra.query.runtime.api.scope.ViatraBaseIndexChangeListener; + +/** + * copied from org.eclipse.viatra.query.runtime.tabular.TabularEngineContext; + */ +public class DummyBaseIndexer implements IBaseIndex{ + + @Override + public V coalesceTraversals(Callable callable) throws InvocationTargetException { + try { + return callable.call(); + } catch (Exception e) { + throw new InvocationTargetException(e); + } + } + + @Override + public void addBaseIndexChangeListener(ViatraBaseIndexChangeListener listener) { + // no notification support + } + + @Override + public void removeBaseIndexChangeListener(ViatraBaseIndexChangeListener listener) { + // no notification support + } + + @Override + public void resampleDerivedFeatures() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean addIndexingErrorListener(IIndexingErrorListener listener) { + return true; + } + + @Override + public boolean removeIndexingErrorListener(IIndexingErrorListener listener) { + return true; + } + + @Override + public boolean addInstanceObserver(IInstanceObserver observer, Object observedObject) { + return true; + } + + @Override + public boolean removeInstanceObserver(IInstanceObserver observer, Object observedObject) { + return true; + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/ModelUpdateListener.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ModelUpdateListener.java new file mode 100644 index 00000000..aa80985f --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ModelUpdateListener.java @@ -0,0 +1,103 @@ +package tools.refinery.store.query.internal; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; +import tools.refinery.store.query.view.RelationView; + +public class ModelUpdateListener { + /** + * Collections of Relations and their Views. 
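+	 * Maps each relation to every view derived from it, so that a single model update can be forwarded to all affected views.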
+ */ + private final Map, Set>> relation2View; + /** + * Collection of Views and their buffers. + */ + private final Map, Set>> view2Buffers; + + public ModelUpdateListener(Set> relationViews) { + this.relation2View = new HashMap<>(); + this.view2Buffers = new HashMap<>(); + + for (RelationView relationView : relationViews) { + registerView(relationView); + } + } + + private void registerView(RelationView view) { + Relation relation = view.getRepresentation(); + + // 1. register views to relations, if necessary + var views = relation2View.computeIfAbsent(relation, x->new HashSet<>()); + views.add(view); + + // 2. register notifier map to views, if necessary + view2Buffers.computeIfAbsent(view, x->new HashSet<>()); + } + + boolean containsRelationalView(RelationView relationalKey) { + return view2Buffers.containsKey(relationalKey); + } + + void addListener(RelationView relationView, ITuple seed, IQueryRuntimeContextListener listener) { + if (view2Buffers.containsKey(relationView)) { + ViewUpdateTranslator updateListener = new ViewUpdateTranslator<>(relationView, seed, listener); + ViewUpdateBuffer updateBuffer = new ViewUpdateBuffer<>(updateListener); + view2Buffers.get(relationView).add(updateBuffer); + } else + throw new IllegalArgumentException(); + } + + void removeListener(RelationView relationView, ITuple seed, IQueryRuntimeContextListener listener) { + if (view2Buffers.containsKey(relationView)) { + Set> buffers = this.view2Buffers.get(relationView); + for(var buffer : buffers) { + if(buffer.getUpdateListener().key == seed && buffer.getUpdateListener().listener == listener) { + // remove buffer and terminate immediately, or it will break iterator. + buffers.remove(buffer); + return; + } + } + } else + throw new IllegalArgumentException(); + } + + public void addUpdate(Relation relation, Tuple key, D oldValue, D newValue) { + var views = this.relation2View.get(relation); + if (views != null) { + for (var view : views) { + var buffers = this.view2Buffers.get(view); + for (var buffer : buffers) { + @SuppressWarnings("unchecked") + var typedBuffer = (ViewUpdateBuffer) buffer; + typedBuffer.addChange(key, oldValue, newValue); + } + } + } + } + + public boolean hasChange() { + for (var bufferCollection : this.view2Buffers.values()) { + for (ViewUpdateBuffer buffer : bufferCollection) { + if (buffer.hasChange()) + return true; + } + } + return false; + } + + public void flush() { + for (var bufferCollection : this.view2Buffers.values()) { + for (ViewUpdateBuffer buffer : bufferCollection) { + buffer.flush(); + } + } + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/PredicateResult.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/PredicateResult.java new file mode 100644 index 00000000..65d23eb6 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/PredicateResult.java @@ -0,0 +1,24 @@ +package tools.refinery.store.query.internal; + +import java.util.Optional; +import java.util.stream.Stream; + +public interface PredicateResult { + + boolean hasResult(); + + boolean hasResult(Object[] parameters); + + Optional oneResult(); + + Optional oneResult(Object[] parameters); + + Stream allResults(); + + Stream allResults(Object[] parameters); + + int countResults(); + + int countResults(Object[] parameters); + +} \ No newline at end of file diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/QueriableModelImpl.java 
b/subprojects/store/src/main/java/tools/refinery/store/query/internal/QueriableModelImpl.java new file mode 100644 index 00000000..0f4d609f --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/QueriableModelImpl.java @@ -0,0 +1,212 @@ +package tools.refinery.store.query.internal; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +import org.eclipse.viatra.query.runtime.api.AdvancedViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.GenericQueryGroup; +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; +import org.eclipse.viatra.query.runtime.api.IQueryGroup; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.model.Model; +import tools.refinery.store.model.ModelDiffCursor; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.DataRepresentation; +import tools.refinery.store.model.representation.Relation; +import tools.refinery.store.query.QueriableModel; +import tools.refinery.store.query.QueriableModelStore; +import tools.refinery.store.query.building.DNFPredicate; + +public class QueriableModelImpl implements QueriableModel { + protected final QueriableModelStore store; + protected final Model model; + protected final Map> predicates2PQuery; + + protected RelationalScope scope; + protected AdvancedViatraQueryEngine engine; + protected Map predicate2Matcher; + + public QueriableModelImpl(QueriableModelStore store, Model model, + Map> predicates2PQuery) { + this.store = store; + this.model = model; + this.predicates2PQuery = predicates2PQuery; + initEngine(); + } + + private void initEngine() { + this.scope = new RelationalScope(this.model, this.store.getViews()); + this.engine = AdvancedViatraQueryEngine.createUnmanagedEngine(this.scope); + this.predicate2Matcher = initMatchers(this.engine, this.predicates2PQuery); + } + + private Map initMatchers(AdvancedViatraQueryEngine engine, + Map> predicates2pQuery) { + // 1. prepare group + IQueryGroup queryGroup = GenericQueryGroup.of(Set.copyOf(predicates2pQuery.values())); + engine.prepareGroup(queryGroup, null); + + // 2. 
then get all matchers + Map result = new HashMap<>(); + for (var entry : predicates2pQuery.entrySet()) { + var matcher = engine.getMatcher(entry.getValue()); + result.put(entry.getKey(), matcher); + } + return result; + } + + @Override + public Set> getDataRepresentations() { + return model.getDataRepresentations(); + } + + @Override + public Set getPredicates() { + return store.getPredicates(); + } + + @Override + public V get(DataRepresentation representation, K key) { + return model.get(representation, key); + } + + @Override + public Cursor getAll(DataRepresentation representation) { + return model.getAll(representation); + } + + @SuppressWarnings("unchecked") + @Override + public V put(DataRepresentation representation, K key, V value) { + V oldValue = this.model.put(representation, key, value); + if(representation instanceof Relation relation) { + this.scope.processUpdate((Relation)relation, (Tuple)key, oldValue, value); + } + return oldValue; + } + + @Override + public void putAll(DataRepresentation representation, Cursor cursor) { + if(representation instanceof Relation) { + @SuppressWarnings("unchecked") + Relation relation = (Relation) representation; + while(cursor.move()) { + Tuple key = (Tuple) cursor.getKey(); + V newValue = cursor.getValue(); + V oldValue = this.model.put(relation, key, newValue); + this.scope.processUpdate(relation, key, oldValue, newValue); + } + } else { + this.model.putAll(representation, cursor); + } + } + + @Override + public long getSize(DataRepresentation representation) { + return model.getSize(representation); + } + + protected PredicateResult getPredicateResult(DNFPredicate predicate) { + var result = this.predicate2Matcher.get(predicate); + if (result == null) { + throw new IllegalArgumentException("Model does not contain predicate " + predicate.getName() + "!"); + } else + return result; + } + + protected void validateParameters(DNFPredicate predicate, Object[] parameters) { + int predicateArity = predicate.getVariables().size(); + int parameterArity = parameters.length; + if (parameterArity != predicateArity) { + throw new IllegalArgumentException("Predicate " + predicate.getName() + " with " + predicateArity + + " arity called with different number of parameters (" + parameterArity + ")!"); + } + } + + @Override + public boolean hasResult(DNFPredicate predicate) { + return getPredicateResult(predicate).hasResult(); + } + + @Override + public boolean hasResult(DNFPredicate predicate, Object[] parameters) { + validateParameters(predicate, parameters); + return getPredicateResult(predicate).hasResult(parameters); + } + + @Override + public Optional oneResult(DNFPredicate predicate){ + return getPredicateResult(predicate).oneResult(); + } + + @Override + public Optional oneResult(DNFPredicate predicate, Object[] parameters){ + validateParameters(predicate, parameters); + return getPredicateResult(predicate).oneResult(parameters); + } + + @Override + public Stream allResults(DNFPredicate predicate){ + return getPredicateResult(predicate).allResults(); + } + + @Override + public Stream allResults(DNFPredicate predicate, Object[] parameters){ + validateParameters(predicate, parameters); + return getPredicateResult(predicate).allResults(parameters); + } + + @Override + public int countResults(DNFPredicate predicate){ + return getPredicateResult(predicate).countResults(); + } + + @Override + public int countResults(DNFPredicate predicate, Object[] parameters){ + validateParameters(predicate, parameters); + return 
getPredicateResult(predicate).countResults(parameters); + + } + @Override + public void flushChanges() { + this.scope.flush(); + } + + @Override + public ModelDiffCursor getDiffCursor(long to) { + return model.getDiffCursor(to); + } + + @Override + public long commit() { + return this.model.commit(); + } + + @Override + public void restore(long state) { + restoreWithDiffReplay(state); + } + + public void restoreWithDiffReplay(long state) { + var modelDiffCursor = getDiffCursor(state); + for(DataRepresentation dataRepresentation : this.getDataRepresentations()) { + restoreRepresentationWithDiffReplay(modelDiffCursor, dataRepresentation); + } + } + + private void restoreRepresentationWithDiffReplay(ModelDiffCursor modelDiffCursor, + DataRepresentation dataRepresentation) { + DiffCursor diffCursor = modelDiffCursor.getCursor(dataRepresentation); + this.putAll(dataRepresentation, diffCursor); + } + + public void restoreWithReinit(long state) { + model.restore(state); + this.initEngine(); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/RawPatternMatcher.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RawPatternMatcher.java new file mode 100644 index 00000000..c6d6353c --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RawPatternMatcher.java @@ -0,0 +1,57 @@ +package tools.refinery.store.query.internal; + +import java.util.Optional; +import java.util.stream.Stream; + +import org.eclipse.viatra.query.runtime.api.GenericPatternMatcher; +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.AbstractTuple; + +public class RawPatternMatcher extends GenericPatternMatcher implements PredicateResult{ + + protected final Object[] empty; + + public RawPatternMatcher(GenericQuerySpecification specification) { + super(specification); + this.empty = new Object[specification.getParameterNames().size()]; + } + + @Override + public boolean hasResult() { + return hasResult(empty); + } + @Override + public boolean hasResult(Object[] parameters) { + return this.backend.hasMatch(parameters); + } + @Override + public Optional oneResult() { + return oneResult(empty); + } + @Override + public Optional oneResult(Object[] parameters) { + Optional tuple = this.backend.getOneArbitraryMatch(parameters); + if(tuple.isPresent()) { + return Optional.of(tuple.get().getElements()); + } else { + return Optional.empty(); + } + } + @Override + public Stream allResults() { + return allResults(empty); + } + @Override + public Stream allResults(Object[] parameters) { + return this.backend.getAllMatches(parameters).map(AbstractTuple::getElements); + } + @Override + public int countResults() { + return countResults(empty); + } + @Override + public int countResults(Object[] parameters) { + return backend.countMatches(parameters); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java new file mode 100644 index 00000000..dfbd8545 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java @@ -0,0 +1,33 @@ +package tools.refinery.store.query.internal; + +import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; +import org.eclipse.viatra.query.runtime.api.scope.IEngineContext; +import 
org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; + +import tools.refinery.store.model.Model; + +public class RelationalEngineContext implements IEngineContext{ + private final IBaseIndex baseIndex = new DummyBaseIndexer(); + private final RelationalRuntimeContext runtimeContext; + + + public RelationalEngineContext(Model model, ModelUpdateListener updateListener) { + runtimeContext = new RelationalRuntimeContext(model, updateListener); + } + + @Override + public IBaseIndex getBaseIndex() { + return this.baseIndex; + } + + @Override + public void dispose() { + //lifecycle not controlled by engine + } + + @Override + public IQueryRuntimeContext getQueryRuntimeContext() { + return runtimeContext; + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java new file mode 100644 index 00000000..05fb0904 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java @@ -0,0 +1,58 @@ +package tools.refinery.store.query.internal; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.matchers.context.AbstractQueryMetaContext; +import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; +import org.eclipse.viatra.query.runtime.matchers.context.InputKeyImplication; + +import tools.refinery.store.query.view.RelationView; + +/** + * The meta context information for String scopes. + */ +public final class RelationalQueryMetaContext extends AbstractQueryMetaContext { + + @Override + public boolean isEnumerable(IInputKey key) { + ensureValidKey(key); + return key.isEnumerable(); + } + + @Override + public boolean isStateless(IInputKey key) { + ensureValidKey(key); + return key instanceof RelationView; + } + + @Override + public Collection getImplications(IInputKey implyingKey) { + ensureValidKey(implyingKey); + return new HashSet(); + } + + @Override + public Map, Set> getFunctionalDependencies(IInputKey key) { + ensureValidKey(key); + if (key instanceof RelationView) { + return new HashMap, Set>(); + } else { + return Collections.emptyMap(); + } + } + + public void ensureValidKey(IInputKey key) { + if (! 
(key instanceof RelationView)) + illegalInputKey(key); + } + + public void illegalInputKey(IInputKey key) { + throw new IllegalArgumentException("The input key " + key + " is not a valid input key."); + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java new file mode 100644 index 00000000..a186b5dd --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java @@ -0,0 +1,178 @@ +package tools.refinery.store.query.internal; + +import static tools.refinery.store.util.CollectionsUtil.filter; +import static tools.refinery.store.util.CollectionsUtil.map; + +import java.lang.reflect.InvocationTargetException; +import java.util.Iterator; +import java.util.Optional; +import java.util.concurrent.Callable; + +import org.eclipse.viatra.query.runtime.base.core.NavigationHelperImpl; +import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryMetaContext; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.context.IndexingService; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.TupleMask; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; +import org.eclipse.viatra.query.runtime.matchers.util.Accuracy; + +import tools.refinery.store.model.Model; +import tools.refinery.store.query.view.RelationView; + +public class RelationalRuntimeContext implements IQueryRuntimeContext { + private final RelationalQueryMetaContext metaContext = new RelationalQueryMetaContext(); + private final ModelUpdateListener modelUpdateListener; + private final Model model; + + public RelationalRuntimeContext(Model model, ModelUpdateListener relationUpdateListener) { + this.model = model; + this.modelUpdateListener = relationUpdateListener; + } + + @Override + public IQueryMetaContext getMetaContext() { + return metaContext; + } + + /** + * TODO: check {@link NavigationHelperImpl#coalesceTraversals(Callable)} + */ + @Override + public V coalesceTraversals(Callable callable) throws InvocationTargetException { + try { + return callable.call(); + } catch (Exception e) { + throw new InvocationTargetException(e); + } + } + + @Override + public boolean isCoalescing() { + return true; + } + + @Override + public boolean isIndexed(IInputKey key, IndexingService service) { + if(key instanceof RelationView relationalKey) { + return this.modelUpdateListener.containsRelationalView(relationalKey); + } else { + return false; + } + } + + @Override + public void ensureIndexed(IInputKey key, IndexingService service) { + if(!isIndexed(key, service)) { + throw new IllegalStateException("Engine tries to index a new key " +key); + } + } + @SuppressWarnings("squid:S1452") + RelationView checkKey(IInputKey key) { + if(key instanceof RelationView) { + RelationView relationViewKey = (RelationView) key; + if(modelUpdateListener.containsRelationalView(relationViewKey)) { + return relationViewKey; + } else { + throw new IllegalStateException("Query is asking for non-indexed key"); + } + } else { + throw new IllegalStateException("Query is asking for 
non-relational key"); + } + } + + @Override + public int countTuples(IInputKey key, TupleMask seedMask, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + Iterable allObjects = relationalViewKey.getAll(model); + Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); + Iterator iterator = filteredBySeed.iterator(); + int result = 0; + while(iterator.hasNext()) { + iterator.next(); + result++; + } + return result; + } + + @Override + public Optional estimateCardinality(IInputKey key, TupleMask groupMask, Accuracy requiredAccuracy) { + return Optional.empty(); + } + + @Override + public Iterable enumerateTuples(IInputKey key, TupleMask seedMask, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + Iterable allObjects = relationalViewKey.getAll(model); + Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); + return map(filteredBySeed,Tuples::flatTupleOf); + } + + private boolean isMatching(Object[] tuple, TupleMask seedMask, ITuple seed) { + for(int i=0; i enumerateValues(IInputKey key, TupleMask seedMask, ITuple seed) { + return enumerateTuples(key, seedMask, seed); + } + + @Override + public boolean containsTuple(IInputKey key, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + return relationalViewKey.get(model,seed.getElements()); + } + + @Override + public void addUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { + RelationView relationalKey = checkKey(key); + this.modelUpdateListener.addListener(relationalKey, seed, listener); + + } + + @Override + public void removeUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { + RelationView relationalKey = checkKey(key); + this.modelUpdateListener.removeListener(relationalKey, seed, listener); + } + + @Override + public Object wrapElement(Object externalElement) { + return externalElement; + } + + @Override + public Object unwrapElement(Object internalElement) { + return internalElement; + } + + @Override + public Tuple wrapTuple(Tuple externalElements) { + return externalElements; + } + + @Override + public Tuple unwrapTuple(Tuple internalElements) { + return internalElements; + } + + @Override + public void ensureWildcardIndexing(IndexingService service) { + throw new UnsupportedOperationException(); + } + + @Override + public void executeAfterTraversal(Runnable runnable) throws InvocationTargetException { + runnable.run(); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalScope.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalScope.java new file mode 100644 index 00000000..e8d45356 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/RelationalScope.java @@ -0,0 +1,43 @@ +package tools.refinery.store.query.internal; + +import java.util.Set; + +import org.apache.log4j.Logger; +import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.scope.IEngineContext; +import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; +import org.eclipse.viatra.query.runtime.api.scope.QueryScope; + +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; +import tools.refinery.store.query.view.RelationView; + +public class RelationalScope extends QueryScope{ + private final Model model; + private final 
ModelUpdateListener updateListener; + + public RelationalScope(Model model, Set> relationViews) { + this.model = model; + this.updateListener = new ModelUpdateListener(relationViews); + //this.changeListener = new + } + + public void processUpdate(Relation relation, Tuple key, D oldValue, D newValue) { + updateListener.addUpdate(relation, key, oldValue, newValue); + } + + public boolean hasChange() { + return updateListener.hasChange(); + } + + public void flush() { + updateListener.flush(); + } + + @Override + protected IEngineContext createEngineContext(ViatraQueryEngine engine, IIndexingErrorListener errorListener, + Logger logger) { + return new RelationalEngineContext(model, updateListener); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdate.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdate.java new file mode 100644 index 00000000..7d1a4c05 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdate.java @@ -0,0 +1,34 @@ +package tools.refinery.store.query.internal; + +import java.util.Arrays; +import java.util.Objects; + +record ViewUpdate (Object[] tuple, boolean isInsertion) { + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.deepHashCode(tuple); + result = prime * result + Objects.hash(isInsertion); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ViewUpdate other = (ViewUpdate) obj; + return isInsertion == other.isInsertion && Arrays.deepEquals(tuple, other.tuple); + } + + @Override + public String toString() { + return "ViewUpdate [" + Arrays.toString(tuple) + "insertion= "+this.isInsertion+"]"; + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateBuffer.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateBuffer.java new file mode 100644 index 00000000..6bc4c96a --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateBuffer.java @@ -0,0 +1,46 @@ +package tools.refinery.store.query.internal; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import tools.refinery.store.model.Tuple; + +public class ViewUpdateBuffer { + protected final ViewUpdateTranslator updateListener; + protected final List buffer = new ArrayList<>(); + + public ViewUpdateBuffer(ViewUpdateTranslator updateListener) { + this.updateListener = updateListener; + } + + public ViewUpdateTranslator getUpdateListener() { + return updateListener; + } + + public boolean hasChange() { + return ! 
buffer.isEmpty(); + } + + public void addChange(Tuple tuple, D oldValue, D newValue) { + if(oldValue != newValue) { + Object[] oldTuple = updateListener.isMatching(tuple, oldValue); + Object[] newTuple = updateListener.isMatching(tuple, newValue); + if(!Arrays.equals(oldTuple, newTuple)) { + if(oldTuple != null) { + buffer.add(new ViewUpdate(oldTuple, false)); + } + if(newTuple != null) { + buffer.add(new ViewUpdate(newTuple, true)); + } + } + } + } + + public void flush() { + for (ViewUpdate viewChange : buffer) { + updateListener.processChange(viewChange); + } + buffer.clear(); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateTranslator.java b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateTranslator.java new file mode 100644 index 00000000..1c210c5f --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/internal/ViewUpdateTranslator.java @@ -0,0 +1,57 @@ +package tools.refinery.store.query.internal; + +import java.util.Objects; + +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.query.view.RelationView; + +public class ViewUpdateTranslator { + final RelationView key; + final ITuple filter; + final IQueryRuntimeContextListener listener; + + public ViewUpdateTranslator(RelationView key, ITuple filter, IQueryRuntimeContextListener listener) { + super(); + this.key = key; + this.filter = filter; + this.listener = listener; + } + + public void processChange(ViewUpdate change) { + listener.update(key, Tuples.flatTupleOf(change.tuple()), change.isInsertion()); + } + + public Object[] isMatching(Tuple tuple, D value){ + return isMatching(key.getWrappedKey().transform(tuple, value), filter); + } + @SuppressWarnings("squid:S1168") + private Object[] isMatching(Object[] tuple, ITuple filter) { + for(int i = 0; i other = (ViewUpdateTranslator) obj; + return Objects.equals(filter, other.filter) && Objects.equals(key, other.key) + && Objects.equals(listener, other.listener); + } +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java b/subprojects/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java new file mode 100644 index 00000000..3531195a --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java @@ -0,0 +1,48 @@ +package tools.refinery.store.query.view; + +import java.util.function.BiPredicate; + +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.Tuple.Tuple1; +import tools.refinery.store.model.representation.Relation; + +public class FilteredRelationView extends RelationView{ + private final BiPredicate predicate; + + public FilteredRelationView(Relation representation, BiPredicate predicate) { + super(representation); + this.predicate = predicate; + } + @Override + protected Object[] forwardMap(Tuple key, D value) { + return toTuple1Array(key); + } + @Override + public boolean get(Model model, Object[] tuple) { + int[] content = new int[tuple.length]; + for(int i = 0; i extends RelationView { + + public FunctionalRelationView(Relation representation) { + super(representation); + } + + @Override + protected boolean filter(Tuple key, D value) { + return true; + } + + 
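+	// A match row consists of the key's elements (each wrapped as a Tuple1) followed by the stored value in the last position, so a Relation<D> of arity n is exposed to queries as an (n+1)-ary view.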
@Override + protected Object[] forwardMap(Tuple key, D value) { + return toTuple1ArrayPlusValue(key, value); + } + + @Override + public boolean get(Model model, Object[] tuple) { + int[] content = new int[tuple.length-1]; + for(int i = 0; i Object[] toTuple1ArrayPlusValue(Tuple t, D value) { + Object[] result = new Object[t.getSize()+1]; + for(int i = 0; i{ + + public KeyOnlyRelationView(Relation representation) { + super(representation, (k,v)->true); + } + @Override + protected boolean filter(Tuple key, Boolean value) { + return !value.equals(representation.getDefaultValue()); + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/query/view/RelationView.java b/subprojects/store/src/main/java/tools/refinery/store/query/view/RelationView.java new file mode 100644 index 00000000..fd55eed4 --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/query/view/RelationView.java @@ -0,0 +1,85 @@ +package tools.refinery.store.query.view; + +import java.util.Objects; + +import org.eclipse.viatra.query.runtime.matchers.context.common.BaseInputKeyWrapper; + +import tools.refinery.store.map.CursorAsIterator; +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; + +/** + * Represents a view of a {@link Relation} that can be queried. + * + * @author Oszkar Semerath + * + * @param + */ +public abstract class RelationView extends BaseInputKeyWrapper> { + protected final Relation representation; + + protected RelationView(Relation representation) { + super(null); + this.wrappedKey = this; + this.representation = representation; + } + + @Override + public String getPrettyPrintableName() { + return representation.getName(); + } + + @Override + public String getStringID() { + return representation.getName() + this.getClass().getName(); + } + + public Relation getRepresentation() { + return representation; + } + + @Override + public boolean isEnumerable() { + return true; + } + + protected abstract boolean filter(Tuple key, D value); + + protected abstract Object[] forwardMap(Tuple key, D value); + + public abstract boolean get(Model model, Object[] tuple); + + @SuppressWarnings("squid:S1168") + public Object[] transform(Tuple tuple, D value) { + if (filter(tuple, value)) { + return forwardMap(tuple, value); + } else + return null; + } + + public Iterable getAll(Model model) { + return (() -> new CursorAsIterator<>(model.getAll(representation), (k, v) -> forwardMap(k, v), + (k, v) -> filter(k, v))); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Objects.hash(representation); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!(obj instanceof RelationView)) + return false; + @SuppressWarnings("unchecked") + RelationView other = ((RelationView) obj); + return Objects.equals(representation, other.representation); + } + +} diff --git a/subprojects/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java b/subprojects/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java new file mode 100644 index 00000000..841d0dfa --- /dev/null +++ b/subprojects/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java @@ -0,0 +1,72 @@ +package tools.refinery.store.util; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.function.Function; +import java.util.function.Predicate; + +public final class 
CollectionsUtil { + private CollectionsUtil() { + throw new UnsupportedOperationException(); + } + + public static Iterator map(Iterator source, Function transformation) { + return new Iterator() { + + @Override + public boolean hasNext() { + return source.hasNext(); + } + + @Override + public T next() { + return transformation.apply(source.next()); + } + }; + } + + public static Iterable map(Iterable source, Function transformation) { + return (()->map(source.iterator(),transformation)); + } + + public static Iterator filter(Iterator source, Predicate condition) { + return new Iterator() { + T internalNext = move(); + boolean internalHasNext; + + private T move() { + internalHasNext = source.hasNext(); + if(internalHasNext) { + internalNext = source.next(); + } + while(internalHasNext && !condition.test(internalNext)) { + internalHasNext = source.hasNext(); + if(internalHasNext) { + internalNext = source.next(); + } + } + return internalNext; + } + + @Override + public boolean hasNext() { + return internalHasNext; + } + + @Override + public T next() { + if(!internalHasNext) { + throw new NoSuchElementException(); + } else { + T result = internalNext; + move(); + return result; + } + } + }; + } + + public static Iterable filter(Iterable source, Predicate condition) { + return (()->filter(source.iterator(),condition)); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/MapUnitTests.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/MapUnitTests.java new file mode 100644 index 00000000..f0d5d927 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/MapUnitTests.java @@ -0,0 +1,22 @@ +package tools.refinery.store.map.tests; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.TupleHashProvider; + +class MapUnitTests { + @Test + void defaultTest() { + VersionedMapStore store = new VersionedMapStoreImpl(TupleHashProvider.singleton(), false); + var map = store.createMap(); + var out1 = map.put(Tuple.of(0), true); + assertEquals(false, out1); + var out2 = map.put(Tuple.of(1), true); + assertEquals(false, out2); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java new file mode 100644 index 00000000..1f9d022f --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java @@ -0,0 +1,96 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class CommitFuzzTest { + private void runFuzzTest(String scenario, int 
seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl sut = (VersionedMapImpl) store.createMap(); + MapTestEnvironment e = new MapTestEnvironment(sut); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommits(scenario, steps, maxKey, values, e, r, commitFrequency); + } + + private void iterativeRandomPutsAndCommits(String scenario, int steps, int maxKey, String[] values, + MapTestEnvironment e, Random r, int commitFrequency) { + int stopAt = -1; + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + if (index == stopAt) { + System.out.println("issue!"); + System.out.println("State before:"); + e.printComparison(); + e.sut.prettyPrint(); + System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); + } + try { + e.put(nextKey, nextValue); + if (index == stopAt) { + e.sut.prettyPrint(); + } + e.checkEquivalence(scenario + ":" + index); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index, steps, null); + if (index % commitFrequency == 0) { + e.sut.commit(); + } + } + } + + @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java new file mode 100644 index 00000000..263cb2cd --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java @@ -0,0 +1,143 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.AbstractMap.SimpleEntry; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Random; +import java.util.stream.Stream; + +import 
org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class ContentEqualsFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommitsThenCompare(scenario, chp, steps, maxKey, values, r, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenCompare(String scenario, ContinousHashProvider chp, int steps, int maxKey, String[] values, Random r, int commitFrequency) { + + VersionedMapStore store1 = new VersionedMapStoreImpl(chp, values[0]); + VersionedMap sut1 = store1.createMap(); + + // Fill one map + for (int i = 0; i < steps; i++) { + int index1 = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + sut1.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index1 + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index1, steps, "Fill"); + if (index1 % commitFrequency == 0) { + sut1.commit(); + } + } + + // Get the content of the first map + List> content = new LinkedList<>(); + Cursor cursor = sut1.getAll(); + while (cursor.move()) { + content.add(new SimpleEntry<>(cursor.getKey(), cursor.getValue())); + } + + // Randomize the order of the content + Collections.shuffle(content, r); + + VersionedMapStore store2 = new VersionedMapStoreImpl(chp, values[0]); + VersionedMap sut2 = store2.createMap(); + int index2 = 1; + for (SimpleEntry entry : content) { + sut2.put(entry.getKey(), entry.getValue()); + if(index2++%commitFrequency == 0) + sut2.commit(); + } + + // Check the integrity of the maps + ((VersionedMapImpl) sut1).checkIntegrity(); + ((VersionedMapImpl) sut2).checkIntegrity(); + +// // Compare the two maps + // By size + assertEquals(sut1.getSize(), content.size()); + assertEquals(sut2.getSize(), content.size()); + + + + // By cursors + Cursor cursor1 = sut1.getAll(); + Cursor cursor2 = sut2.getAll(); + int index3 = 1; + boolean canMove = true; + do{ + boolean canMove1 = cursor1.move(); + boolean canMove2 = cursor2.move(); + assertEquals(canMove1, canMove2, scenario + ":" + index3 +" Cursors stopped at different times!"); + assertEquals(cursor1.getKey(), cursor2.getKey(), scenario + ":" + index3 +" Cursors have different keys!"); + assertEquals(cursor1.getValue(), cursor2.getValue(), scenario + ":" + index3 +" Cursors have different values!"); + + canMove = canMove1; + MapTestEnvironment.printStatus(scenario, index3++, content.size(), "Compare"); + } while (canMove); + + // By hashcode + assertEquals(sut1.hashCode(), sut2.hashCode(), "Hash codes are not equal!"); + + // By equals + assertEquals(sut1, sut2, "Maps are not 
equals"); + } + + @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java new file mode 100644 index 00000000..e6334224 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java @@ -0,0 +1,117 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class DiffCursorFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + iterativeRandomPutsAndCommitsThenDiffcursor(scenario, store, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenDiffcursor(String scenario, VersionedMapStore store, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. 
build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + int largestCommit = -1; + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + largestCommit = (int) version; + } + if (index % 10000 == 0) + System.out.println(scenario + ":" + index + "/" + steps + " building finished"); + } + // 2. create a non-versioned map, + VersionedMapImpl moving = (VersionedMapImpl) store.createMap(); + Random r2 = new Random(seed + 1); + + final int diffTravelFrequency = commitFrequency * 2; + for (int i = 0; i < steps; i++) { + int index = i + 1; + if (index % diffTravelFrequency == 0) { + // difftravel + long travelToVersion = r2.nextInt(largestCommit + 1); + DiffCursor diffCursor = moving.getDiffCursor(travelToVersion); + moving.putAll(diffCursor); + + } else { + // random puts + int nextKey = r2.nextInt(maxKey); + String nextValue = values[r2.nextInt(values.length)]; + try { + moving.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + versioned.commit(); + } + if (index % 10000 == 0) + System.out.println(scenario + ":" + index + "/" + steps + " building finished"); + } + } + + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java new file mode 100644 index 00000000..1ab431a8 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java @@ -0,0 +1,97 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.fail; + +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MultiThreadFuzzTest { + public static final int noThreads = 32; + + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + + // initialize runnables + MultiThreadTestRunnable[] runnables = new MultiThreadTestRunnable[noThreads]; + for(int i = 0; i errors = new LinkedList<>(); + for(int i = 0; i parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Multithread {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java new file mode 100644 index 00000000..f77f9ee5 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java @@ -0,0 +1,101 @@ +package tools.refinery.store.map.tests.fuzz; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; + +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +public class MultiThreadTestRunnable implements Runnable { + String scenario; + VersionedMapStore store; + int steps; + int maxKey; + String[] values; + int seed; + int commitFrequency; + List errors = new LinkedList<>(); + + public MultiThreadTestRunnable(String scenario, VersionedMapStore store, int steps, + int maxKey, String[] values, int seed, int commitFrequency) { + super(); + this.scenario = scenario; + this.store = store; + this.steps = steps; + this.maxKey = maxKey; + this.values = values; + this.seed = seed; + this.commitFrequency = commitFrequency; + } + + private void 
logAndThrowError(String message) { + AssertionError error = new AssertionError(message); + errors.add(error); + } + + public List getErrors() { + return errors; + } + + @Override + public void run() { + // 1. build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + Map index2Version = new HashMap<>(); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + index2Version.put(i, version); + } + MapTestEnvironment.printStatus(scenario, index, steps, "building"); + } + // 2. create a non-versioned + VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); + r = new Random(seed); + Random r2 = new Random(seed+1); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + reference.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); + } + // go back to an existing state and compare to the reference + if (index % (commitFrequency) == 0) { + versioned.restore(index2Version.get(i)); + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned,errors); + + // go back to a random state (probably created by another thread) + List states = new ArrayList<>(store.getStates()); + Collections.shuffle(states, r2); + for(Long state : states.subList(0, Math.min(states.size(), 100))) { + versioned.restore(state); + } + versioned.restore(index2Version.get(i)); + } + + MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); + } + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java new file mode 100644 index 00000000..d40c49c4 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java @@ -0,0 +1,92 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MutableFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl sut = 
(VersionedMapImpl) store.createMap(); + MapTestEnvironment e = new MapTestEnvironment(sut); + + Random r = new Random(seed); + + iterativeRandomPuts(scenario, steps, maxKey, values, e, r); + } + + private void iterativeRandomPuts(String scenario, int steps, int maxKey, String[] values, + MapTestEnvironment e, Random r) { + int stopAt = -1; + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + if (index == stopAt) { + System.out.println("issue!"); + System.out.println("State before:"); + e.printComparison(); + e.sut.prettyPrint(); + System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); + } + try { + e.put(nextKey, nextValue); + if (index == stopAt) { + e.sut.prettyPrint(); + } + e.checkEquivalence(scenario + ":" + index); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index, steps, null); + } + } + + @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { + runFuzzTest( + "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? "Evil" : "Normal"), + seed, steps, noKeys, noValues, evilHash); + } + + static Stream parametrizedFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, + new Object[] { 3, 32, 32 * 32, 32 * 32 * 32 * 32 }, new Object[] { 2, 3 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { + runFuzzTest( + "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? 
"Evil" : "Normal"), + seed, steps, noKeys, noValues, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java new file mode 100644 index 00000000..410705a2 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java @@ -0,0 +1,89 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MutableImmutableCompareFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl immutable = (VersionedMapImpl) store.createMap(); + VersionedMapImpl mutable = (VersionedMapImpl) store.createMap(); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommitsAndCompare(scenario, immutable, mutable, steps, maxKey, values, r, + commitFrequency); + } + + private void iterativeRandomPutsAndCommitsAndCompare(String scenario, VersionedMapImpl immutable, + VersionedMapImpl mutable, int steps, int maxKey, String[] values, Random r, + int commitFrequency) { + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + immutable.put(nextKey, nextValue); + mutable.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + immutable.commit(); + } + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, immutable, mutable); + + MapTestEnvironment.printStatus(scenario, index, steps, null); + } + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 
2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(MutableImmutableCompareFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java new file mode 100644 index 00000000..2e29a03f --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java @@ -0,0 +1,109 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class RestoreFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + + iterativeRandomPutsAndCommitsThenRestore(scenario, store, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenRestore(String scenario, VersionedMapStore store, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + Map index2Version = new HashMap<>(); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + index2Version.put(i, version); + } + MapTestEnvironment.printStatus(scenario, index, steps, "building"); + } + // 2. 
create a non-versioned and + VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); + r = new Random(seed); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + reference.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + versioned.restore(index2Version.get(i)); + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned); + } + MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); + } + + } + + @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("smoke") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("smoke") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java new file mode 100644 index 00000000..914a0f63 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java @@ -0,0 +1,113 @@ +package tools.refinery.store.map.tests.fuzz; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class SharedStoreFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + List> stores = 
VersionedMapStoreImpl.createSharedVersionedMapStores(5, chp, values[0]); + + iterativeRandomPutsAndCommitsThenRestore(scenario, stores, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenRestore(String scenario, List> stores, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. maps with versions + Random r = new Random(seed); + List> versioneds = new LinkedList<>(); + for(VersionedMapStore store : stores) { + versioneds.add((VersionedMapImpl) store.createMap()); + } + + List> index2Version = new LinkedList<>(); + for(int i = 0; i()); + } + + for (int i = 0; i < steps; i++) { + int stepIndex = i + 1; + for (int storeIndex = 0; storeIndex> reference = new LinkedList<>(); + for(VersionedMapStore store : stores) { + reference.add((VersionedMapImpl) store.createMap()); + } + r = new Random(seed); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + for (int storeIndex = 0; storeIndex parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Shared Store {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("smoke") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("SharedS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java new file mode 100644 index 00000000..e75d7f5a --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java @@ -0,0 +1,64 @@ +package tools.refinery.store.map.tests.fuzz.utils; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.params.provider.Arguments; + +public final class FuzzTestUtils { + public static final int FAST_STEP_COUNT = 500; + public static final int SLOW_STEP_COUNT = 32 * 32 * 32 * 32; + + private FuzzTestUtils() { + throw new IllegalStateException("This is a static utility class and should not be instantiated directly"); + } + + public static Stream changeStepCount(Stream arguments, int parameterIndex) { + return arguments.map(x -> Arguments.of(updatedStepCount(x.get(), parameterIndex))); + } + + public static Object[] updatedStepCount(Object[] arguments, int parameterIndex) { + Object[] copy = Arrays.copyOf(arguments, arguments.length); + copy[parameterIndex] = SLOW_STEP_COUNT; + return copy; + } + + static List> permutationInternal(int from, Object[]... 
valueOption) { + if (valueOption.length == from) { + return List.of(List.of()); + } else { + Object[] permuteThis = valueOption[from]; + List> otherCombination = permutationInternal(from + 1, valueOption); + List> result = new LinkedList<>(); + for (Object permuteThisElement : permuteThis) { + for (List otherCombinationList : otherCombination) { + List newResult = new LinkedList<>(); + newResult.add(permuteThisElement); + newResult.addAll(otherCombinationList); + result.add(newResult); + } + } + return result; + } + } + + public static Stream permutation(Object[]... valueOption) { + List> permutations = permutationInternal(0, valueOption); + return permutations.stream().map(x -> Arguments.of(x.toArray())); + } + + public static Stream permutationWithSize(Object[]... valueOption) { + int size = 1; + for (int i = 0; i < valueOption.length; i++) { + size *= valueOption[i].length; + } + Object[][] newValueOption = new Object[valueOption.length + 1][]; + newValueOption[0] = new Object[] { size }; + for (int i = 1; i < newValueOption.length; i++) { + newValueOption[i] = valueOption[i - 1]; + } + return permutation(newValueOption); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java new file mode 100644 index 00000000..72f2a46c --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java @@ -0,0 +1,33 @@ +package tools.refinery.store.map.tests.fuzz.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +class FuzzTestUtilsTest { + @Test + void permutationInternalTest() { + List> res = FuzzTestUtils.permutationInternal(0, new Object[] { 1, 2, 3 }, + new Object[] { 'a', 'b', 'c' }, new Object[] { "alpha", "beta", "gamma", "delta" }); + assertEquals(3 * 3 * 4, res.size()); + } + + @Test + void permutationTest1() { + var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' }, + new Object[] { "alpha", "beta", "gamma", "delta" }); + assertEquals(3 * 3 * 4, res.count()); + } + + @Test + void permutationTest2() { + var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' }, + new Object[] { "alpha", "beta", "gamma", "delta" }); + var arguments = res.findFirst().get().get(); + assertEquals(1, arguments[0]); + assertEquals('a', arguments[1]); + assertEquals("alpha", arguments[2]); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java b/subprojects/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java new file mode 100644 index 00000000..991b4f51 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java @@ -0,0 +1,214 @@ +package tools.refinery.store.map.tests.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.internal.VersionedMapImpl; + +import java.util.TreeMap; + 
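+// Test harness used by the fuzz tests: every put() on the versioned map under test (sut) is mirrored
+// into a plain HashMap oracle, and checkEquivalence() cross-checks the two via get(), cursor iteration and size.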
+public class MapTestEnvironment { + public static String[] prepareValues(int maxValue) { + String[] values = new String[maxValue]; + values[0] = "DEFAULT"; + for (int i = 1; i < values.length; i++) { + values[i] = "VAL" + i; + } + return values; + } + + public static ContinousHashProvider prepareHashProvider(final boolean evil) { + // Use maxPrime = 2147483629 + + ContinousHashProvider chp = new ContinousHashProvider() { + + @Override + public int getHash(Integer key, int index) { + if (evil && index < 15 && index < key / 3) { + return 7; + } + int result = 1; + final int prime = 31; + + result = prime * result + key; + result = prime * result + index; + + return result; + } + }; + return chp; + } + + public static void printStatus(String scenario, int actual, int max, String stepName) { + if (actual % 10000 == 0) { + String printStepName = stepName == null ? "" : stepName; + System.out.format(scenario + ":%d/%d (%d%%) " + printStepName + "%n", actual, max, actual * 100 / max); + } + + } + + public static void compareTwoMaps(String title, VersionedMapImpl map1, + VersionedMapImpl map2) { + compareTwoMaps(title, map1, map2, null); + } + public static void compareTwoMaps(String title, VersionedMapImpl map1, + VersionedMapImpl map2, List errors) { + // 1. Comparing cursors. + Cursor cursor1 = map1.getAll(); + Cursor cursor2 = map2.getAll(); + while (!cursor1.isTerminated()) { + if (cursor2.isTerminated()) { + fail("cursor 2 terminated before cursor1"); + } + assertEqualsList(cursor1.getKey(), cursor2.getKey(),"Keys not equal", errors); + assertEqualsList(cursor2.getValue(), cursor2.getValue(), "Values not equal", errors); + cursor1.move(); + cursor2.move(); + } + if (!cursor2.isTerminated()) + fail("cursor 1 terminated before cursor 2"); + + // 2.1. comparing hash codes + assertEqualsList(map1.hashCode(), map2.hashCode(), title + ": hash code check",errors); + assertEqualsList(map1, map2, title + ": 1.equals(2)",errors); + assertEqualsList(map2, map1, title + ": 2.equals(1)",errors); + } + private static void assertEqualsList(Object o1, Object o2, String message, List errors) { + if(errors == null) { + assertEquals(o1, o2, message); + } else { + if(o1 != null) { + if(!(o1.equals(o2))) { + AssertionError error = new AssertionError((message != null ? message+" " : "") + "expected: " + o1 + " but was : " + o2); + errors.add(error); + } + } + } + } + + public VersionedMapImpl sut; + Map oracle = new HashMap(); + + public MapTestEnvironment(VersionedMapImpl sut) { + this.sut = sut; + } + + public void put(K key, V value) { + V oldSutValue = sut.put(key, value); + V oldOracleValue; + if (value != sut.getDefaultValue()) { + oldOracleValue = oracle.put(key, value); + } else { + oldOracleValue = oracle.remove(key); + } + if(oldSutValue == sut.getDefaultValue() && oldOracleValue != null) { + fail("After put, SUT old value was default, but oracle old walue was " + oldOracleValue); + } + if(oldSutValue != sut.getDefaultValue()) { + assertEquals(oldOracleValue, oldSutValue); + } + } + + public void checkEquivalence(String title) { + // 0. Checking integrity + try { + sut.checkIntegrity(); + } catch (IllegalStateException e) { + fail(title + ": " + e.getMessage()); + } + + // 1. Checking: if Reference contains pair, then SUT contains + // pair. 
+ // Tests get functions + for (Entry entry : oracle.entrySet()) { + V sutValue = sut.get(entry.getKey()); + V oracleValue = entry.getValue(); + if (sutValue != oracleValue) { + printComparison(); + fail(title + ": Non-equivalent get(" + entry.getKey() + ") results: SUT=" + sutValue + ", Oracle=" + + oracleValue + "!"); + } + } + + // 2. Checking: if SUT contains pair, then Reference contains + // pair. + // Tests iterators + int elementsInSutEntrySet = 0; + Cursor cursor = sut.getAll(); + while (cursor.move()) { + elementsInSutEntrySet++; + K key = cursor.getKey(); + V sutValue = cursor.getValue(); + // System.out.println(key + " -> " + sutValue); + V oracleValue = oracle.get(key); + if (sutValue != oracleValue) { + printComparison(); + fail(title + ": Non-equivalent entry in iterator: SUT=<" + key + "," + sutValue + ">, Oracle=<" + key + + "," + oracleValue + ">!"); + } + + } + + // 3. Checking sizes + // Counting of non-default value pairs. + int oracleSize = oracle.entrySet().size(); + long sutSize = sut.getSize(); + if (oracleSize != sutSize || oracleSize != elementsInSutEntrySet) { + printComparison(); + fail(title + ": Non-eqivalent size() result: SUT.getSize()=" + sutSize + ", SUT.entryset.size=" + + elementsInSutEntrySet + ", Oracle=" + oracleSize + "!"); + } + } + + public static void checkOrder(String scenario, VersionedMap versionedMap) { + K previous = null; + Cursor cursor = versionedMap.getAll(); + while(cursor.move()) { + System.out.println(cursor.getKey() + " " + ((VersionedMapImpl) versionedMap).getHashProvider().getHash(cursor.getKey(), 0)); + if(previous != null) { + int comparisonResult = ((VersionedMapImpl) versionedMap).getHashProvider().compare(previous, cursor.getKey()); + assertTrue(comparisonResult<0,scenario+" Cursor order is not incremental!"); + } + previous = cursor.getKey(); + } + System.out.println(); + } + + public void printComparison() { + System.out.println("SUT:"); + printEntrySet(sut.getAll()); + System.out.println("Oracle:"); + printEntrySet(oracle.entrySet().iterator()); + } + + private void printEntrySet(Iterator> iterator) { + TreeMap treemap = new TreeMap<>(); + while (iterator.hasNext()) { + Entry entry = iterator.next(); + treemap.put(entry.getKey(), entry.getValue()); + } + for (Entry e : treemap.entrySet()) { + System.out.println("\t" + e.getKey() + " -> " + e.getValue()); + } + } + + private void printEntrySet(Cursor cursor) { + TreeMap treemap = new TreeMap<>(); + while (cursor.move()) { + treemap.put(cursor.getKey(), cursor.getValue()); + } + for (Entry e : treemap.entrySet()) { + System.out.println("\t" + e.getKey() + " -> " + e.getValue()); + } + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java b/subprojects/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java new file mode 100644 index 00000000..7d070380 --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java @@ -0,0 +1,161 @@ +package tools.refinery.store.model.hashTests; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Random; + +import org.junit.jupiter.api.Test; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.TupleHashProvider; +import tools.refinery.store.model.TupleHashProviderBitMagic; + +class HashEfficiencyTest { + + private static 
List permutations(int range, int arity) { + if(arity == 1) { + List result = new ArrayList<>(range); + for(int i=0; i 1) { + List smallers = permutations(range, arity-1); + List result = new ArrayList<>(range*smallers.size()); + for(Tuple smaller : smallers) { + for(int i=0; i nPermutations(int arity, int n) { + int range = amountToRange(arity, n); + List permutations = permutations(range, arity); + return permutations.subList(0, n); + } + + public static List nRandoms(int arity, int n, int seed) { + int range = amountToRange(arity, n); + List permutations = new ArrayList<>(n); + Random r = new Random(seed); + for(int i = 0; i p = permutations(10, 2); + assertEquals(p.size(),10*10); + } +// private void printTuples(List p) { +// for(Tuple element : p) { +// System.out.println(element); +// } +// } + @Test + void nPermutationTest() { + final int amount = 500; + List p = nPermutations(2, amount); + assertEquals(amount,p.size()); + } + @Test + void nRandomTest() { + final int amount = 500; + List p = nRandoms(2, amount, 1);; + assertEquals(amount,p.size()); + } + private static double calculateHashClashes(List tuples, ContinousHashProvider chp) { + int sumClashes = 0; + + for(int i = 0; i chp, Tuple a, Tuple b) { + if(a.equals(b)) return 0; + final int bits = 5; + final int segments = Integer.SIZE/bits; + final int mask = (1<>(depth*5))&mask; + int bHash = (chp.getHash(b, index)>>(depth*5))&mask; + if(aHash != bHash) { + return i+1; + } + if(i>400) { + throw new IllegalStateException(a+" vs "+b); + } + } + } + private static double caclulateOptimalHashClash(int size) { + return (Math.log(size)/Math.log(32)); + } + public static void main(String[] args) { + List hashNames = new LinkedList<>(); + List> hashes = new LinkedList<>(); + hashNames.add("PrimeGroup"); + hashes.add(new TupleHashProvider()); + hashNames.add("BitMagic"); + hashes.add(new TupleHashProviderBitMagic()); + + int[] arities = new int[] {2,3,4,5}; + int[] sizes = new int[] {32*32,32*32*8}; + + System.out.println("Size,Arity,DataSource,Hash,Chashes,Optimal,Badness"); + for(int size : sizes) { + double optimalClashes = caclulateOptimalHashClash(size); + for(int arity : arities) { + List dataSourceNames = new LinkedList<>(); + List> dataSources = new LinkedList<>(); + +// dataSourceNames.add("Permutation"); +// dataSources.add(nPermutations(arity, size)); + dataSourceNames.add("Random"); + dataSources.add(nRandoms(arity, size, 0)); + + for(int dataSourceIndex = 0; dataSourceIndex person = new Relation<>("Person", 1, false); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, friend)); + Model model = store.createModel(); + + assertTrue(store.getDataRepresentations().contains(person)); + assertTrue(store.getDataRepresentations().contains(friend)); + assertTrue(model.getDataRepresentations().contains(person)); + assertTrue(model.getDataRepresentations().contains(friend)); + + Relation other = new Relation("other", 2, null); + assertFalse(model.getDataRepresentations().contains(other)); + } + + @Test + void modelBuildingTest() { + Relation person = new Relation<>("Person", 1, false); + Relation age = new Relation("age", 1, null); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(age, Tuple.of(0), 3); + model.put(age, Tuple.of(1), 1); + model.put(friend, Tuple.of(0, 1), 
true); + model.put(friend, Tuple.of(1, 0), true); + + assertTrue(model.get(person, Tuple.of(0))); + assertTrue(model.get(person, Tuple.of(1))); + assertFalse(model.get(person, Tuple.of(2))); + + assertEquals(3, model.get(age, Tuple.of(0))); + assertEquals(1, model.get(age, Tuple.of(1))); + assertEquals(null, model.get(age, Tuple.of(2))); + + assertTrue(model.get(friend, Tuple.of(0, 1))); + assertFalse(model.get(friend, Tuple.of(0, 5))); + } + + @Test + void modelBuildingArityFailTest() { + Relation person = new Relation<>("Person", 1, false); + ModelStore store = new ModelStoreImpl(Set.of(person)); + Model model = store.createModel(); + + final Tuple tuple3 = Tuple.of(1, 1, 1); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(person, tuple3, true)); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(person, tuple3)); + } + + @Test + void modelBuildingNullFailTest() { + Relation age = new Relation("age", 1, null); + ModelStore store = new ModelStoreImpl(Set.of(age)); + Model model = store.createModel(); + + model.put(age, Tuple.of(1), null); // valid + Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(age, null, 1)); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(age, null)); + + } + + @Test + void modelUpdateTest() { + Relation person = new Relation<>("Person", 1, false); + Relation age = new Relation("age", 1, null); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(age, Tuple.of(0), 3); + model.put(age, Tuple.of(1), 1); + model.put(friend, Tuple.of(0, 1), true); + model.put(friend, Tuple.of(1, 0), true); + + assertEquals(3, model.get(age, Tuple.of(0))); + assertTrue(model.get(friend, Tuple.of(0, 1))); + + model.put(age, Tuple.of(0), 4); + model.put(friend, Tuple.of(0, 1), false); + + assertEquals(4, model.get(age, Tuple.of(0))); + assertFalse(model.get(friend, Tuple.of(0, 1))); + } + + @Test + void restoreTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(friend, Tuple.of(0, 1), true); + model.put(friend, Tuple.of(1, 0), true); + long state1 = model.commit(); + + assertFalse(model.get(person, Tuple.of(2))); + assertFalse(model.get(friend, Tuple.of(0, 2))); + + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 2), true); + long state2 = model.commit(); + + assertTrue(model.get(person, Tuple.of(2))); + assertTrue(model.get(friend, Tuple.of(0, 2))); + + model.restore(state1); + + assertFalse(model.get(person, Tuple.of(2))); + assertFalse(model.get(friend, Tuple.of(0, 2))); + + model.restore(state2); + + assertTrue(model.get(person, Tuple.of(2))); + assertTrue(model.get(friend, Tuple.of(0, 2))); + } +} diff --git a/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTest.java b/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTest.java new file mode 100644 index 00000000..02381bcd --- /dev/null +++ b/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTest.java @@ -0,0 +1,445 @@ +package tools.refinery.store.query.test; + +import static 
org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; +import tools.refinery.store.model.representation.TruthValue; +import tools.refinery.store.query.QueriableModel; +import tools.refinery.store.query.QueriableModelStore; +import tools.refinery.store.query.QueriableModelStoreImpl; +import tools.refinery.store.query.building.DNFAnd; +import tools.refinery.store.query.building.DNFPredicate; +import tools.refinery.store.query.building.EquivalenceAtom; +import tools.refinery.store.query.building.PredicateAtom; +import tools.refinery.store.query.building.RelationAtom; +import tools.refinery.store.query.building.Variable; +import tools.refinery.store.query.view.FilteredRelationView; +import tools.refinery.store.query.view.KeyOnlyRelationView; +import tools.refinery.store.query.view.RelationView; + +class QueryTest { + + static void compareMatchSets(Stream matchSet, Set> expected) { + Set> translatedMatchSet = new HashSet<>(); + var interator = matchSet.iterator(); + while (interator.hasNext()) { + var element = interator.next(); + List elementToTranslatedMatchSet = new ArrayList<>(); + for (int i = 0; i < element.length; i++) { + elementToTranslatedMatchSet.add((Tuple) element[i]); + } + translatedMatchSet.add(elementToTranslatedMatchSet); + } + + assertEquals(expected, translatedMatchSet); + } + + @Test + void typeConstraintTest() { + Relation person = new Relation<>("Person", 1, false); + Relation asset = new Relation<>("Asset", 1, false); + RelationView persionView = new KeyOnlyRelationView(person); + + List parameters = Arrays.asList(new Variable("p1")); + RelationAtom personRelationAtom = new RelationAtom(persionView, parameters); + DNFAnd clause = new DNFAnd(Collections.emptySet(), Arrays.asList(personRelationAtom)); + DNFPredicate predicate = new DNFPredicate("TypeConstraint", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, asset), Set.of(persionView), + Set.of(predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(asset, Tuple.of(1), true); + model.put(asset, Tuple.of(2), true); + + model.flushChanges(); + assertEquals(2, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), Set.of(List.of(Tuple.of(0)), List.of(Tuple.of(1)))); + } + + @Test + void relationConstraintTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom)); + DNFPredicate predicate = new 
DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + assertEquals(0, model.countResults(predicate)); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 0), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 2), TruthValue.TRUE); + + assertEquals(0, model.countResults(predicate)); + + model.flushChanges(); + assertEquals(3, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), Set.of(List.of(Tuple.of(0), Tuple.of(1)), + List.of(Tuple.of(1), Tuple.of(0)), List.of(Tuple.of(1), Tuple.of(2)))); + } + + @Test + void andTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom1 = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + RelationAtom friendRelationAtom2 = new RelationAtom(friendMustView, Arrays.asList(p2, p1)); + DNFAnd clause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom1, friendRelationAtom2)); + DNFPredicate predicate = new DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + assertEquals(0, model.countResults(predicate)); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(0, 2), TruthValue.TRUE); + + model.flushChanges(); + assertEquals(0, model.countResults(predicate)); + + model.put(friend, Tuple.of(1, 0), TruthValue.TRUE); + model.flushChanges(); + assertEquals(2, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), + Set.of(List.of(Tuple.of(0), Tuple.of(1)), List.of(Tuple.of(1), Tuple.of(0)))); + + model.put(friend, Tuple.of(2, 0), TruthValue.TRUE); + model.flushChanges(); + assertEquals(4, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), + Set.of(List.of(Tuple.of(0), Tuple.of(1)), List.of(Tuple.of(1), Tuple.of(0)), + List.of(Tuple.of(0), Tuple.of(2)), List.of(Tuple.of(2), Tuple.of(0)))); + } + + @Test + void existTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, 
Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause = new DNFAnd(Set.of(p2), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom)); + DNFPredicate predicate = new DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + assertEquals(0, model.countResults(predicate)); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 0), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 2), TruthValue.TRUE); + + assertEquals(0, model.countResults(predicate)); + + model.flushChanges(); + assertEquals(2, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), Set.of(List.of(Tuple.of(0)), List.of(Tuple.of(1)))); + } + + @Test + void orTest() { + Relation person = new Relation<>("Person", 1, false); + Relation animal = new Relation<>("Animal", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView animalView = new KeyOnlyRelationView(animal); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + // Person-Person friendship + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom1 = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause1 = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom1)); + + // Animal-Animal friendship + RelationAtom animalRelationAtom1 = new RelationAtom(animalView, Arrays.asList(p1)); + RelationAtom animalRelationAtom2 = new RelationAtom(animalView, Arrays.asList(p2)); + RelationAtom friendRelationAtom2 = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause2 = new DNFAnd(Collections.emptySet(), + Arrays.asList(animalRelationAtom1, animalRelationAtom2, friendRelationAtom2)); + + // No inter-species friendship + + DNFPredicate predicate = new DNFPredicate("Or", parameters, Arrays.asList(clause1, clause2)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, animal, friend), + Set.of(persionView, animalView, friendMustView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(animal, Tuple.of(2), true); + model.put(animal, Tuple.of(3), true); + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(0, 2), TruthValue.TRUE); + model.put(friend, Tuple.of(2, 3), TruthValue.TRUE); + model.put(friend, Tuple.of(3, 0), TruthValue.TRUE); + + model.flushChanges(); + assertEquals(2, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), + Set.of(List.of(Tuple.of(0), Tuple.of(1)), List.of(Tuple.of(2), Tuple.of(3)))); + } + + @Test + void equalityTest() { + 
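// Only the reflexive pairs are expected to match, since the EquivalenceAtom requires p1 == p2. + 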
Relation person = new Relation("Person", 1, false); + RelationView persionView = new KeyOnlyRelationView(person); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + EquivalenceAtom equivalenceAtom = new EquivalenceAtom(true, p1, p2); + DNFAnd clause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, equivalenceAtom)); + DNFPredicate predicate = new DNFPredicate("Equality", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person), Set.of(persionView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + + model.flushChanges(); + assertEquals(3, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), Set.of(List.of(Tuple.of(0), Tuple.of(0)), + List.of(Tuple.of(1), Tuple.of(1)), List.of(Tuple.of(2), Tuple.of(2)))); + } + + @Test + void inequalityTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + Variable p3 = new Variable("p3"); + List parameters = Arrays.asList(p1, p2, p3); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom1 = new RelationAtom(friendMustView, Arrays.asList(p1, p3)); + RelationAtom friendRelationAtom2 = new RelationAtom(friendMustView, Arrays.asList(p2, p3)); + EquivalenceAtom inequivalenceAtom = new EquivalenceAtom(false, p1, p2); + DNFAnd clause = new DNFAnd(Collections.emptySet(), Arrays.asList(personRelationAtom1, personRelationAtom2, + friendRelationAtom1, friendRelationAtom2, inequivalenceAtom)); + DNFPredicate predicate = new DNFPredicate("Inequality", parameters, Arrays.asList(clause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 2), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 2), TruthValue.TRUE); + + model.flushChanges(); + assertEquals(2, model.countResults(predicate)); + compareMatchSets(model.allResults(predicate), + Set.of(List.of(Tuple.of(0), Tuple.of(1), Tuple.of(2)), List.of(Tuple.of(1), Tuple.of(0), Tuple.of(2)))); + } + + @Test + void patternCallTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, 
Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom)); + DNFPredicate friendPredicate = new DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause)); + + Variable p3 = new Variable("p3"); + Variable p4 = new Variable("p4"); + List substitution = Arrays.asList(p3, p4); + RelationAtom personRelationAtom3 = new RelationAtom(persionView, Arrays.asList(p3)); + RelationAtom personRelationAtom4 = new RelationAtom(persionView, Arrays.asList(p4)); + PredicateAtom friendPredicateAtom = new PredicateAtom(true, false, friendPredicate, substitution); + DNFAnd patternCallClause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom3, personRelationAtom4, friendPredicateAtom)); + DNFPredicate predicate = new DNFPredicate("PatternCall", substitution, Arrays.asList(patternCallClause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(friendPredicate, predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 0), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 2), TruthValue.TRUE); + + model.flushChanges(); + + assertEquals(3, model.countResults(friendPredicate)); + } + + @Test + void negativePatternCallTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + RelationView persionView = new KeyOnlyRelationView(person); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + + Variable p1 = new Variable("p1"); + Variable p2 = new Variable("p2"); + List parameters = Arrays.asList(p1, p2); + + RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1)); + RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2)); + RelationAtom friendRelationAtom = new RelationAtom(friendMustView, Arrays.asList(p1, p2)); + DNFAnd clause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom)); + DNFPredicate friendPredicate = new DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause)); + + Variable p3 = new Variable("p3"); + Variable p4 = new Variable("p4"); + List substitution = Arrays.asList(p3, p4); + RelationAtom personRelationAtom3 = new RelationAtom(persionView, Arrays.asList(p3)); + RelationAtom personRelationAtom4 = new RelationAtom(persionView, Arrays.asList(p4)); + PredicateAtom friendPredicateAtom = new PredicateAtom(false, false, friendPredicate, substitution); + DNFAnd negativePatternCallClause = new DNFAnd(Collections.emptySet(), + Arrays.asList(personRelationAtom3, personRelationAtom4, friendPredicateAtom)); + DNFPredicate predicate = new DNFPredicate("NegativePatternCall", substitution, + Arrays.asList(negativePatternCallClause)); + + QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend), + Set.of(persionView, friendMustView), Set.of(friendPredicate, predicate)); + QueriableModel model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, 
+		model.put(person, Tuple.of(2), true);
+		model.put(friend, Tuple.of(0, 1), TruthValue.TRUE);
+		model.put(friend, Tuple.of(1, 0), TruthValue.TRUE);
+		model.put(friend, Tuple.of(1, 2), TruthValue.TRUE);
+
+		model.flushChanges();
+		assertEquals(6, model.countResults(predicate));
+	}
+
+	@Test
+	void transitivePatternCallTest() {
+		Relation<Boolean> person = new Relation<Boolean>("Person", 1, false);
+		Relation<TruthValue> friend = new Relation<>("friend", 2, TruthValue.FALSE);
+		RelationView<Boolean> persionView = new KeyOnlyRelationView(person);
+		RelationView<TruthValue> friendMustView = new FilteredRelationView<TruthValue>(friend, (k, v) -> v.must());
+
+		Variable p1 = new Variable("p1");
+		Variable p2 = new Variable("p2");
+		List<Variable> parameters = Arrays.asList(p1, p2);
+
+		RelationAtom personRelationAtom1 = new RelationAtom(persionView, Arrays.asList(p1));
+		RelationAtom personRelationAtom2 = new RelationAtom(persionView, Arrays.asList(p2));
+		RelationAtom friendRelationAtom = new RelationAtom(friendMustView, Arrays.asList(p1, p2));
+		DNFAnd clause = new DNFAnd(Collections.emptySet(),
+				Arrays.asList(personRelationAtom1, personRelationAtom2, friendRelationAtom));
+		DNFPredicate friendPredicate = new DNFPredicate("RelationConstraint", parameters, Arrays.asList(clause));
+
+		Variable p3 = new Variable("p3");
+		Variable p4 = new Variable("p4");
+		List<Variable> substitution = Arrays.asList(p3, p4);
+		RelationAtom personRelationAtom3 = new RelationAtom(persionView, Arrays.asList(p3));
+		RelationAtom personRelationAtom4 = new RelationAtom(persionView, Arrays.asList(p4));
+		PredicateAtom friendPredicateAtom = new PredicateAtom(true, true, friendPredicate, substitution);
+		DNFAnd patternCallClause = new DNFAnd(Collections.emptySet(),
+				Arrays.asList(personRelationAtom3, personRelationAtom4, friendPredicateAtom));
+		DNFPredicate predicate = new DNFPredicate("TransitivePatternCall", substitution,
+				Arrays.asList(patternCallClause));
+
+		QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, friend),
+				Set.of(persionView, friendMustView), Set.of(friendPredicate, predicate));
+		QueriableModel model = store.createModel();
+
+		model.put(person, Tuple.of(0), true);
+		model.put(person, Tuple.of(1), true);
+		model.put(person, Tuple.of(2), true);
+		model.put(friend, Tuple.of(0, 1), TruthValue.TRUE);
+		model.put(friend, Tuple.of(1, 2), TruthValue.TRUE);
+
+		model.flushChanges();
+		assertEquals(3, model.countResults(predicate));
+	}
+}
\ No newline at end of file
diff --git a/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTransactionTest.java b/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTransactionTest.java
new file mode 100644
index 00000000..e72186b9
--- /dev/null
+++ b/subprojects/store/src/test/java/tools/refinery/store/query/test/QueryTransactionTest.java
@@ -0,0 +1,58 @@
+package tools.refinery.store.query.test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.jupiter.api.Test;
+
+import tools.refinery.store.model.Tuple;
+import tools.refinery.store.model.representation.Relation;
+import tools.refinery.store.query.QueriableModel;
+import tools.refinery.store.query.QueriableModelStore;
+import tools.refinery.store.query.QueriableModelStoreImpl;
+import tools.refinery.store.query.building.DNFAnd;
+import tools.refinery.store.query.building.DNFPredicate;
+import tools.refinery.store.query.building.RelationAtom;
+import tools.refinery.store.query.building.Variable;
+import tools.refinery.store.query.view.KeyOnlyRelationView;
+import tools.refinery.store.query.view.RelationView;
+
+class QueryTransactionTest {
+	@Test
+	void flushTest() {
+		Relation<Boolean> person = new Relation<>("Person", 1, false);
+		Relation<Boolean> asset = new Relation<>("Asset", 1, false);
+		RelationView<Boolean> persionView = new KeyOnlyRelationView(person);
+
+		List<Variable> parameters = Arrays.asList(new Variable("p1"));
+		RelationAtom personRelationAtom = new RelationAtom(persionView, parameters);
+		DNFAnd clause = new DNFAnd(Collections.emptySet(), Arrays.asList(personRelationAtom));
+		DNFPredicate predicate = new DNFPredicate("TypeConstraint", parameters, Arrays.asList(clause));
+
+		QueriableModelStore store = new QueriableModelStoreImpl(Set.of(person, asset), Set.of(persionView),
+				Set.of(predicate));
+		QueriableModel model = store.createModel();
+
+		assertEquals(0, model.countResults(predicate));
+
+		model.put(person, Tuple.of(0), true);
+		model.put(person, Tuple.of(1), true);
+		model.put(asset, Tuple.of(1), true);
+		model.put(asset, Tuple.of(2), true);
+
+		assertEquals(0, model.countResults(predicate));
+
+		model.flushChanges();
+		assertEquals(2, model.countResults(predicate));
+
+		model.put(person, Tuple.of(4), true);
+		assertEquals(2, model.countResults(predicate));
+
+		model.flushChanges();
+		assertEquals(3, model.countResults(predicate));
+	}
+}
diff --git a/subprojects/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java b/subprojects/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java
new file mode 100644
index 00000000..171be0e5
--- /dev/null
+++ b/subprojects/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java
@@ -0,0 +1,78 @@
+package tools.refinery.store.util;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static tools.refinery.store.util.CollectionsUtil.filter;
+import static tools.refinery.store.util.CollectionsUtil.map;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+class CollectionsUtilTests {
+	List<Integer> list10 = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+	List<String> listTen = List.of("1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
+
+	private static <T> void compare(Iterable<T> a, Iterable<T> b) {
+		List<T> listA = toList(a);
+		List<T> listB = toList(b);
+		assertEquals(listA, listB);
+	}
+
+	private static <T> List<T> toList(Iterable<T> a) {
+		List<T> result = new ArrayList<T>();
+		Iterator<T> iterator = a.iterator();
+		while (iterator.hasNext()) {
+			result.add(iterator.next());
+		}
+		return result;
+	}
+
+	@Test
+	void testFilterEven() {
+		compare(List.of(2, 4, 6, 8, 10), filter(list10, (x -> x % 2 == 0)));
+	}
+
+	@Test
+	void testFilterOdd() {
+		compare(List.of(1, 3, 5, 7, 9), filter(list10, (x -> x % 2 == 1)));
+	}
+
+	@Test
+	void testFilterFalse() {
+		compare(List.of(), filter(list10, (x -> false)));
+	}
+
+	@Test
+	void testFilterTrue() {
+		compare(list10, filter(list10, (x -> true)));
+	}
+
+	@Test
+	void testFilterEmpty() {
+		compare(List.of(), filter(List.of(), (x -> true)));
+	}
+
+	@Test()
+	void testNoSuchElement() {
+		Iterable<Integer> iterable = filter(list10, (x -> x % 2 == 0));
+		Iterator<Integer> iterator = iterable.iterator();
+		while (iterator.hasNext()) {
+			iterator.next();
+		}
+		Assertions.assertThrows(NoSuchElementException.class, () -> iterator.next());
+	}
+
+	@Test()
+	void mapTest() {
+		compare(listTen, map(list10, x -> x.toString()));
+	}
+
+	@Test()
+	void mapEmtyTest() {
+		compare(List.of(), map(List.of(), x -> x.toString()));
+	}
+}
-- 
cgit v1.2.3-54-g00ecf