From baa96eaafa26532480c82a9114a26a7a5610838b Mon Sep 17 00:00:00 2001 From: Kristóf Marussy Date: Tue, 5 Oct 2021 11:51:35 +0200 Subject: chore(store): rename store package --- .../language/mapping/PartialModelMapper.java | 2 +- .../data/map/benchmarks/ImmutablePutBenchmark.java | 77 ---- .../map/benchmarks/ImmutablePutExecutionPlan.java | 56 --- .../map/benchmarks/ImmutablePutBenchmark.java | 77 ++++ .../map/benchmarks/ImmutablePutExecutionPlan.java | 57 +++ .../refinery/data/map/ContinousHashProvider.java | 69 ---- .../main/java/tools/refinery/data/map/Cursor.java | 14 - .../tools/refinery/data/map/CursorAsIterator.java | 57 --- .../java/tools/refinery/data/map/DiffCursor.java | 6 - .../tools/refinery/data/map/MapAsIterable.java | 26 -- .../java/tools/refinery/data/map/Versioned.java | 7 - .../java/tools/refinery/data/map/VersionedMap.java | 13 - .../tools/refinery/data/map/VersionedMapStore.java | 14 - .../data/map/VersionedMapStoreConfiguration.java | 48 --- .../refinery/data/map/VersionedMapStoreImpl.java | 135 ------ .../refinery/data/map/internal/HashClash.java | 18 - .../refinery/data/map/internal/ImmutableNode.java | 378 ----------------- .../refinery/data/map/internal/MapCursor.java | 131 ------ .../refinery/data/map/internal/MapDiffCursor.java | 221 ---------- .../refinery/data/map/internal/MutableNode.java | 456 --------------------- .../tools/refinery/data/map/internal/Node.java | 85 ---- .../refinery/data/map/internal/OldValueBox.java | 19 - .../data/map/internal/VersionedMapImpl.java | 171 -------- .../main/java/tools/refinery/data/model/Model.java | 20 - .../tools/refinery/data/model/ModelCursor.java | 25 -- .../tools/refinery/data/model/ModelDiffCursor.java | 26 -- .../java/tools/refinery/data/model/ModelStore.java | 16 - .../tools/refinery/data/model/ModelStoreImpl.java | 121 ------ .../main/java/tools/refinery/data/model/Tuple.java | 148 ------- .../refinery/data/model/TupleHashProvider.java | 65 --- .../data/model/TupleHashProviderBitMagic.java | 28 -- .../refinery/data/model/internal/ModelImpl.java | 124 ------ .../internal/SimilarRelationEquivalenceClass.java | 33 -- .../data/model/representation/AuxilaryData.java | 22 - .../model/representation/DataRepresentation.java | 24 -- .../data/model/representation/Relation.java | 31 -- .../data/model/representation/TruthValue.java | 51 --- .../tools/refinery/data/query/RelationalScope.java | 35 -- .../tools/refinery/data/query/building/DNFAnd.java | 37 -- .../refinery/data/query/building/DNFAtom.java | 33 -- .../refinery/data/query/building/DNFPredicate.java | 72 ---- .../data/query/building/EquivalenceAtom.java | 44 -- .../data/query/building/PredicateAtom.java | 66 --- .../query/building/PredicateBuilder_string.java | 107 ----- .../refinery/data/query/building/RelationAtom.java | 49 --- .../refinery/data/query/building/Variable.java | 22 - .../data/query/internal/DummyBaseIndexer.java | 59 --- .../data/query/internal/PredicateTranslator.java | 210 ---------- .../query/internal/RelationUpdateListener.java | 52 --- .../internal/RelationUpdateListenerEntry.java | 64 --- .../query/internal/RelationalEngineContext.java | 33 -- .../query/internal/RelationalQueryMetaContext.java | 58 --- .../query/internal/RelationalRuntimeContext.java | 186 --------- .../data/query/view/FilteredRelationView.java | 48 --- .../data/query/view/FunctionalRelationView.java | 50 --- .../data/query/view/KeyOnlyRelationView.java | 16 - .../refinery/data/query/view/RelationView.java | 86 ---- .../tools/refinery/data/util/CollectionsUtil.java | 72 ---- 
.../refinery/store/map/ContinousHashProvider.java | 69 ++++ .../main/java/tools/refinery/store/map/Cursor.java | 14 + .../tools/refinery/store/map/CursorAsIterator.java | 57 +++ .../java/tools/refinery/store/map/DiffCursor.java | 6 + .../tools/refinery/store/map/MapAsIterable.java | 26 ++ .../java/tools/refinery/store/map/Versioned.java | 7 + .../tools/refinery/store/map/VersionedMap.java | 13 + .../refinery/store/map/VersionedMapStore.java | 14 + .../store/map/VersionedMapStoreConfiguration.java | 48 +++ .../refinery/store/map/VersionedMapStoreImpl.java | 135 ++++++ .../refinery/store/map/internal/HashClash.java | 18 + .../refinery/store/map/internal/ImmutableNode.java | 378 +++++++++++++++++ .../refinery/store/map/internal/MapCursor.java | 131 ++++++ .../refinery/store/map/internal/MapDiffCursor.java | 221 ++++++++++ .../refinery/store/map/internal/MutableNode.java | 456 +++++++++++++++++++++ .../tools/refinery/store/map/internal/Node.java | 85 ++++ .../refinery/store/map/internal/OldValueBox.java | 19 + .../store/map/internal/VersionedMapImpl.java | 171 ++++++++ .../java/tools/refinery/store/model/Model.java | 20 + .../tools/refinery/store/model/ModelCursor.java | 25 ++ .../refinery/store/model/ModelDiffCursor.java | 26 ++ .../tools/refinery/store/model/ModelStore.java | 16 + .../tools/refinery/store/model/ModelStoreImpl.java | 122 ++++++ .../java/tools/refinery/store/model/Tuple.java | 148 +++++++ .../refinery/store/model/TupleHashProvider.java | 65 +++ .../store/model/TupleHashProviderBitMagic.java | 28 ++ .../refinery/store/model/internal/ModelImpl.java | 124 ++++++ .../internal/SimilarRelationEquivalenceClass.java | 33 ++ .../store/model/representation/AuxilaryData.java | 22 + .../model/representation/DataRepresentation.java | 24 ++ .../store/model/representation/Relation.java | 31 ++ .../store/model/representation/TruthValue.java | 51 +++ .../refinery/store/query/RelationalScope.java | 35 ++ .../refinery/store/query/building/DNFAnd.java | 37 ++ .../refinery/store/query/building/DNFAtom.java | 33 ++ .../store/query/building/DNFPredicate.java | 72 ++++ .../store/query/building/EquivalenceAtom.java | 44 ++ .../store/query/building/PredicateAtom.java | 66 +++ .../query/building/PredicateBuilder_string.java | 107 +++++ .../store/query/building/RelationAtom.java | 49 +++ .../refinery/store/query/building/Variable.java | 22 + .../store/query/internal/DummyBaseIndexer.java | 59 +++ .../store/query/internal/PredicateTranslator.java | 210 ++++++++++ .../query/internal/RelationUpdateListener.java | 52 +++ .../internal/RelationUpdateListenerEntry.java | 64 +++ .../query/internal/RelationalEngineContext.java | 33 ++ .../query/internal/RelationalQueryMetaContext.java | 58 +++ .../query/internal/RelationalRuntimeContext.java | 186 +++++++++ .../store/query/view/FilteredRelationView.java | 48 +++ .../store/query/view/FunctionalRelationView.java | 50 +++ .../store/query/view/KeyOnlyRelationView.java | 16 + .../refinery/store/query/view/RelationView.java | 86 ++++ .../tools/refinery/store/util/CollectionsUtil.java | 72 ++++ .../data/map/tests/fuzz/CommitFuzzTest.java | 96 ----- .../data/map/tests/fuzz/ContentEqualsFuzzTest.java | 143 ------- .../data/map/tests/fuzz/DiffCursorFuzzTest.java | 117 ------ .../data/map/tests/fuzz/MultiThreadFuzzTest.java | 97 ----- .../map/tests/fuzz/MultiThreadTestRunnable.java | 101 ----- .../data/map/tests/fuzz/MutableFuzzTest.java | 92 ----- .../fuzz/MutableImmutableCompareFuzzTest.java | 89 ---- .../data/map/tests/fuzz/RestoreFuzzTest.java | 109 ----- 
.../data/map/tests/fuzz/SharedStoreFuzzTest.java | 113 ----- .../data/map/tests/fuzz/utils/FuzzTestUtils.java | 64 --- .../map/tests/fuzz/utils/FuzzTestUtilsTest.java | 33 -- .../data/map/tests/utils/MapTestEnvironment.java | 213 ---------- .../data/model/hashTests/HashEfficiencyTest.java | 161 -------- .../tools/refinery/data/model/tests/ModelTest.java | 148 ------- .../tools/refinery/data/query/test/QueryTest.java | 90 ---- .../refinery/data/util/CollectionsUtilTests.java | 78 ---- .../store/map/tests/fuzz/CommitFuzzTest.java | 96 +++++ .../map/tests/fuzz/ContentEqualsFuzzTest.java | 143 +++++++ .../store/map/tests/fuzz/DiffCursorFuzzTest.java | 117 ++++++ .../store/map/tests/fuzz/MultiThreadFuzzTest.java | 97 +++++ .../map/tests/fuzz/MultiThreadTestRunnable.java | 101 +++++ .../store/map/tests/fuzz/MutableFuzzTest.java | 92 +++++ .../fuzz/MutableImmutableCompareFuzzTest.java | 89 ++++ .../store/map/tests/fuzz/RestoreFuzzTest.java | 109 +++++ .../store/map/tests/fuzz/SharedStoreFuzzTest.java | 113 +++++ .../store/map/tests/fuzz/utils/FuzzTestUtils.java | 64 +++ .../map/tests/fuzz/utils/FuzzTestUtilsTest.java | 33 ++ .../store/map/tests/utils/MapTestEnvironment.java | 214 ++++++++++ .../store/model/hashTests/HashEfficiencyTest.java | 161 ++++++++ .../refinery/store/model/tests/ModelTest.java | 148 +++++++ .../tools/refinery/store/query/test/QueryTest.java | 90 ++++ .../refinery/store/util/CollectionsUtilTests.java | 78 ++++ 143 files changed, 5882 insertions(+), 5879 deletions(-) delete mode 100644 store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutBenchmark.java delete mode 100644 store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutExecutionPlan.java create mode 100644 store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java create mode 100644 store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java delete mode 100644 store/src/main/java/tools/refinery/data/map/ContinousHashProvider.java delete mode 100644 store/src/main/java/tools/refinery/data/map/Cursor.java delete mode 100644 store/src/main/java/tools/refinery/data/map/CursorAsIterator.java delete mode 100644 store/src/main/java/tools/refinery/data/map/DiffCursor.java delete mode 100644 store/src/main/java/tools/refinery/data/map/MapAsIterable.java delete mode 100644 store/src/main/java/tools/refinery/data/map/Versioned.java delete mode 100644 store/src/main/java/tools/refinery/data/map/VersionedMap.java delete mode 100644 store/src/main/java/tools/refinery/data/map/VersionedMapStore.java delete mode 100644 store/src/main/java/tools/refinery/data/map/VersionedMapStoreConfiguration.java delete mode 100644 store/src/main/java/tools/refinery/data/map/VersionedMapStoreImpl.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/HashClash.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/ImmutableNode.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/MapCursor.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/MapDiffCursor.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/MutableNode.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/Node.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/OldValueBox.java delete mode 100644 store/src/main/java/tools/refinery/data/map/internal/VersionedMapImpl.java delete mode 100644 store/src/main/java/tools/refinery/data/model/Model.java delete 
mode 100644 store/src/main/java/tools/refinery/data/model/ModelCursor.java delete mode 100644 store/src/main/java/tools/refinery/data/model/ModelDiffCursor.java delete mode 100644 store/src/main/java/tools/refinery/data/model/ModelStore.java delete mode 100644 store/src/main/java/tools/refinery/data/model/ModelStoreImpl.java delete mode 100644 store/src/main/java/tools/refinery/data/model/Tuple.java delete mode 100644 store/src/main/java/tools/refinery/data/model/TupleHashProvider.java delete mode 100644 store/src/main/java/tools/refinery/data/model/TupleHashProviderBitMagic.java delete mode 100644 store/src/main/java/tools/refinery/data/model/internal/ModelImpl.java delete mode 100644 store/src/main/java/tools/refinery/data/model/internal/SimilarRelationEquivalenceClass.java delete mode 100644 store/src/main/java/tools/refinery/data/model/representation/AuxilaryData.java delete mode 100644 store/src/main/java/tools/refinery/data/model/representation/DataRepresentation.java delete mode 100644 store/src/main/java/tools/refinery/data/model/representation/Relation.java delete mode 100644 store/src/main/java/tools/refinery/data/model/representation/TruthValue.java delete mode 100644 store/src/main/java/tools/refinery/data/query/RelationalScope.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/DNFAnd.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/DNFAtom.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/DNFPredicate.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/EquivalenceAtom.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/PredicateAtom.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/PredicateBuilder_string.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/RelationAtom.java delete mode 100644 store/src/main/java/tools/refinery/data/query/building/Variable.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/DummyBaseIndexer.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/PredicateTranslator.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListener.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListenerEntry.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/RelationalEngineContext.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/RelationalQueryMetaContext.java delete mode 100644 store/src/main/java/tools/refinery/data/query/internal/RelationalRuntimeContext.java delete mode 100644 store/src/main/java/tools/refinery/data/query/view/FilteredRelationView.java delete mode 100644 store/src/main/java/tools/refinery/data/query/view/FunctionalRelationView.java delete mode 100644 store/src/main/java/tools/refinery/data/query/view/KeyOnlyRelationView.java delete mode 100644 store/src/main/java/tools/refinery/data/query/view/RelationView.java delete mode 100644 store/src/main/java/tools/refinery/data/util/CollectionsUtil.java create mode 100644 store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java create mode 100644 store/src/main/java/tools/refinery/store/map/Cursor.java create mode 100644 store/src/main/java/tools/refinery/store/map/CursorAsIterator.java create mode 100644 store/src/main/java/tools/refinery/store/map/DiffCursor.java create mode 100644 
store/src/main/java/tools/refinery/store/map/MapAsIterable.java create mode 100644 store/src/main/java/tools/refinery/store/map/Versioned.java create mode 100644 store/src/main/java/tools/refinery/store/map/VersionedMap.java create mode 100644 store/src/main/java/tools/refinery/store/map/VersionedMapStore.java create mode 100644 store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java create mode 100644 store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/HashClash.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/MapCursor.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/MutableNode.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/Node.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java create mode 100644 store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java create mode 100644 store/src/main/java/tools/refinery/store/model/Model.java create mode 100644 store/src/main/java/tools/refinery/store/model/ModelCursor.java create mode 100644 store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java create mode 100644 store/src/main/java/tools/refinery/store/model/ModelStore.java create mode 100644 store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java create mode 100644 store/src/main/java/tools/refinery/store/model/Tuple.java create mode 100644 store/src/main/java/tools/refinery/store/model/TupleHashProvider.java create mode 100644 store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java create mode 100644 store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java create mode 100644 store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java create mode 100644 store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java create mode 100644 store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java create mode 100644 store/src/main/java/tools/refinery/store/model/representation/Relation.java create mode 100644 store/src/main/java/tools/refinery/store/model/representation/TruthValue.java create mode 100644 store/src/main/java/tools/refinery/store/query/RelationalScope.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/DNFAnd.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/DNFAtom.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/PredicateBuilder_string.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/RelationAtom.java create mode 100644 store/src/main/java/tools/refinery/store/query/building/Variable.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/PredicateTranslator.java create mode 100644 
store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListener.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListenerEntry.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java create mode 100644 store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java create mode 100644 store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java create mode 100644 store/src/main/java/tools/refinery/store/query/view/FunctionalRelationView.java create mode 100644 store/src/main/java/tools/refinery/store/query/view/KeyOnlyRelationView.java create mode 100644 store/src/main/java/tools/refinery/store/query/view/RelationView.java create mode 100644 store/src/main/java/tools/refinery/store/util/CollectionsUtil.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/CommitFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/ContentEqualsFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/DiffCursorFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadTestRunnable.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableImmutableCompareFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/RestoreFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/SharedStoreFuzzTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtils.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtilsTest.java delete mode 100644 store/src/test/java/tools/refinery/data/map/tests/utils/MapTestEnvironment.java delete mode 100644 store/src/test/java/tools/refinery/data/model/hashTests/HashEfficiencyTest.java delete mode 100644 store/src/test/java/tools/refinery/data/model/tests/ModelTest.java delete mode 100644 store/src/test/java/tools/refinery/data/query/test/QueryTest.java delete mode 100644 store/src/test/java/tools/refinery/data/util/CollectionsUtilTests.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java create mode 100644 
store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java create mode 100644 store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java create mode 100644 store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java create mode 100644 store/src/test/java/tools/refinery/store/model/tests/ModelTest.java create mode 100644 store/src/test/java/tools/refinery/store/query/test/QueryTest.java create mode 100644 store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java diff --git a/language-to-store/src/main/java/tools/refinery/language/mapping/PartialModelMapper.java b/language-to-store/src/main/java/tools/refinery/language/mapping/PartialModelMapper.java index e501ce8f..a2cab671 100644 --- a/language-to-store/src/main/java/tools/refinery/language/mapping/PartialModelMapper.java +++ b/language-to-store/src/main/java/tools/refinery/language/mapping/PartialModelMapper.java @@ -1,7 +1,7 @@ package tools.refinery.language.mapping; -import tools.refinery.data.model.Model; import tools.refinery.language.model.problem.Problem; +import tools.refinery.store.model.Model; public class PartialModelMapper { public Model transformProblem(Problem problem) { diff --git a/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutBenchmark.java b/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutBenchmark.java deleted file mode 100644 index bc5e056f..00000000 --- a/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutBenchmark.java +++ /dev/null @@ -1,77 +0,0 @@ -package tools.refinery.data.map.benchmarks; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.concurrent.TimeUnit; - -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Fork; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Warmup; -import org.openjdk.jmh.infra.Blackhole; - -@Fork(1) -@BenchmarkMode(Mode.AverageTime) -@OutputTimeUnit(TimeUnit.MILLISECONDS) -@Measurement(time = 1, timeUnit = TimeUnit.SECONDS) -@Warmup(time = 1, timeUnit = TimeUnit.SECONDS) -public class ImmutablePutBenchmark { - @Benchmark - public void immutablePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { - var sut = executionPlan.createSut(); - for (int i = 0; i < executionPlan.nPut; i++) { - sut.put(executionPlan.nextKey(), executionPlan.nextValue()); - } - blackhole.consume(sut); - } - - @Benchmark - public void immutablePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { - var sut = executionPlan.createSut(); - for (int i = 0; i < executionPlan.nPut; i++) { - sut.put(executionPlan.nextKey(), executionPlan.nextValue()); - if (i % 10 == 0) { - blackhole.consume(sut.commit()); - } - } - blackhole.consume(sut); - } - - @Benchmark - public void baselinePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { - var sut = new HashMap(); - for (int i = 0; i < executionPlan.nPut; i++) { - var key = executionPlan.nextKey(); - var value = executionPlan.nextValue(); - if (executionPlan.isDefault(value)) { - sut.remove(key); - } else { - sut.put(key, value); - } - } - blackhole.consume(sut); - } - - @Benchmark - public void baselinePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { - var sut = new HashMap(); - var store = new 
ArrayList>(); - for (int i = 0; i < executionPlan.nPut; i++) { - var key = executionPlan.nextKey(); - var value = executionPlan.nextValue(); - if (executionPlan.isDefault(value)) { - sut.remove(key); - } else { - sut.put(key, value); - } - if (i % 10 == 0) { - store.add(new HashMap<>(sut)); - } - } - blackhole.consume(sut); - blackhole.consume(store); - } -} diff --git a/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutExecutionPlan.java b/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutExecutionPlan.java deleted file mode 100644 index 084381a0..00000000 --- a/store/src/jmh/java/tools/refinery/data/map/benchmarks/ImmutablePutExecutionPlan.java +++ /dev/null @@ -1,56 +0,0 @@ -package tools.refinery.data.map.benchmarks; - -import java.util.Random; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; -import org.openjdk.jmh.annotations.Level; -import org.openjdk.jmh.annotations.Param; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.Setup; -import org.openjdk.jmh.annotations.State; - -@State(Scope.Benchmark) -public class ImmutablePutExecutionPlan { - - @Param({ "100", "10000" }) - public int nPut; - - @Param({ "32", "1000", "100000" }) - public int nKeys; - - @Param({ "2", "3" }) - public int nValues; - - private Random random; - - private String[] values; - - private ContinousHashProvider hashProvider = MapTestEnvironment.prepareHashProvider(false); - - @Setup(Level.Trial) - public void setUpTrial() { - random = new Random(); - values = MapTestEnvironment.prepareValues(nValues); - } - - public VersionedMapImpl createSut() { - VersionedMapStore store = new VersionedMapStoreImpl(hashProvider, values[0]); - return (VersionedMapImpl) store.createMap(); - } - - public Integer nextKey() { - return random.nextInt(nKeys); - } - - public boolean isDefault(String value) { - return value == values[0]; - } - - public String nextValue() { - return values[random.nextInt(nValues)]; - } -} diff --git a/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java b/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java new file mode 100644 index 00000000..cdf3d3c8 --- /dev/null +++ b/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutBenchmark.java @@ -0,0 +1,77 @@ +package tools.refinery.store.map.benchmarks; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.concurrent.TimeUnit; + +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +@Fork(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Measurement(time = 1, timeUnit = TimeUnit.SECONDS) +@Warmup(time = 1, timeUnit = TimeUnit.SECONDS) +public class ImmutablePutBenchmark { + @Benchmark + public void immutablePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = executionPlan.createSut(); + for (int i = 0; i < executionPlan.nPut; i++) { + sut.put(executionPlan.nextKey(), executionPlan.nextValue()); + } 
+ blackhole.consume(sut); + } + + @Benchmark + public void immutablePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = executionPlan.createSut(); + for (int i = 0; i < executionPlan.nPut; i++) { + sut.put(executionPlan.nextKey(), executionPlan.nextValue()); + if (i % 10 == 0) { + blackhole.consume(sut.commit()); + } + } + blackhole.consume(sut); + } + + @Benchmark + public void baselinePutBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = new HashMap(); + for (int i = 0; i < executionPlan.nPut; i++) { + var key = executionPlan.nextKey(); + var value = executionPlan.nextValue(); + if (executionPlan.isDefault(value)) { + sut.remove(key); + } else { + sut.put(key, value); + } + } + blackhole.consume(sut); + } + + @Benchmark + public void baselinePutAndCommitBenchmark(ImmutablePutExecutionPlan executionPlan, Blackhole blackhole) { + var sut = new HashMap(); + var store = new ArrayList>(); + for (int i = 0; i < executionPlan.nPut; i++) { + var key = executionPlan.nextKey(); + var value = executionPlan.nextValue(); + if (executionPlan.isDefault(value)) { + sut.remove(key); + } else { + sut.put(key, value); + } + if (i % 10 == 0) { + store.add(new HashMap<>(sut)); + } + } + blackhole.consume(sut); + blackhole.consume(store); + } +} diff --git a/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java b/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java new file mode 100644 index 00000000..756d504e --- /dev/null +++ b/store/src/jmh/java/tools/refinery/store/map/benchmarks/ImmutablePutExecutionPlan.java @@ -0,0 +1,57 @@ +package tools.refinery.store.map.benchmarks; + +import java.util.Random; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; + +@State(Scope.Benchmark) +public class ImmutablePutExecutionPlan { + + @Param({ "100", "10000" }) + public int nPut; + + @Param({ "32", "1000", "100000" }) + public int nKeys; + + @Param({ "2", "3" }) + public int nValues; + + private Random random; + + private String[] values; + + private ContinousHashProvider hashProvider = MapTestEnvironment.prepareHashProvider(false); + + @Setup(Level.Trial) + public void setUpTrial() { + random = new Random(); + values = MapTestEnvironment.prepareValues(nValues); + } + + public VersionedMapImpl createSut() { + VersionedMapStore store = new VersionedMapStoreImpl(hashProvider, values[0]); + return (VersionedMapImpl) store.createMap(); + } + + public Integer nextKey() { + return random.nextInt(nKeys); + } + + public boolean isDefault(String value) { + return value == values[0]; + } + + public String nextValue() { + return values[random.nextInt(nValues)]; + } +} diff --git a/store/src/main/java/tools/refinery/data/map/ContinousHashProvider.java b/store/src/main/java/tools/refinery/data/map/ContinousHashProvider.java deleted file mode 100644 index 6a54a3ff..00000000 --- a/store/src/main/java/tools/refinery/data/map/ContinousHashProvider.java +++ /dev/null @@ -1,69 +0,0 @@ -package tools.refinery.data.map; - -import 
tools.refinery.data.map.internal.Node; - -/** - * A class representing an equivalence relation for a type {@code K} with a - * continuous hash function. - * - * @author Oszkar Semerath - * - * @param Target java type. - */ -public interface ContinousHashProvider { - public static final int EFFECTIVE_BITS = Node.EFFECTIVE_BITS; - public static final int EFFECTIVE_BIT_MASK = (1 << (EFFECTIVE_BITS)) - 1; - - /** - * Maximal practical depth for differentiating keys. If two keys have the same - * hash code until that depth, the algorithm can stop. - */ - public static final int MAX_PRACTICAL_DEPTH = 500; - - /** - * Provides a hash code for a object {@code key} with a given {@code index}. It - * has the following contracts: - *
- * <ul>
- * <li>If {@link #equals}{@code (key1,key2)}, then
- * {@code getHash(key1, index) == getHash(key2, index)} for all values of
- * {@code index}.</li>
- * <li>If {@code getHash(key1,index) == getHash(key2, index)} for all values of
- * {@code index}, then {@link #equals}{@code (key1, key2)}</li>
- * <li>In current implementation, we use only the least significant
- * {@link #EFFECTIVE_BITS}</li>
- * </ul>
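// A minimal sketch of a provider that satisfies the contract above, assuming Integer
// keys; the lambda and the variable name are illustrative and not part of this patch.
// Equal keys hash identically at every index, and each index re-mixes the key so
// deeper levels of the map obtain fresh hash bits.
ContinousHashProvider<Integer> exampleProvider = (key, index) -> java.util.Objects.hash(key, index);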
- * Check {@link #equals} for further details. - * - * @param key The target data object. - * @param index The depth of the the hash code. Needs to be non-negative. - * @return A hash code. - */ - public int getHash(K key, int index); - - public default int getEffectiveHash(K key, int index) { - return getHash(key, index) & EFFECTIVE_BIT_MASK; - } - - public default int compare(K key1, K key2) { - if (key1.equals(key2)) { - return 0; - } else { - for (int i = 0; i < ContinousHashProvider.MAX_PRACTICAL_DEPTH; i++) { - int hash1 = getEffectiveHash(key1, i); - int hash2 = getEffectiveHash(key2, i); - for(int j = 0; j>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; - int hashFragment2 = (hash2>>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; - var result = Integer.compare(hashFragment1, hashFragment2); - if (result != 0) { - return result; - } - } - } - throw new IllegalArgumentException("Two different keys (" + key1 + " and " + key2 - + ") have the same hashcode over the practical depth limitation (" - + ContinousHashProvider.MAX_PRACTICAL_DEPTH + ")!"); - } - } -} diff --git a/store/src/main/java/tools/refinery/data/map/Cursor.java b/store/src/main/java/tools/refinery/data/map/Cursor.java deleted file mode 100644 index a137e6c1..00000000 --- a/store/src/main/java/tools/refinery/data/map/Cursor.java +++ /dev/null @@ -1,14 +0,0 @@ -package tools.refinery.data.map; - -import java.util.List; - -public interface Cursor { - public K getKey(); - public V getValue(); - public boolean isTerminated(); - public boolean move(); - public boolean isDirty(); - - @SuppressWarnings("squid:S1452") - public List> getDependingMaps(); -} diff --git a/store/src/main/java/tools/refinery/data/map/CursorAsIterator.java b/store/src/main/java/tools/refinery/data/map/CursorAsIterator.java deleted file mode 100644 index 02a49ba1..00000000 --- a/store/src/main/java/tools/refinery/data/map/CursorAsIterator.java +++ /dev/null @@ -1,57 +0,0 @@ -package tools.refinery.data.map; - -import java.util.Iterator; -import java.util.NoSuchElementException; -import java.util.function.BiFunction; -import java.util.function.BiPredicate; - -public class CursorAsIterator implements Iterator { - private final Cursor internal; - private final BiFunction entryTransformation; - private final BiPredicate filtering; - - D lastValidElement; - - public CursorAsIterator(Cursor internal, BiFunction entryTransformation, BiPredicate filtering) { - this.internal = internal; - this.entryTransformation = entryTransformation; - this.filtering = filtering; - - moveToNext(); - } - public CursorAsIterator(Cursor internal, BiFunction entryTransformation) { - this.internal = internal; - this.entryTransformation = entryTransformation; - this.filtering = ((k,v)->true); - - moveToNext(); - } - - private void moveToNext() { - internal.move(); - while(!internal.isTerminated() && !filtering.test(internal.getKey(), internal.getValue())) { - internal.move(); - } - if(!internal.isTerminated()) { - lastValidElement = entryTransformation.apply(internal.getKey(), internal.getValue()); - } - } - - - @Override - public boolean hasNext() { - return !internal.isTerminated(); - } - @Override - public D next() { - if(hasNext()) { - D last = lastValidElement; - moveToNext(); - return last; - } else { - throw new NoSuchElementException(); - } - - } - -} diff --git a/store/src/main/java/tools/refinery/data/map/DiffCursor.java b/store/src/main/java/tools/refinery/data/map/DiffCursor.java deleted file mode 100644 index 747aa610..00000000 --- 
a/store/src/main/java/tools/refinery/data/map/DiffCursor.java +++ /dev/null @@ -1,6 +0,0 @@ -package tools.refinery.data.map; - -public interface DiffCursor extends Cursor { - public V getFromValue(); - public V getToValue(); -} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/data/map/MapAsIterable.java b/store/src/main/java/tools/refinery/data/map/MapAsIterable.java deleted file mode 100644 index 05bdbef2..00000000 --- a/store/src/main/java/tools/refinery/data/map/MapAsIterable.java +++ /dev/null @@ -1,26 +0,0 @@ -package tools.refinery.data.map; - -import java.util.Iterator; -import java.util.function.BiFunction; -import java.util.function.BiPredicate; - -public class MapAsIterable implements Iterable { - private final VersionedMap internal; - private final BiFunction entryTransformation; - private final BiPredicate filtering; - - public MapAsIterable(VersionedMap internal, BiFunction entryTransformation, BiPredicate filtering) { - this.internal = internal; - this.entryTransformation = entryTransformation; - this.filtering = filtering; - } - public MapAsIterable(VersionedMap internal, BiFunction entryTransformation) { - this.internal = internal; - this.entryTransformation = entryTransformation; - this.filtering = ((k,v)->true); - } - @Override - public Iterator iterator() { - return new CursorAsIterator<>(internal.getAll(), entryTransformation, filtering); - } -} diff --git a/store/src/main/java/tools/refinery/data/map/Versioned.java b/store/src/main/java/tools/refinery/data/map/Versioned.java deleted file mode 100644 index eb364b87..00000000 --- a/store/src/main/java/tools/refinery/data/map/Versioned.java +++ /dev/null @@ -1,7 +0,0 @@ -package tools.refinery.data.map; - -public interface Versioned { - public long commit(); - //maybe revert()? 
- public void restore(long state); -} diff --git a/store/src/main/java/tools/refinery/data/map/VersionedMap.java b/store/src/main/java/tools/refinery/data/map/VersionedMap.java deleted file mode 100644 index d57e3ee5..00000000 --- a/store/src/main/java/tools/refinery/data/map/VersionedMap.java +++ /dev/null @@ -1,13 +0,0 @@ -package tools.refinery.data.map; - -public interface VersionedMap extends Versioned{ - public V get(K key); - public Cursor getAll(); - - public V put(K key, V value); - public void putAll(Cursor cursor); - - public long getSize(); - - public DiffCursor getDiffCursor(long state); -} diff --git a/store/src/main/java/tools/refinery/data/map/VersionedMapStore.java b/store/src/main/java/tools/refinery/data/map/VersionedMapStore.java deleted file mode 100644 index 029cf9e7..00000000 --- a/store/src/main/java/tools/refinery/data/map/VersionedMapStore.java +++ /dev/null @@ -1,14 +0,0 @@ -package tools.refinery.data.map; - -import java.util.Set; - -public interface VersionedMapStore { - - public VersionedMap createMap(); - - public VersionedMap createMap(long state); - - public Set getStates(); - - public DiffCursor getDiffCursor(long fromState, long toState); -} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/data/map/VersionedMapStoreConfiguration.java b/store/src/main/java/tools/refinery/data/map/VersionedMapStoreConfiguration.java deleted file mode 100644 index 162ec4ca..00000000 --- a/store/src/main/java/tools/refinery/data/map/VersionedMapStoreConfiguration.java +++ /dev/null @@ -1,48 +0,0 @@ -package tools.refinery.data.map; - -public class VersionedMapStoreConfiguration { - - public VersionedMapStoreConfiguration() { - - } - public VersionedMapStoreConfiguration(boolean immutableWhenCommiting, boolean sharedNodeCacheInStore, - boolean sharedNodeCacheInStoreGroups) { - super(); - this.immutableWhenCommiting = immutableWhenCommiting; - this.sharedNodeCacheInStore = sharedNodeCacheInStore; - this.sharedNodeCacheInStoreGroups = sharedNodeCacheInStoreGroups; - } - - /** - * If true root is replaced with immutable node when committed. Frees up memory - * by releasing immutable nodes, but it may decrease performance by recreating - * immutable nodes upon changes (some evidence). - */ - private boolean immutableWhenCommiting = true; - public boolean isImmutableWhenCommiting() { - return immutableWhenCommiting; - } - - /** - * If true, all subnodes are cached within a {@link VersionedMapStore}. It - * decreases the memory requirements. It may increase performance by discovering - * existing immutable copy of a node (some evidence). Additional overhead may - * decrease performance (no example found). The option permits the efficient - * implementation of version deletion. - */ - private boolean sharedNodeCacheInStore = true; - public boolean isSharedNodeCacheInStore() { - return sharedNodeCacheInStore; - } - - /** - * If true, all subnodes are cached within a group of - * {@link VersionedMapStoreImpl#createSharedVersionedMapStores(int, ContinousHashProvider, Object, VersionedMapStoreConfiguration)}. - * If {@link VersionedMapStoreConfiguration#sharedNodeCacheInStore} is - * false, then it has currently no impact. 
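// The three flags are supplied through the three-argument constructor above; a
// hypothetical setup that enables all of them and shares one node cache across two
// stores could look like the following (hashProvider and the variable names are
// assumed, not defined in this file):
var config = new VersionedMapStoreConfiguration(true, true, true);
List<VersionedMapStore<Integer, String>> stores =
		VersionedMapStoreImpl.createSharedVersionedMapStores(2, hashProvider, "default", config);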
- */ - private boolean sharedNodeCacheInStoreGroups = true; - public boolean isSharedNodeCacheInStoreGroups() { - return sharedNodeCacheInStoreGroups; - } -} diff --git a/store/src/main/java/tools/refinery/data/map/VersionedMapStoreImpl.java b/store/src/main/java/tools/refinery/data/map/VersionedMapStoreImpl.java deleted file mode 100644 index c194faf8..00000000 --- a/store/src/main/java/tools/refinery/data/map/VersionedMapStoreImpl.java +++ /dev/null @@ -1,135 +0,0 @@ -package tools.refinery.data.map; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import tools.refinery.data.map.internal.ImmutableNode; -import tools.refinery.data.map.internal.MapDiffCursor; -import tools.refinery.data.map.internal.Node; -import tools.refinery.data.map.internal.VersionedMapImpl; - -public class VersionedMapStoreImpl implements VersionedMapStore { - // Configuration - private final boolean immutableWhenCommiting; - - // Static data - protected final ContinousHashProvider hashProvider; - protected final V defaultValue; - - // Dynamic data - protected final Map> states = new HashMap<>(); - protected final Map, ImmutableNode> nodeCache; - protected long nextID = 0; - - public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, - VersionedMapStoreConfiguration config) { - this.immutableWhenCommiting = config.isImmutableWhenCommiting(); - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - if (config.isSharedNodeCacheInStore()) { - nodeCache = new HashMap<>(); - } else { - nodeCache = null; - } - } - - private VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, - Map, ImmutableNode> nodeCache, VersionedMapStoreConfiguration config) { - this.immutableWhenCommiting = config.isImmutableWhenCommiting(); - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - this.nodeCache = nodeCache; - } - - public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue) { - this(hashProvider, defaultValue, new VersionedMapStoreConfiguration()); - } - - public static List> createSharedVersionedMapStores(int amount, - ContinousHashProvider hashProvider, V defaultValue, - VersionedMapStoreConfiguration config) { - List> result = new ArrayList<>(amount); - if (config.isSharedNodeCacheInStoreGroups()) { - Map, ImmutableNode> nodeCache; - if (config.isSharedNodeCacheInStore()) { - nodeCache = new HashMap<>(); - } else { - nodeCache = null; - } - for (int i = 0; i < amount; i++) { - result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, nodeCache, config)); - } - } else { - for (int i = 0; i < amount; i++) { - result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, config)); - } - } - return result; - } - - public static List> createSharedVersionedMapStores(int amount, - ContinousHashProvider hashProvider, V defaultValue) { - return createSharedVersionedMapStores(amount, hashProvider, defaultValue, new VersionedMapStoreConfiguration()); - } - - @Override - public synchronized Set getStates() { - return new HashSet<>(states.keySet()); - } - - @Override - public VersionedMap createMap() { - return new VersionedMapImpl<>(this, hashProvider, defaultValue); - } - - @Override - public VersionedMap createMap(long state) { - ImmutableNode data = revert(state); - return new VersionedMapImpl<>(this, hashProvider, defaultValue, data); - } - - - public synchronized 
ImmutableNode revert(long state) { - if (states.containsKey(state)) { - return states.get(state); - } else { - ArrayList existingKeys = new ArrayList<>(states.keySet()); - Collections.sort(existingKeys); - throw new IllegalArgumentException("Store does not contain state " + state + "! Avaliable states: " - + Arrays.toString(existingKeys.toArray())); - } - } - - public synchronized long commit(Node data, VersionedMapImpl mapToUpdateRoot) { - ImmutableNode immutable; - if (data != null) { - immutable = data.toImmutable(this.nodeCache); - } else { - immutable = null; - } - - if (nextID == Long.MAX_VALUE) - throw new IllegalStateException("Map store run out of Id-s"); - long id = nextID++; - this.states.put(id, immutable); - if (this.immutableWhenCommiting) { - mapToUpdateRoot.setRoot(immutable); - } - return id; - } - - @Override - public DiffCursor getDiffCursor(long fromState, long toState) { - VersionedMap map1 = createMap(fromState); - VersionedMap map2 = createMap(toState); - Cursor cursor1 = map1.getAll(); - Cursor cursor2 = map2.getAll(); - return new MapDiffCursor<>(this.hashProvider, this.defaultValue, cursor1, cursor2); - } -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/HashClash.java b/store/src/main/java/tools/refinery/data/map/internal/HashClash.java deleted file mode 100644 index 640feb95..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/HashClash.java +++ /dev/null @@ -1,18 +0,0 @@ -package tools.refinery.data.map.internal; - -enum HashClash { - /** - * Not stuck. - */ - NONE, - - /** - * Clashed, next we should return the key of cursor 1. - */ - STUCK_CURSOR_1, - - /** - * Clashed, next we should return the key of cursor 2. - */ - STUCK_CURSOR_2 -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/ImmutableNode.java b/store/src/main/java/tools/refinery/data/map/internal/ImmutableNode.java deleted file mode 100644 index 99e27bb6..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/ImmutableNode.java +++ /dev/null @@ -1,378 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.Arrays; -import java.util.Map; - -import tools.refinery.data.map.ContinousHashProvider; - -public class ImmutableNode extends Node { - /** - * Bitmap defining the stored key and values. - */ - final int dataMap; - /** - * Bitmap defining the positions of further nodes. - */ - final int nodeMap; - /** - * Stores Keys, Values, and subnodes. Structure: (K,V)*,NODE; NODES are stored - * backwards. - */ - final Object[] content; - - /** - * Hash code derived from immutable hash code - */ - final int precalculatedHash; - - private ImmutableNode(int dataMap, int nodeMap, Object[] content, int precalculatedHash) { - super(); - this.dataMap = dataMap; - this.nodeMap = nodeMap; - this.content = content; - this.precalculatedHash = precalculatedHash; - } - - /** - * Constructor that copies a mutable node to an immutable. - * - * @param node A mutable node. - * @param cache A cache of existing immutable nodes. It can be used to search - * and place reference immutable nodes. It can be null, if no cache - * available. - * @return an immutable version of the input node. - */ - static ImmutableNode constructImmutable(MutableNode node, - Map, ImmutableNode> cache) { - // 1. try to return from cache - if (cache != null) { - ImmutableNode cachedResult = cache.get(node); - if (cachedResult != null) { - // 1.1 Already cached, return from cache. - return cachedResult; - } - } - - // 2. 
otherwise construct a new ImmutableNode - int size = 0; - for (int i = 0; i < node.content.length; i++) { - if (node.content[i] != null) { - size++; - } - } - - int datas = 0; - int nodes = 0; - int resultDataMap = 0; - int resultNodeMap = 0; - final Object[] resultContent = new Object[size]; - int bitposition = 1; - for (int i = 0; i < FACTOR; i++) { - Object key = node.content[i * 2]; - if (key != null) { - resultDataMap |= bitposition; - resultContent[datas * 2] = key; - resultContent[datas * 2 + 1] = node.content[i * 2 + 1]; - datas++; - } else { - @SuppressWarnings("unchecked") - var subnode = (Node) node.content[i * 2 + 1]; - if (subnode != null) { - ImmutableNode immutableSubnode = subnode.toImmutable(cache); - resultNodeMap |= bitposition; - resultContent[size - 1 - nodes] = immutableSubnode; - nodes++; - } - } - bitposition <<= 1; - } - final int resultHash = node.hashCode(); - var newImmutable = new ImmutableNode(resultDataMap, resultNodeMap, resultContent, resultHash); - - // 3. save new immutable. - if (cache != null) { - cache.put(newImmutable, newImmutable); - } - return newImmutable; - } - - private int index(int bitmap, int bitpos) { - return Integer.bitCount(bitmap & (bitpos - 1)); - } - - @Override - public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { - int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); - int bitposition = 1 << selectedHashFragment; - // If the key is stored as a data - if ((dataMap & bitposition) != 0) { - int keyIndex = 2 * index(dataMap, bitposition); - @SuppressWarnings("unchecked") - K keyCandidate = (K) content[keyIndex]; - if (keyCandidate.equals(key)) { - @SuppressWarnings("unchecked") - V value = (V) content[keyIndex + 1]; - return value; - } else { - return defaultValue; - } - } - // the key is stored as a node - else if ((nodeMap & bitposition) != 0) { - int keyIndex = content.length - 1 - index(nodeMap, bitposition); - @SuppressWarnings("unchecked") - var subNode = (ImmutableNode) content[keyIndex]; - int newDepth = depth + 1; - int newHash = newHash(hashProvider, key, hash, newDepth); - return subNode.getValue(key, hashProvider, defaultValue, newHash, newDepth); - } - // the key is not stored at all - else { - return defaultValue; - } - } - - @Override - public Node putValue(K key, V value, OldValueBox oldValue, ContinousHashProvider hashProvider, - V defaultValue, int hash, int depth) { - int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); - int bitposition = 1 << selectedHashFragment; - if ((dataMap & bitposition) != 0) { - int keyIndex = 2 * index(dataMap, bitposition); - @SuppressWarnings("unchecked") - K keyCandidate = (K) content[keyIndex]; - if (keyCandidate.equals(key)) { - if (value == defaultValue) { - // delete - MutableNode mutable = this.toMutable(); - return mutable.removeEntry(selectedHashFragment, oldValue); - } else if (value == content[keyIndex + 1]) { - // dont change - oldValue.setOldValue(value); - return this; - } else { - // update existing value - MutableNode mutable = this.toMutable(); - return mutable.updateValue(value, oldValue, selectedHashFragment); - } - } else { - if (value == defaultValue) { - // dont change - oldValue.setOldValue(defaultValue); - return this; - } else { - // add new key + value - MutableNode mutable = this.toMutable(); - return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); - } - } - } else if ((nodeMap & bitposition) != 0) { - int keyIndex = content.length - 1 - index(nodeMap, 
bitposition); - @SuppressWarnings("unchecked") - var subNode = (ImmutableNode) content[keyIndex]; - int newDepth = depth + 1; - int newHash = newHash(hashProvider, key, hash, newDepth); - var newsubNode = subNode.putValue(key, value, oldValue, hashProvider, defaultValue, newHash, newDepth); - - if (subNode == newsubNode) { - // nothing changed - return this; - } else { - MutableNode mutable = toMutable(); - return mutable.updateWithSubNode(selectedHashFragment, newsubNode, value.equals(defaultValue)); - } - } else { - // add new key + value - MutableNode mutable = this.toMutable(); - return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); - } - } - - @Override - public long getSize() { - int result = Integer.bitCount(this.dataMap); - for (int subnodeIndex = 0; subnodeIndex < Integer.bitCount(this.nodeMap); subnodeIndex++) { - @SuppressWarnings("unchecked") - var subnode = (ImmutableNode) this.content[this.content.length - 1 - subnodeIndex]; - result += subnode.getSize(); - } - return result; - } - - @Override - protected MutableNode toMutable() { - return new MutableNode<>(this); - } - - @Override - public ImmutableNode toImmutable(Map, ImmutableNode> cache) { - return this; - } - - @Override - protected MutableNode isMutable() { - return null; - } - - @SuppressWarnings("unchecked") - @Override - boolean moveToNext(MapCursor cursor) { - // 1. try to move to data - int datas = Integer.bitCount(this.dataMap); - if (cursor.dataIndex != MapCursor.INDEX_FINISH) { - int newDataIndex = cursor.dataIndex + 1; - if (newDataIndex < datas) { - cursor.dataIndex = newDataIndex; - cursor.key = (K) this.content[newDataIndex * 2]; - cursor.value = (V) this.content[newDataIndex * 2 + 1]; - return true; - } else { - cursor.dataIndex = MapCursor.INDEX_FINISH; - } - } - - // 2. look inside the subnodes - int nodes = Integer.bitCount(this.nodeMap); - int newNodeIndex = cursor.nodeIndexStack.peek() + 1; - if (newNodeIndex < nodes) { - // 2.1 found next subnode, move down to the subnode - Node subnode = (Node) this.content[this.content.length - 1 - newNodeIndex]; - cursor.dataIndex = MapCursor.INDEX_START; - cursor.nodeIndexStack.pop(); - cursor.nodeIndexStack.push(newNodeIndex); - cursor.nodeIndexStack.push(MapCursor.INDEX_START); - cursor.nodeStack.push(subnode); - return subnode.moveToNext(cursor); - } else { - // 3. 
no subnode found, move up - cursor.nodeStack.pop(); - cursor.nodeIndexStack.pop(); - if (!cursor.nodeStack.isEmpty()) { - Node supernode = cursor.nodeStack.peek(); - return supernode.moveToNext(cursor); - } else { - cursor.key = null; - cursor.value = null; - return false; - } - } - } - - @Override - public void prettyPrint(StringBuilder builder, int depth, int code) { - for (int i = 0; i < depth; i++) { - builder.append("\t"); - } - if (code >= 0) { - builder.append(code); - builder.append(":"); - } - builder.append("Immutable("); - boolean hadContent = false; - int dataMask = 1; - for (int i = 0; i < FACTOR; i++) { - if ((dataMask & dataMap) != 0) { - if (hadContent) { - builder.append(","); - } - builder.append(i); - builder.append(":["); - builder.append(content[2 * index(dataMap, dataMask)].toString()); - builder.append("]->["); - builder.append(content[2 * index(dataMap, dataMask) + 1].toString()); - builder.append("]"); - hadContent = true; - } - dataMask <<= 1; - } - builder.append(")"); - int nodeMask = 1; - for (int i = 0; i < FACTOR; i++) { - if ((nodeMask & nodeMap) != 0) { - @SuppressWarnings("unchecked") - Node subNode = (Node) content[content.length - 1 - index(nodeMap, nodeMask)]; - builder.append("\n"); - subNode.prettyPrint(builder, depth + 1, i); - } - nodeMask <<= 1; - } - } - - @Override - public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { - if (depth > 0) { - boolean orphaned = Integer.bitCount(dataMap) == 1 && nodeMap == 0; - if (orphaned) { - throw new IllegalStateException("Orphaned node! " + dataMap + ": " + content[0]); - } - } - // check the place of data - - // check subnodes - for (int i = 0; i < Integer.bitCount(nodeMap); i++) { - @SuppressWarnings("unchecked") - var subnode = (Node) this.content[this.content.length - 1 - i]; - if (!(subnode instanceof ImmutableNode)) { - throw new IllegalStateException("Immutable node contains mutable subnodes!"); - } else { - subnode.checkIntegrity(hashProvider, defaultValue, depth + 1); - } - } - } - - @Override - public int hashCode() { - return this.precalculatedHash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (obj instanceof ImmutableNode other) { - return precalculatedHash == other.precalculatedHash && dataMap == other.dataMap && nodeMap == other.nodeMap - && Arrays.deepEquals(content, other.content); - } else if (obj instanceof MutableNode mutableObj) { - return ImmutableNode.compareImmutableMutable(this, mutableObj); - } else { - return false; - } - } - - public static boolean compareImmutableMutable(ImmutableNode immutable, MutableNode mutable) { - int datas = 0; - int nodes = 0; - final int immutableLength = immutable.content.length; - for (int i = 0; i < FACTOR; i++) { - Object key = mutable.content[i * 2]; - // For each key candidate - if (key != null) { - // Check whether a new Key-Value pair can fit into the immutable container - if (datas * 2 + nodes + 2 <= immutableLength) { - if (!immutable.content[datas * 2].equals(key) - || !immutable.content[datas * 2 + 1].equals(mutable.content[i * 2 + 1])) { - return false; - } - } else - return false; - datas++; - } else { - var mutableSubnode = (Node) mutable.content[i * 2 + 1]; - if (mutableSubnode != null) { - if (datas * 2 + nodes + 1 <= immutableLength) { - Object immutableSubnode = immutable.content[immutableLength - 1 - nodes]; - if (!mutableSubnode.equals(immutableSubnode)) { - return false; - } - nodes++; - } else { - return false; - 
} - } - } - } - return true; - } -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/MapCursor.java b/store/src/main/java/tools/refinery/data/map/internal/MapCursor.java deleted file mode 100644 index 44ed1f45..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/MapCursor.java +++ /dev/null @@ -1,131 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.ArrayDeque; -import java.util.ConcurrentModificationException; -import java.util.Iterator; -import java.util.List; - -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.VersionedMap; - -public class MapCursor implements Cursor { - // Constants - static final int INDEX_START = -1; - static final int INDEX_FINISH = -2; - - // Tree stack - ArrayDeque> nodeStack; - ArrayDeque nodeIndexStack; - int dataIndex; - - // Values - K key; - V value; - - // Hash code for checking concurrent modifications - final VersionedMap map; - final int creationHash; - - public MapCursor(Node root, VersionedMap map) { - // Initializing tree stack - super(); - this.nodeStack = new ArrayDeque<>(); - this.nodeIndexStack = new ArrayDeque<>(); - if(root != null) { - this.nodeStack.add(root); - this.nodeIndexStack.push(INDEX_START); - } - - this.dataIndex = INDEX_START; - - // Initializing cache - this.key = null; - this.value = null; - - // Initializing state - this.map=map; - this.creationHash = map.hashCode(); - } - - public K getKey() { - return key; - } - - public V getValue() { - return value; - } - - public boolean isTerminated() { - return this.nodeStack.isEmpty(); - } - - public boolean move() { - if(isDirty()) { - throw new ConcurrentModificationException(); - } - if(!isTerminated()) { - boolean result = this.nodeStack.peek().moveToNext(this); - if(this.nodeIndexStack.size() != this.nodeStack.size()) { - throw new IllegalArgumentException("Node stack is corrupted by illegal moves!"); - } - return result; - } - return false; - } - public boolean skipCurrentNode() { - nodeStack.pop(); - nodeIndexStack.pop(); - dataIndex = INDEX_FINISH; - return move(); - } - @Override - public boolean isDirty() { - return this.map.hashCode() != this.creationHash; - } - @Override - public List> getDependingMaps() { - return List.of(this.map); - } - - public static boolean sameSubnode(MapCursor cursor1, MapCursor cursor2) { - Node nodeOfCursor1 = cursor1.nodeStack.peek(); - Node nodeOfCursor2 = cursor2.nodeStack.peek(); - if(nodeOfCursor1 != null && nodeOfCursor2 != null) { - return nodeOfCursor1.equals(nodeOfCursor2); - } else { - return false; - } - } - - /** - * - * @param - * @param - * @param cursor1 - * @param cursor2 - * @return Positive number if cursor 1 is behind, negative number if cursor 2 is behind, and 0 if they are at the same position. 
- */ - public static int compare(MapCursor cursor1, MapCursor cursor2) { - // two cursors are equally deep - Iterator stack1 = cursor1.nodeIndexStack.descendingIterator(); - Iterator stack2 = cursor2.nodeIndexStack.descendingIterator(); - if(stack1.hasNext()) { - if(!stack2.hasNext()) { - // stack 2 has no more element, thus stack 1 is deeper - return 1; - } - int val1 = stack1.next(); - int val2 = stack2.next(); - if(val1 < val2) { - return -1; - } else if(val2 < val1) { - return 1; - } - } - if(stack2.hasNext()) { - // stack 2 has more element, thus stack 2 is deeper - return 1; - } - return Integer.compare(cursor1.dataIndex, cursor2.dataIndex); - } -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/MapDiffCursor.java b/store/src/main/java/tools/refinery/data/map/internal/MapDiffCursor.java deleted file mode 100644 index dcff3c06..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/MapDiffCursor.java +++ /dev/null @@ -1,221 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.List; -import java.util.stream.Stream; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.map.VersionedMap; - -/** - * A cursor representing the difference between two states of a map. - * - * @author Oszkar Semerath - * - */ -public class MapDiffCursor implements DiffCursor, Cursor { - /** - * Default value representing missing elements. - */ - private V defaultValue; - private MapCursor cursor1; - private MapCursor cursor2; - private ContinousHashProvider hashProvider; - - // Values - private K key; - private V fromValue; - private V toValue; - - // State - /** - * Positive number if cursor 1 is behind, negative number if cursor 2 is behind, - * and 0 if they are at the same position. 
- */ - private int cursorRelation; - private HashClash hashClash = HashClash.NONE; - - public MapDiffCursor(ContinousHashProvider hashProvider, V defaultValue, Cursor cursor1, - Cursor cursor2) { - super(); - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - this.cursor1 = (MapCursor) cursor1; - this.cursor2 = (MapCursor) cursor2; - } - - @Override - public K getKey() { - return key; - } - - @Override - public V getFromValue() { - return fromValue; - } - - @Override - public V getToValue() { - return toValue; - } - - @Override - public V getValue() { - return getToValue(); - } - - public boolean isTerminated() { - return cursor1.isTerminated() && cursor2.isTerminated(); - } - - @Override - public boolean isDirty() { - return this.cursor1.isDirty() || this.cursor2.isDirty(); - } - - @Override - public List> getDependingMaps() { - return Stream.concat(cursor1.getDependingMaps().stream(), cursor2.getDependingMaps().stream()).toList(); - } - - protected void updateState() { - if (!isTerminated()) { - this.cursorRelation = MapCursor.compare(cursor1, cursor2); - if (cursorRelation > 0 || cursor2.isTerminated()) { - this.key = cursor1.getKey(); - this.fromValue = cursor1.getValue(); - this.toValue = defaultValue; - } else if (cursorRelation < 0 || cursor1.isTerminated()) { - this.key = cursor2.getKey(); - this.fromValue = defaultValue; - this.toValue = cursor1.getValue(); - } else { - // cursor1 = cursor2 - if (cursor1.getKey().equals(cursor2.getKey())) { - this.key = cursor1.getKey(); - this.fromValue = cursor1.getValue(); - this.toValue = defaultValue; - } else { - resolveHashClashWithFirstEntry(); - } - } - } - } - - protected void resolveHashClashWithFirstEntry() { - int compareResult = this.hashProvider.compare(cursor1.key, cursor2.key); - if (compareResult < 0) { - this.hashClash = HashClash.STUCK_CURSOR_2; - this.cursorRelation = 0; - this.key = cursor1.key; - this.fromValue = cursor1.value; - this.toValue = defaultValue; - } else if (compareResult > 0) { - this.hashClash = HashClash.STUCK_CURSOR_1; - this.cursorRelation = 0; - this.key = cursor2.key; - this.fromValue = defaultValue; - this.toValue = cursor2.value; - } else { - throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); - } - } - - protected boolean isInHashClash() { - return this.hashClash != HashClash.NONE; - } - - protected void resolveHashClashWithSecondEntry() { - switch (this.hashClash) { - case STUCK_CURSOR_1: - this.hashClash = HashClash.NONE; - this.cursorRelation = 0; - this.key = cursor1.key; - this.fromValue = cursor1.value; - this.toValue = defaultValue; - break; - case STUCK_CURSOR_2: - this.hashClash = HashClash.NONE; - this.cursorRelation = 0; - this.key = cursor2.key; - this.fromValue = defaultValue; - this.toValue = cursor2.value; - break; - default: - throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); - } - } - - protected boolean sameValues() { - if (this.fromValue == null) { - return this.toValue == null; - } else { - return this.fromValue.equals(this.toValue); - } - } - - protected boolean moveOne() { - if (isTerminated()) { - return false; - } - if (this.cursorRelation > 0 || cursor2.isTerminated()) { - return cursor1.move(); - } else if (this.cursorRelation < 0 || cursor1.isTerminated()) { - return cursor2.move(); - } else { - boolean moved1 = cursor1.move(); - boolean moved2 = cursor2.move(); - return moved1 && moved2; - } - } - - private boolean skipNode() { - if (isTerminated()) { - throw new 
IllegalStateException("DiffCursor tries to skip when terminated!"); - } - boolean update1 = cursor1.skipCurrentNode(); - boolean update2 = cursor2.skipCurrentNode(); - updateState(); - return update1 && update2; - } - - protected boolean moveToConsistentState() { - if (!isTerminated()) { - boolean changed; - boolean lastResult = true; - do { - changed = false; - if (MapCursor.sameSubnode(cursor1, cursor2)) { - lastResult = skipNode(); - changed = true; - } - if (sameValues()) { - lastResult = moveOne(); - changed = true; - } - updateState(); - } while (changed && !isTerminated()); - return lastResult; - } else { - return false; - } - } - - public boolean move() { - if (!isTerminated()) { - if (isInHashClash()) { - this.resolveHashClashWithSecondEntry(); - return true; - } else { - if (moveOne()) { - return moveToConsistentState(); - } else { - return false; - } - } - - } else - return false; - } -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/MutableNode.java b/store/src/main/java/tools/refinery/data/map/internal/MutableNode.java deleted file mode 100644 index 5522051a..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/MutableNode.java +++ /dev/null @@ -1,456 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.Arrays; -import java.util.Map; - -import tools.refinery.data.map.ContinousHashProvider; - -public class MutableNode extends Node { - int cachedHash; - protected Object[] content; - - protected MutableNode() { - this.content = new Object[2 * FACTOR]; - updateHash(); - } - - public static MutableNode initialize(K key, V value, ContinousHashProvider hashProvider, - V defaultValue) { - if (value == defaultValue) { - return null; - } else { - int hash = hashProvider.getHash(key, 0); - int fragment = hashFragment(hash, 0); - MutableNode res = new MutableNode<>(); - res.content[2 * fragment] = key; - res.content[2 * fragment + 1] = value; - res.updateHash(); - return res; - } - } - - /** - * Constructs a {@link MutableNode} as a copy of an {@link ImmutableNode} - * - * @param node - */ - protected MutableNode(ImmutableNode node) { - this.content = new Object[2 * FACTOR]; - int dataUsed = 0; - int nodeUsed = 0; - for (int i = 0; i < FACTOR; i++) { - int bitposition = 1 << i; - if ((node.dataMap & bitposition) != 0) { - content[2 * i] = node.content[dataUsed * 2]; - content[2 * i + 1] = node.content[dataUsed * 2 + 1]; - dataUsed++; - } else if ((node.nodeMap & bitposition) != 0) { - content[2 * i + 1] = node.content[node.content.length - 1 - nodeUsed]; - nodeUsed++; - } - } - this.cachedHash = node.hashCode(); - } - - @Override - public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { - int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); - @SuppressWarnings("unchecked") - K keyCandidate = (K) this.content[2 * selectedHashFragment]; - if (keyCandidate != null) { - if (keyCandidate.equals(key)) { - @SuppressWarnings("unchecked") - V value = (V) this.content[2 * selectedHashFragment + 1]; - return value; - } else { - return defaultValue; - } - } else { - @SuppressWarnings("unchecked") - var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; - if (nodeCandidate != null) { - int newDepth = depth + 1; - int newHash = newHash(hashProvider, key, hash, newDepth); - return nodeCandidate.getValue(key, hashProvider, defaultValue, newHash, newDepth); - } else { - return defaultValue; - } - } - } - - @Override - public Node putValue(K key, V value, OldValueBox oldValue, 
ContinousHashProvider hashProvider, - V defaultValue, int hash, int depth) { - int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); - @SuppressWarnings("unchecked") - K keyCandidate = (K) content[2 * selectedHashFragment]; - if (keyCandidate != null) { - // If has key - if (keyCandidate.equals(key)) { - // The key is equals to an existing key -> update entry - if (value == defaultValue) { - return removeEntry(selectedHashFragment, oldValue); - } else { - return updateValue(value, oldValue, selectedHashFragment); - } - } else { - // The key is not equivalent to an existing key on the same hash bin - // -> split entry if it is necessary - if (value == defaultValue) { - // Value is default -> do not need to add new node - oldValue.setOldValue(defaultValue); - return this; - } else { - // Value is not default -> Split entry data to a new node - oldValue.setOldValue(defaultValue); - return moveDownAndSplit(hashProvider, key, value, keyCandidate, hash, depth, selectedHashFragment); - } - } - } else { - // If it does not have key, check for value - @SuppressWarnings("unchecked") - var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; - if (nodeCandidate != null) { - // If it has value, it is a subnode -> upate that - var newNode = nodeCandidate.putValue(key, value, oldValue, hashProvider, defaultValue, - newHash(hashProvider, key, hash, depth + 1), depth + 1); - return updateWithSubNode(selectedHashFragment, newNode, value.equals(defaultValue)); - } else { - // If it does not have value, put it in the empty place - if (value == defaultValue) { - // dont need to add new key-value pair - oldValue.setOldValue(defaultValue); - return this; - } else { - return addEntry(key, value, oldValue, selectedHashFragment); - } - - } - } - } - - private Node addEntry(K key, V value, OldValueBox oldValueBox, int selectedHashFragment) { - content[2 * selectedHashFragment] = key; - @SuppressWarnings("unchecked") - V oldValue = (V) content[2 * selectedHashFragment + 1]; - oldValueBox.setOldValue(oldValue); - content[2 * selectedHashFragment + 1] = value; - updateHash(); - return this; - } - - /** - * Updates an entry in a selected hash-fragment to a non-default value. - * - * @param value - * @param selectedHashFragment - * @return - */ - @SuppressWarnings("unchecked") - Node updateValue(V value, OldValueBox oldValue, int selectedHashFragment) { - oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); - content[2 * selectedHashFragment + 1] = value; - updateHash(); - return this; - } - - /** - * - * @param selectedHashFragment - * @param newNode - * @return - */ - Node updateWithSubNode(int selectedHashFragment, Node newNode, boolean deletionHappened) { - if (deletionHappened) { - if (newNode == null) { - // Check whether this node become empty - content[2 * selectedHashFragment + 1] = null; // i.e. 
the new node - if (hasContent()) { - updateHash(); - return this; - } else { - return null; - } - } else { - // check whether newNode is orphan - MutableNode immutableNewNode = newNode.isMutable(); - if (immutableNewNode != null) { - int orphaned = immutableNewNode.isOrphaned(); - if (orphaned >= 0) { - // orphan subnode data is replaced with data - content[2 * selectedHashFragment] = immutableNewNode.content[orphaned * 2]; - content[2 * selectedHashFragment + 1] = immutableNewNode.content[orphaned * 2 + 1]; - updateHash(); - return this; - } - } - } - } - // normal behaviour - content[2 * selectedHashFragment + 1] = newNode; - updateHash(); - return this; - - } - - private boolean hasContent() { - for (Object element : this.content) { - if (element != null) - return true; - } - return false; - } - - @Override - protected MutableNode isMutable() { - return this; - } - - protected int isOrphaned() { - int dataFound = -2; - for (int i = 0; i < FACTOR; i++) { - if (content[i * 2] != null) { - if (dataFound >= 0) { - return -1; - } else { - dataFound = i; - } - } else if (content[i * 2 + 1] != null) { - return -3; - } - } - return dataFound; - } - - @SuppressWarnings("unchecked") - private Node moveDownAndSplit(ContinousHashProvider hashProvider, K newKey, V newValue, - K previousKey, int hashOfNewKey, int depth, int selectedHashFragmentOfCurrentDepth) { - V previousValue = (V) content[2 * selectedHashFragmentOfCurrentDepth + 1]; - - MutableNode newSubNode = newNodeWithTwoEntries(hashProvider, previousKey, previousValue, - hashProvider.getHash(previousKey, hashDepth(depth)), newKey, newValue, hashOfNewKey, depth + 1); - - content[2 * selectedHashFragmentOfCurrentDepth] = null; - content[2 * selectedHashFragmentOfCurrentDepth + 1] = newSubNode; - updateHash(); - return this; - } - - // Pass everything as parameters for performance. 
- @SuppressWarnings("squid:S107") - private MutableNode newNodeWithTwoEntries(ContinousHashProvider hashProvider, K key1, V value1, - int oldHash1, K key2, V value2, int oldHash2, int newdepth) { - int newHash1 = newHash(hashProvider, key1, oldHash1, newdepth); - int newHash2 = newHash(hashProvider, key2, oldHash2, newdepth); - int newFragment1 = hashFragment(newHash1, shiftDepth(newdepth)); - int newFragment2 = hashFragment(newHash2, shiftDepth(newdepth)); - - MutableNode subNode = new MutableNode<>(); - if (newFragment1 != newFragment2) { - subNode.content[newFragment1 * 2] = key1; - subNode.content[newFragment1 * 2 + 1] = value1; - - subNode.content[newFragment2 * 2] = key2; - subNode.content[newFragment2 * 2 + 1] = value2; - } else { - MutableNode subSubNode = newNodeWithTwoEntries(hashProvider, key1, value1, newHash1, key2, value2, - newHash2, newdepth + 1); - subNode.content[newFragment1 * 2 + 1] = subSubNode; - } - subNode.updateHash(); - return subNode; - } - - @SuppressWarnings("unchecked") - Node removeEntry(int selectedHashFragment, OldValueBox oldValue) { - content[2 * selectedHashFragment] = null; - oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); - content[2 * selectedHashFragment + 1] = null; - if (hasContent()) { - updateHash(); - return this; - } else { - return null; - } - } - - @SuppressWarnings("unchecked") - @Override - public long getSize() { - int size = 0; - for (int i = 0; i < FACTOR; i++) { - if (content[i * 2] != null) { - size++; - } else { - Node nodeCandidate = (Node) content[i * 2 + 1]; - if (nodeCandidate != null) { - size += nodeCandidate.getSize(); - } - } - } - return size; - } - - @Override - protected MutableNode toMutable() { - return this; - } - - @Override - public ImmutableNode toImmutable(Map, ImmutableNode> cache) { - return ImmutableNode.constructImmutable(this, cache); - } - - @SuppressWarnings("unchecked") - @Override - boolean moveToNext(MapCursor cursor) { - // 1. try to move to data - if (cursor.dataIndex != MapCursor.INDEX_FINISH) { - for (int index = cursor.dataIndex + 1; index < FACTOR; index++) { - if (this.content[index * 2] != null) { - // 1.1 found next data - cursor.dataIndex = index; - cursor.key = (K) this.content[index * 2]; - cursor.value = (V) this.content[index * 2 + 1]; - return true; - } - } - cursor.dataIndex = MapCursor.INDEX_FINISH; - } - - // 2. look inside the subnodes - for (int index = cursor.nodeIndexStack.peek() + 1; index < FACTOR; index++) { - if (this.content[index * 2] == null && this.content[index * 2 + 1] != null) { - // 2.1 found next subnode, move down to the subnode - Node subnode = (Node) this.content[index * 2 + 1]; - - cursor.dataIndex = MapCursor.INDEX_START; - cursor.nodeIndexStack.pop(); - cursor.nodeIndexStack.push(index); - cursor.nodeIndexStack.push(MapCursor.INDEX_START); - cursor.nodeStack.push(subnode); - - return subnode.moveToNext(cursor); - } - } - // 3. 
no subnode found, move up - cursor.nodeStack.pop(); - cursor.nodeIndexStack.pop(); - if (!cursor.nodeStack.isEmpty()) { - Node supernode = cursor.nodeStack.peek(); - return supernode.moveToNext(cursor); - } else { - cursor.key = null; - cursor.value = null; - return false; - } - } - - @Override - public void prettyPrint(StringBuilder builder, int depth, int code) { - for (int i = 0; i < depth; i++) { - builder.append("\t"); - } - if (code >= 0) { - builder.append(code); - builder.append(":"); - } - builder.append("Mutable("); - // print content - boolean hadContent = false; - for (int i = 0; i < FACTOR; i++) { - if (content[2 * i] != null) { - if (hadContent) { - builder.append(","); - } - builder.append(i); - builder.append(":["); - builder.append(content[2 * i].toString()); - builder.append("]->["); - builder.append(content[2 * i + 1].toString()); - builder.append("]"); - hadContent = true; - } - } - builder.append(")"); - // print subnodes - for (int i = 0; i < FACTOR; i++) { - if (content[2 * i] == null && content[2 * i + 1] != null) { - @SuppressWarnings("unchecked") - Node subNode = (Node) content[2 * i + 1]; - builder.append("\n"); - subNode.prettyPrint(builder, depth + 1, i); - } - } - } - - @Override - public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { - // check for orphan nodes - if (depth > 0) { - int orphaned = isOrphaned(); - if (orphaned >= 0) { - throw new IllegalStateException("Orphaned node! " + orphaned + ": " + content[2 * orphaned]); - } - } - // check the place of data - for (int i = 0; i < FACTOR; i++) { - if (this.content[2 * i] != null) { - @SuppressWarnings("unchecked") - K key = (K) this.content[2 * i]; - @SuppressWarnings("unchecked") - V value = (V) this.content[2 * i + 1]; - - if (value == defaultValue) { - throw new IllegalStateException("Node contains default value!"); - } - int hashCode = hashProvider.getHash(key, hashDepth(depth)); - int shiftDepth = shiftDepth(depth); - int selectedHashFragment = hashFragment(hashCode, shiftDepth); - if (i != selectedHashFragment) { - throw new IllegalStateException("Key " + key + " with hash code " + hashCode - + " is in bad place! Fragment=" + selectedHashFragment + ", Place=" + i); - } - } - } - // check subnodes - for (int i = 0; i < FACTOR; i++) { - if (this.content[2 * i + 1] != null && this.content[2 * i] == null) { - @SuppressWarnings("unchecked") - var subNode = (Node) this.content[2 * i + 1]; - subNode.checkIntegrity(hashProvider, defaultValue, depth + 1); - } - } - // check the hash - int oldHash = this.cachedHash; - updateHash(); - int newHash = this.cachedHash; - if (oldHash != newHash) { - throw new IllegalStateException("Hash code was not up to date! 
(old=" + oldHash + ",new=" + newHash + ")"); - } - } - - protected void updateHash() { - this.cachedHash = Arrays.hashCode(content); - } - - @Override - public int hashCode() { - return this.cachedHash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (obj instanceof MutableNode mutableObj) { - return Arrays.deepEquals(this.content, mutableObj.content); - } else if (obj instanceof ImmutableNode immutableObj) { - return ImmutableNode.compareImmutableMutable(immutableObj, this); - } else { - return false; - } - } -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/Node.java b/store/src/main/java/tools/refinery/data/map/internal/Node.java deleted file mode 100644 index b99275d9..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/Node.java +++ /dev/null @@ -1,85 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.Map; - -import tools.refinery.data.map.ContinousHashProvider; - -public abstract class Node{ - public static final int BRANCHING_FACTOR_BITS = 5; - public static final int FACTOR = 1< hashProvider, V defaultValue, int hash, int depth); - public abstract Node putValue(K key, V value, OldValueBox old, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth); - public abstract long getSize(); - - abstract MutableNode toMutable(); - public abstract ImmutableNode toImmutable( - Map,ImmutableNode> cache); - protected abstract MutableNode isMutable(); - /** - * Moves a {@link MapCursor} to its next position. - * @param cursor the cursor - * @return Whether there was a next value to move on. - */ - abstract boolean moveToNext(MapCursor cursor); - - ///////// FOR printing - public abstract void prettyPrint(StringBuilder builder, int depth, int code); - @Override - public String toString() { - StringBuilder stringBuilder = new StringBuilder(); - prettyPrint(stringBuilder, 0, -1); - return stringBuilder.toString(); - } - public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) {} - -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/OldValueBox.java b/store/src/main/java/tools/refinery/data/map/internal/OldValueBox.java deleted file mode 100644 index 641c831b..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/OldValueBox.java +++ /dev/null @@ -1,19 +0,0 @@ -package tools.refinery.data.map.internal; - -public class OldValueBox{ - V oldValue; - boolean isSet = false; - - public V getOldValue() { - if(!isSet) throw new IllegalStateException(); - isSet = false; - return oldValue; - } - - public void setOldValue(V ouldValue) { - if(isSet) throw new IllegalStateException(); - this.oldValue = ouldValue; - isSet = true; - } - -} diff --git a/store/src/main/java/tools/refinery/data/map/internal/VersionedMapImpl.java b/store/src/main/java/tools/refinery/data/map/internal/VersionedMapImpl.java deleted file mode 100644 index 8aa9686f..00000000 --- a/store/src/main/java/tools/refinery/data/map/internal/VersionedMapImpl.java +++ /dev/null @@ -1,171 +0,0 @@ -package tools.refinery.data.map.internal; - -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.map.VersionedMap; -import tools.refinery.data.map.VersionedMapStoreImpl; - -/** - * Not threadSafe in itself - * @author Oszkar Semerath - * - * @param 
- * @param - */ -public class VersionedMapImpl implements VersionedMap{ - protected final VersionedMapStoreImpl store; - - protected final ContinousHashProvider hashProvider; - protected final V defaultValue; - protected Node root; - - private OldValueBox oldValueBox = new OldValueBox<>(); - - public VersionedMapImpl( - VersionedMapStoreImpl store, - ContinousHashProvider hashProvider, - V defaultValue) - { - this.store = store; - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - this.root = null; - } - public VersionedMapImpl( - VersionedMapStoreImpl store, - ContinousHashProvider hashProvider, - V defaultValue, Node data) - { - this.store = store; - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - this.root = data; - } - - public V getDefaultValue() { - return defaultValue; - } - public ContinousHashProvider getHashProvider() { - return hashProvider; - } - @Override - public V put(K key, V value) { - if(root!=null) { - root = root.putValue(key, value, oldValueBox, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); - return oldValueBox.getOldValue(); - } else { - root = MutableNode.initialize(key, value, hashProvider, defaultValue); - return defaultValue; - } - } - - @Override - public void putAll(Cursor cursor) { - if(cursor.getDependingMaps().contains(this)) { - List keys = new LinkedList<>(); - List values = new LinkedList<>(); - while(cursor.move()) { - keys.add(cursor.getKey()); - values.add(cursor.getValue()); - } - Iterator keyIterator = keys.iterator(); - Iterator valueIterator = values.iterator(); - while(keyIterator.hasNext()) { - this.put(keyIterator.next(), valueIterator.next()); - } - } else { - while(cursor.move()) { - this.put(cursor.getKey(), cursor.getValue()); - } - } - } - - @Override - public V get(K key) { - if(root!=null) { - return root.getValue(key, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); - } else { - return defaultValue; - } - } - @Override - public long getSize() { - if(root == null) { - return 0; - } else { - return root.getSize(); - } - } - - @Override - public Cursor getAll() { - return new MapCursor<>(this.root,this); - } - @Override - public DiffCursor getDiffCursor(long toVersion) { - Cursor fromCursor = this.getAll(); - VersionedMap toMap = this.store.createMap(toVersion); - Cursor toCursor = toMap.getAll(); - return new MapDiffCursor<>(this.hashProvider,this.defaultValue, fromCursor, toCursor); - - } - - - @Override - public long commit() { - return this.store.commit(root,this); - } - public void setRoot(Node root) { - this.root = root; - } - - @Override - public void restore(long state) { - root = this.store.revert(state); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((root == null) ? 
0 : root.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - VersionedMapImpl other = (VersionedMapImpl) obj; - if (root == null) { - if (other.root != null) - return false; - } else if (!root.equals(other.root)) - return false; - return true; - } - public void prettyPrint() { - StringBuilder s = new StringBuilder(); - if(this.root != null) { - this.root.prettyPrint(s, 0, -1); - System.out.println(s.toString()); - } else { - System.out.println("empty tree"); - } - } - public void checkIntegrity() { - if(this.root != null) { - this.root.checkIntegrity(hashProvider, defaultValue, 0); - } - } - -} diff --git a/store/src/main/java/tools/refinery/data/model/Model.java b/store/src/main/java/tools/refinery/data/model/Model.java deleted file mode 100644 index 2d0b350a..00000000 --- a/store/src/main/java/tools/refinery/data/model/Model.java +++ /dev/null @@ -1,20 +0,0 @@ -package tools.refinery.data.model; - -import java.util.Set; - -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.Versioned; -import tools.refinery.data.model.representation.DataRepresentation; - -public interface Model extends Versioned{ - @SuppressWarnings("squid:S1452") - Set> getDataRepresentations(); - - V get(DataRepresentation representation, K key); - Cursor getAll(DataRepresentation representation); - V put(DataRepresentation representation, K key, V value); - void putAll(DataRepresentation representation, Cursor cursor); - long getSize(DataRepresentation representation); - - ModelDiffCursor getDiffCursor(long to); -} diff --git a/store/src/main/java/tools/refinery/data/model/ModelCursor.java b/store/src/main/java/tools/refinery/data/model/ModelCursor.java deleted file mode 100644 index e3537c7d..00000000 --- a/store/src/main/java/tools/refinery/data/model/ModelCursor.java +++ /dev/null @@ -1,25 +0,0 @@ -package tools.refinery.data.model; - -import java.util.Map; - -import tools.refinery.data.map.Cursor; -import tools.refinery.data.model.representation.DataRepresentation; - -public class ModelCursor { - final Map,Cursor> cursors; - - public ModelCursor(Map, Cursor> cursors) { - super(); - this.cursors = cursors; - } - - @SuppressWarnings("unchecked") - public Cursor getCursor(DataRepresentation representation) { - Cursor cursor = cursors.get(representation); - if(cursor != null) { - return (Cursor) cursor; - } else { - throw new IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); - } - } -} diff --git a/store/src/main/java/tools/refinery/data/model/ModelDiffCursor.java b/store/src/main/java/tools/refinery/data/model/ModelDiffCursor.java deleted file mode 100644 index cd3eb316..00000000 --- a/store/src/main/java/tools/refinery/data/model/ModelDiffCursor.java +++ /dev/null @@ -1,26 +0,0 @@ -package tools.refinery.data.model; - -import java.util.Map; - -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.model.representation.DataRepresentation; - -public class ModelDiffCursor { - final Map,DiffCursor> diffcursors; - - public ModelDiffCursor(Map, DiffCursor> diffcursors) { - super(); - this.diffcursors = diffcursors; - } - - @SuppressWarnings("unchecked") - public DiffCursor getCursor(DataRepresentation representation) { - Cursor cursor = diffcursors.get(representation); - if(cursor != null) { - return (DiffCursor) cursor; - } else { - throw new 
IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); - } - } -} diff --git a/store/src/main/java/tools/refinery/data/model/ModelStore.java b/store/src/main/java/tools/refinery/data/model/ModelStore.java deleted file mode 100644 index 03c1bcca..00000000 --- a/store/src/main/java/tools/refinery/data/model/ModelStore.java +++ /dev/null @@ -1,16 +0,0 @@ -package tools.refinery.data.model; - -import java.util.Set; - -import tools.refinery.data.model.representation.DataRepresentation; - -public interface ModelStore { - @SuppressWarnings("squid:S1452") - Set> getDataRepresentations(); - - Model createModel(); - Model createModel(long state); - - Set getStates(); - ModelDiffCursor getDiffCursor(long from, long to); -} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/data/model/ModelStoreImpl.java b/store/src/main/java/tools/refinery/data/model/ModelStoreImpl.java deleted file mode 100644 index fcad1d99..00000000 --- a/store/src/main/java/tools/refinery/data/model/ModelStoreImpl.java +++ /dev/null @@ -1,121 +0,0 @@ -package tools.refinery.data.model; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.map.VersionedMap; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.model.internal.ModelImpl; -import tools.refinery.data.model.internal.SimilarRelationEquivalenceClass; -import tools.refinery.data.model.representation.AuxilaryData; -import tools.refinery.data.model.representation.DataRepresentation; -import tools.refinery.data.model.representation.Relation; - -public class ModelStoreImpl implements ModelStore { - - private final Map, VersionedMapStore> stores; - - public ModelStoreImpl(Set> dataRepresentations) { - stores = initStores(dataRepresentations); - } - - private Map, VersionedMapStore> initStores( - Set> dataRepresentations) { - Map, VersionedMapStore> result = new HashMap<>(); - - Map>> symbolRepresentationsPerHashPerArity = new HashMap<>(); - - for (DataRepresentation dataRepresentation : dataRepresentations) { - if (dataRepresentation instanceof Relation symbolRepresentation) { - addOrCreate(symbolRepresentationsPerHashPerArity, - new SimilarRelationEquivalenceClass(symbolRepresentation), symbolRepresentation); - } else if (dataRepresentation instanceof AuxilaryData) { - VersionedMapStoreImpl store = new VersionedMapStoreImpl<>(dataRepresentation.getHashProvider(), - dataRepresentation.getDefaultValue()); - result.put(dataRepresentation, store); - } else { - throw new UnsupportedOperationException( - "Model store does not have strategy to use " + dataRepresentation.getClass() + "!"); - } - } - for (List> symbolGroup : symbolRepresentationsPerHashPerArity.values()) { - initRepresentationGroup(result, symbolGroup); - } - - return result; - } - - private void initRepresentationGroup(Map, VersionedMapStore> result, - List> symbolGroup) { - final ContinousHashProvider hashProvider = symbolGroup.get(0).getHashProvider(); - final Object defaultValue = symbolGroup.get(0).getDefaultValue(); - - List> maps = VersionedMapStoreImpl - .createSharedVersionedMapStores(symbolGroup.size(), hashProvider, defaultValue); - - for (int i = 0; i < symbolGroup.size(); i++) { - result.put(symbolGroup.get(i), maps.get(i)); - } 
- } - - private static void addOrCreate(Map> map, K key, V value) { - List list; - if (map.containsKey(key)) { - list = map.get(key); - } else { - list = new LinkedList<>(); - map.put(key, list); - } - list.add(value); - } - - @Override - public Set> getDataRepresentations() { - return this.stores.keySet(); - } - - @Override - public ModelImpl createModel() { - Map, VersionedMap> maps = new HashMap<>(); - for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { - maps.put(entry.getKey(), entry.getValue().createMap()); - } - return new ModelImpl(this, maps); - } - - @Override - public synchronized ModelImpl createModel(long state) { - Map, VersionedMap> maps = new HashMap<>(); - for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { - maps.put(entry.getKey(), entry.getValue().createMap(state)); - } - return new ModelImpl(this, maps); - } - - @Override - public synchronized Set getStates() { - var iterator = stores.values().iterator(); - if (iterator.hasNext()) { - return Set.copyOf(iterator.next().getStates()); - } - return Set.of(0l); - } - - @Override - public synchronized ModelDiffCursor getDiffCursor(long from, long to) { - Map, DiffCursor> diffcursors = new HashMap<>(); - for (Entry, VersionedMapStore> entry : stores.entrySet()) { - DataRepresentation representation = entry.getKey(); - DiffCursor diffCursor = entry.getValue().getDiffCursor(from, to); - diffcursors.put(representation, diffCursor); - } - return new ModelDiffCursor(diffcursors); - } -} diff --git a/store/src/main/java/tools/refinery/data/model/Tuple.java b/store/src/main/java/tools/refinery/data/model/Tuple.java deleted file mode 100644 index 1bffae9e..00000000 --- a/store/src/main/java/tools/refinery/data/model/Tuple.java +++ /dev/null @@ -1,148 +0,0 @@ -package tools.refinery.data.model; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public abstract class Tuple { - private static final int CUSTOMTUPLESIZE = 2; - protected static final List tuple1Cash = new ArrayList<>(1024); - - public abstract int getSize(); - public abstract int get(int element); - public abstract int[] toArray(); - - @Override - public String toString() { - StringBuilder b = new StringBuilder(); - b.append("["); - for(int i = 0; i= tuple1Cash.size()) { - newlyCreated = new Tuple1(tuple1Cash.size()); - tuple1Cash.add(newlyCreated); - } - return newlyCreated; - } - } - - public static Tuple of(int... 
values) { - if(values.length == 0) { - return new Tuple0(); - } else if(values.length == 1) { - return of1(values[0]); - } else if(values.length == 2) { - return new Tuple2(values[0],values[1]); - } else return new TupleN(values); - } - - protected IllegalArgumentException doesNotContain(int element) { - return new IllegalArgumentException("Tuple does not contain element "+element); - } - - public static class Tuple0 extends Tuple{ - protected Tuple0() { } - @Override public int getSize() { return 0; } - @Override public int get(int element) { - throw doesNotContain(element); - } - @Override public int[] toArray() {return new int[]{};} - @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - return true; - } - } - public static class Tuple1 extends Tuple{ - final int value0; - protected Tuple1(int value0) { this.value0 = value0; } - @Override public int getSize() { return 1; } - @Override public int get(int element) { - if(element == 0) return value0; - throw doesNotContain(element); - } - @Override public int[] toArray() {return new int[]{ value0 };} - @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Tuple1 other = (Tuple1) obj; - return value0 == other.value0; - } - } - public static class Tuple2 extends Tuple{ - final int value0; - final int value1; - protected Tuple2(int value0, int value1) { this.value0 = value0; this.value1 = value1; } - @Override public int getSize() { return 2; } - @Override public int get(int element) { - if(element == 0) return value0; - else if(element == 1) return value1; - throw doesNotContain(element); - } - @Override public int[] toArray() {return new int[]{ value0,value1 };} - @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Tuple2 other = (Tuple2) obj; - return value0 == other.value0 && value1 == other.value1; - } - } - public static class TupleN extends Tuple{ - final int[] values; - protected TupleN(int[] values) { - if(values.length { - protected static TupleHashProvider instance; - - public static TupleHashProvider singleton() { - if (instance == null) { - instance = new TupleHashProvider(); - } - return instance; - } - - protected static final int[] primes = new int[] { 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, - 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, - 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, - 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, - 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, - 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, - 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, - 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 
977, 983, 991, 997, 1009, 1013, 1019, 1021, - 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, - 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, - 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409, 1423, 1427, 1429, - 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523, 1531, 1543, 1549, - 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, 1663, 1667, - 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, - 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, - 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, 2063, 2069, 2081, 2083, 2087, 2089, - 2099, 2111, 2113, 2129, 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221, 2237, 2239, 2243, - 2251, 2267, 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, 2371, 2377, - 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, - 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, - 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, 2753, 2767, 2777, 2789, 2791, - 2797, 2801, 2803, 2819, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, 3209, 3217, - 3221, 3229, 3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, 3343, 3347, 3359, - 3361, 3371, 3373, 3389, 3391, 3407, 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, - 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, - 3643, 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, 3733, 3739, 3761, 3767, 3769, 3779, 3793, - 3797, 3803, 3821, 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, 3911 }; - - protected static final long LARGESTPRIME30BITS = 1073741789; - - public TupleHashProvider() { - if (primes.length < MAX_PRACTICAL_DEPTH) { - throw new UnsupportedOperationException( - "Not enough prime numbers to support the practical depth of continuous hash!"); - } - } - - @Override - public int getHash(Tuple key, int index) { - if (index >= primes.length) { - throw new IllegalArgumentException("Not enough prime numbers to support index"); - } - long accumulator = 0; - final int prime = primes[index]; - for (int i = 0; i < key.getSize(); i++) { - accumulator = (prime * accumulator + key.get(i)) % LARGESTPRIME30BITS; - } - - return (int) accumulator; - } -} diff --git a/store/src/main/java/tools/refinery/data/model/TupleHashProviderBitMagic.java b/store/src/main/java/tools/refinery/data/model/TupleHashProviderBitMagic.java deleted file mode 100644 index b13b6eca..00000000 --- a/store/src/main/java/tools/refinery/data/model/TupleHashProviderBitMagic.java +++ /dev/null @@ -1,28 +0,0 @@ -package tools.refinery.data.model; - -import tools.refinery.data.map.ContinousHashProvider; - -public class TupleHashProviderBitMagic implements ContinousHashProvider { - - @Override - public int getHash(Tuple key, int index) { - if(key.getSize() == 1) { - return key.get(0); - } - - int result = 0; - final int startBitIndex = index*30; - final int finalBitIndex = startBitIndex+30; - final int arity = key.getSize(); - - for(int i = 
startBitIndex; i<=finalBitIndex; i++) { - final int selectedKey = key.get(i%arity); - final int selectedPosition = 1<<(i/arity); - if((selectedKey&selectedPosition) != 0) { - result |= 1<<(i%30); - } - } - - return result; - } -} diff --git a/store/src/main/java/tools/refinery/data/model/internal/ModelImpl.java b/store/src/main/java/tools/refinery/data/model/internal/ModelImpl.java deleted file mode 100644 index dbf2a000..00000000 --- a/store/src/main/java/tools/refinery/data/model/internal/ModelImpl.java +++ /dev/null @@ -1,124 +0,0 @@ -package tools.refinery.data.model.internal; - -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.map.VersionedMap; -import tools.refinery.data.map.internal.MapDiffCursor; -import tools.refinery.data.model.Model; -import tools.refinery.data.model.ModelDiffCursor; -import tools.refinery.data.model.ModelStore; -import tools.refinery.data.model.representation.DataRepresentation; - -public class ModelImpl implements Model { - private final ModelStore store; - private final Map, VersionedMap> maps; - - public ModelImpl(ModelStore store, Map, VersionedMap> maps) { - this.store = store; - this.maps = maps; - } - - @Override - public Set> getDataRepresentations() { - return maps.keySet(); - } - - @SuppressWarnings("unchecked") - private VersionedMap getMap(DataRepresentation representation) { - if (maps.containsKey(representation)) { - return (VersionedMap) maps.get(representation); - } else { - throw new IllegalArgumentException("Model does have representation " + representation); - } - } - - private VersionedMap getMapValidateKey(DataRepresentation representation, K key) { - if (representation.isValidKey(key)) { - return getMap(representation); - } else { - throw new IllegalArgumentException( - "Key is not valid for representation! 
(representation=" + representation + ", key=" + key + ");"); - } - } - - @Override - public V get(DataRepresentation representation, K key) { - return getMapValidateKey(representation, key).get(key); - } - - @Override - public Cursor getAll(DataRepresentation representation) { - return getMap(representation).getAll(); - } - - @Override - public V put(DataRepresentation representation, K key, V value) { - return getMapValidateKey(representation, key).put(key, value); - } - - @Override - public void putAll(DataRepresentation representation, Cursor cursor) { - getMap(representation).putAll(cursor); - } - - @Override - public long getSize(DataRepresentation representation) { - return getMap(representation).getSize(); - } - - @Override - public ModelDiffCursor getDiffCursor(long to) { - Model toModel = store.createModel(to); - Map, DiffCursor> diffCursors = new HashMap<>(); - for (DataRepresentation representation : this.maps.keySet()) { - MapDiffCursor diffCursor = constructDiffCursor(toModel, representation); - diffCursors.put(representation, diffCursor); - } - return new ModelDiffCursor(diffCursors); - } - - private MapDiffCursor constructDiffCursor(Model toModel, DataRepresentation representation) { - @SuppressWarnings("unchecked") - Cursor fromCursor = (Cursor) this.maps.get(representation).getAll(); - Cursor toCursor = toModel.getAll(representation); - - ContinousHashProvider hashProvider = representation.getHashProvider(); - V defaultValue = representation.getDefaultValue(); - return new MapDiffCursor<>(hashProvider, defaultValue, fromCursor, toCursor); - } - - @Override - public long commit() { - long version = 0; - boolean versionSet = false; - for (VersionedMap map : maps.values()) { - long newVersion = map.commit(); - if (versionSet) { - if (version != newVersion) { - throw new IllegalStateException( - "Maps in model have different versions! 
(" + version + " and" + newVersion + ")"); - } - } else { - version = newVersion; - versionSet = true; - } - } - return version; - } - - @Override - public void restore(long state) { - if(store.getStates().contains(state)) { - for (VersionedMap map : maps.values()) { - map.restore(state); - } - } else { - throw new IllegalArgumentException("Map does not contain state "+state+"!"); - } - } -} diff --git a/store/src/main/java/tools/refinery/data/model/internal/SimilarRelationEquivalenceClass.java b/store/src/main/java/tools/refinery/data/model/internal/SimilarRelationEquivalenceClass.java deleted file mode 100644 index 7f073c0c..00000000 --- a/store/src/main/java/tools/refinery/data/model/internal/SimilarRelationEquivalenceClass.java +++ /dev/null @@ -1,33 +0,0 @@ -package tools.refinery.data.model.internal; - -import java.util.Objects; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.representation.Relation; - -public class SimilarRelationEquivalenceClass { - final ContinousHashProvider hashProvider; - final Object defaultValue; - final int arity; - public SimilarRelationEquivalenceClass(Relation representation) { - this.hashProvider = representation.getHashProvider(); - this.defaultValue = representation.getDefaultValue(); - this.arity = representation.getArity(); - } - @Override - public int hashCode() { - return Objects.hash(arity, defaultValue, hashProvider); - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!(obj instanceof SimilarRelationEquivalenceClass)) - return false; - SimilarRelationEquivalenceClass other = (SimilarRelationEquivalenceClass) obj; - return arity == other.arity && Objects.equals(defaultValue, other.defaultValue) - && Objects.equals(hashProvider, other.hashProvider); - } - -} diff --git a/store/src/main/java/tools/refinery/data/model/representation/AuxilaryData.java b/store/src/main/java/tools/refinery/data/model/representation/AuxilaryData.java deleted file mode 100644 index 9a68506f..00000000 --- a/store/src/main/java/tools/refinery/data/model/representation/AuxilaryData.java +++ /dev/null @@ -1,22 +0,0 @@ -package tools.refinery.data.model.representation; - -import tools.refinery.data.map.ContinousHashProvider; - -public class AuxilaryData extends DataRepresentation { - private final String name; - - public AuxilaryData(String name, ContinousHashProvider hashProvider, V defaultValue) { - super(hashProvider, defaultValue); - this.name = name; - } - - @Override - public String getName() { - return name; - } - - @Override - public boolean isValidKey(K key) { - return true; - } -} diff --git a/store/src/main/java/tools/refinery/data/model/representation/DataRepresentation.java b/store/src/main/java/tools/refinery/data/model/representation/DataRepresentation.java deleted file mode 100644 index c22abdab..00000000 --- a/store/src/main/java/tools/refinery/data/model/representation/DataRepresentation.java +++ /dev/null @@ -1,24 +0,0 @@ -package tools.refinery.data.model.representation; - -import tools.refinery.data.map.ContinousHashProvider; - -public abstract class DataRepresentation { - protected final ContinousHashProvider hashProvider; - protected final V defaultValue; - - protected DataRepresentation(ContinousHashProvider hashProvider, V defaultValue) { - this.hashProvider = hashProvider; - this.defaultValue = defaultValue; - } - - public abstract String getName(); - - public ContinousHashProvider getHashProvider() { - return hashProvider; - 
} - public abstract boolean isValidKey(K key); - - public V getDefaultValue() { - return defaultValue; - } -} diff --git a/store/src/main/java/tools/refinery/data/model/representation/Relation.java b/store/src/main/java/tools/refinery/data/model/representation/Relation.java deleted file mode 100644 index 3ae07249..00000000 --- a/store/src/main/java/tools/refinery/data/model/representation/Relation.java +++ /dev/null @@ -1,31 +0,0 @@ -package tools.refinery.data.model.representation; - -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.TupleHashProvider; - -public class Relation extends DataRepresentation { - private final String name; - private final int arity; - - public Relation(String name, int arity, D defaultValue) { - super(TupleHashProvider.singleton(), defaultValue); - this.name = name; - this.arity = arity; - } - - @Override - public String getName() { - return name; - } - - public int getArity() { - return arity; - } - - @Override - public boolean isValidKey(Tuple key) { - if(key == null) { - return false; - } else return key.getSize() == getArity(); - } -} diff --git a/store/src/main/java/tools/refinery/data/model/representation/TruthValue.java b/store/src/main/java/tools/refinery/data/model/representation/TruthValue.java deleted file mode 100644 index 795285a6..00000000 --- a/store/src/main/java/tools/refinery/data/model/representation/TruthValue.java +++ /dev/null @@ -1,51 +0,0 @@ -package tools.refinery.data.model.representation; - -public enum TruthValue { - TRUE("true"), - - FALSE("false"), - - UNKNOWN("unknown"), - - ERROR("error"); - - private final String name; - - private TruthValue(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - public static TruthValue toTruthValue(boolean value) { - return value ? 
TRUE : FALSE; - } - - public boolean isConsistent() { - return this != ERROR; - } - - public boolean isComplete() { - return this != UNKNOWN; - } - - public boolean must() { - return this == TRUE || this == ERROR; - } - - public boolean may() { - return this == TRUE || this == UNKNOWN; - } - - public TruthValue not() { - if (this == TRUE) { - return FALSE; - } else if (this == FALSE) { - return TRUE; - } else { - return this; - } - } -} diff --git a/store/src/main/java/tools/refinery/data/query/RelationalScope.java b/store/src/main/java/tools/refinery/data/query/RelationalScope.java deleted file mode 100644 index d6d6133b..00000000 --- a/store/src/main/java/tools/refinery/data/query/RelationalScope.java +++ /dev/null @@ -1,35 +0,0 @@ -package tools.refinery.data.query; - -import java.util.Set; - -import org.apache.log4j.Logger; -import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; -import org.eclipse.viatra.query.runtime.api.scope.IEngineContext; -import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; -import org.eclipse.viatra.query.runtime.api.scope.QueryScope; - -import tools.refinery.data.model.Model; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.query.internal.RelationUpdateListener; -import tools.refinery.data.query.internal.RelationalEngineContext; -import tools.refinery.data.query.view.RelationView; - -public class RelationalScope extends QueryScope{ - private final Model model; - private final RelationUpdateListener updateListener; - - public RelationalScope(Model model, Set> relationViews) { - this.model = model; - updateListener = new RelationUpdateListener(relationViews); - } - - public void processUpdate(RelationView relationView, Tuple key, D oldValue, D newValue) { - updateListener.processChange(relationView, key, oldValue, newValue); - } - - @Override - protected IEngineContext createEngineContext(ViatraQueryEngine engine, IIndexingErrorListener errorListener, - Logger logger) { - return new RelationalEngineContext(model, updateListener); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/DNFAnd.java b/store/src/main/java/tools/refinery/data/query/building/DNFAnd.java deleted file mode 100644 index 2de68d38..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/DNFAnd.java +++ /dev/null @@ -1,37 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class DNFAnd { - private Set existentiallyQuantified; - private List constraints; - public DNFAnd(Set quantifiedVariables, List constraints) { - super(); - this.existentiallyQuantified = quantifiedVariables; - this.constraints = constraints; - } - public Set getExistentiallyQuantified() { - return existentiallyQuantified; - } - public List getConstraints() { - return constraints; - } - void unifyVariables(Map uniqueVariableMap) { - Map uniqueVariableMapForClause = new HashMap<>(uniqueVariableMap); - for(DNFAtom atom : constraints) { - atom.unifyVariables(uniqueVariableMapForClause); - } - } - void collectQuantifiedVariables(Set parameters) { - Set result = new HashSet<>(); - for(DNFAtom constraint : constraints) { - constraint.collectAllVariables(result); - } - result.removeAll(parameters); - existentiallyQuantified = result; - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/DNFAtom.java b/store/src/main/java/tools/refinery/data/query/building/DNFAtom.java deleted file mode 100644 
index a91f27b6..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/DNFAtom.java +++ /dev/null @@ -1,33 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.Collection; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -public interface DNFAtom { - void unifyVariables(Map variables); - static Variable unifyVariables(Map unifiedVariables, Variable variable) { - if(variable != null) { - if(variable.isNamed() && unifiedVariables.containsKey(variable.getName())) { - return unifiedVariables.get(variable.getName()); - } - return variable; - } else { - return null; - } - } - void collectAllVariables(Set variables); - static void addToCollection(Set variables, Variable variable) { - if(variable != null) { - variables.add(variable); - } - } - static void addToCollection(Set variables, Collection variableCollection) { - Iterator iterator = variableCollection.iterator(); - while(iterator.hasNext()) { - Variable variable = iterator.next(); - addToCollection(variables, variable); - } - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/DNFPredicate.java b/store/src/main/java/tools/refinery/data/query/building/DNFPredicate.java deleted file mode 100644 index b25e5a7d..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/DNFPredicate.java +++ /dev/null @@ -1,72 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -public class DNFPredicate { - private final String name; - private final String uniqueName; - private final List parameters; - private final List clauses; - - public DNFPredicate(String name, List parameters, List clauses) { - this.name = name; - this.uniqueName = generateUniqueName(name,"predicate"); - this.parameters = parameters; - this.clauses = clauses; - - postProcess(); - } - - public static String generateUniqueName(String originalName, String defaultPrefix) { - UUID uuid = UUID.randomUUID(); - String uniqueString = uuid.toString().replace('-', '_'); - if(originalName == null) { - return defaultPrefix+uniqueString; - } else { - return originalName+uniqueString; - } - } - - public String getName() { - return name; - } - public String getUniqueName() { - return uniqueName; - } - public List getVariables() { - return parameters; - } - public List getClauses() { - return clauses; - } - - public void unifyVariables() { - Map uniqueVariableMap = new HashMap<>(); - for(Variable parameter : this.parameters) { - if(parameter.isNamed()) { - String parameterName = parameter.getName(); - if(uniqueVariableMap.containsKey(parameterName)) { - throw new IllegalArgumentException("Multiple parameters has the name "+parameterName); - } else { - uniqueVariableMap.put(parameterName, parameter); - } - } - } - for(DNFAnd clause : this.clauses) { - clause.unifyVariables(uniqueVariableMap); - } - } - public void collectQuantifiedVariables() { - for(DNFAnd clause : this.clauses) { - clause.collectQuantifiedVariables(new HashSet<>(parameters)); - } - } - public void postProcess() { - unifyVariables(); - collectQuantifiedVariables(); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/EquivalenceAtom.java b/store/src/main/java/tools/refinery/data/query/building/EquivalenceAtom.java deleted file mode 100644 index b1934391..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/EquivalenceAtom.java +++ /dev/null @@ -1,44 +0,0 @@ -package 
tools.refinery.data.query.building; - -import java.util.Map; -import java.util.Set; - -public class EquivalenceAtom implements DNFAtom{ - private boolean positive; - private Variable left; - private Variable right; - public EquivalenceAtom(boolean positive, Variable left, Variable right) { - this.positive = positive; - this.left = left; - this.right = right; - } - public boolean isPositive() { - return positive; - } - public void setPositive(boolean positive) { - this.positive = positive; - } - public Variable getLeft() { - return left; - } - public void setLeft(Variable left) { - this.left = left; - } - public Variable getRight() { - return right; - } - public void setRight(Variable right) { - this.right = right; - } - - @Override - public void unifyVariables(Map variables) { - this.left = DNFAtom.unifyVariables(variables,left); - this.right = DNFAtom.unifyVariables(variables,right); - } - @Override - public void collectAllVariables(Set variables) { - DNFAtom.addToCollection(variables, left); - DNFAtom.addToCollection(variables, right); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/PredicateAtom.java b/store/src/main/java/tools/refinery/data/query/building/PredicateAtom.java deleted file mode 100644 index e15448dd..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/PredicateAtom.java +++ /dev/null @@ -1,66 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class PredicateAtom implements DNFAtom { - private DNFPredicate referred; - private List substitution; - private boolean positive; - private boolean transitive; - - public PredicateAtom(boolean positive, boolean transitive, DNFPredicate referred, List substitution) { - this.positive = positive; - this.referred = referred; - this.substitution = substitution; - this.transitive = transitive; - } - - public DNFPredicate getReferred() { - return referred; - } - - public void setReferred(DNFPredicate referred) { - this.referred = referred; - } - - public List getSubstitution() { - return substitution; - } - - public void setSubstitution(List substitution) { - this.substitution = substitution; - } - - public boolean isPositive() { - return positive; - } - - public void setPositive(boolean positive) { - this.positive = positive; - } - - public boolean isTransitive() { - return transitive; - } - - public void setTransitive(boolean transitive) { - this.transitive = transitive; - } - - @Override - public void unifyVariables(Map variables) { - for (int i = 0; i < this.substitution.size(); i++) { - final Object term = this.substitution.get(i); - if (term instanceof Variable variableReference) { - this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); - } - } - } - - @Override - public void collectAllVariables(Set variables) { - DNFAtom.addToCollection(variables, substitution); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/PredicateBuilder_string.java b/store/src/main/java/tools/refinery/data/query/building/PredicateBuilder_string.java deleted file mode 100644 index 8e852900..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/PredicateBuilder_string.java +++ /dev/null @@ -1,107 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; - -import tools.refinery.data.query.view.RelationView; - -public class PredicateBuilder_string { - private 
PredicateBuilder_string() {} - - public static PredicateBuild1 predicate(String name) { - return new PredicateBuild1(name); - } - public static class PredicateBuild1 { - private String name; - public PredicateBuild1(String name) { - this.name = name; - } - public PredicateBuild2 parameters(String... parameters) { - return new PredicateBuild2(name, parameters); - } - } - public static class PredicateBuild2 { - private String name; - private String[] parameters; - public PredicateBuild2(String name, String[] parameters) { - this.name = name; - this.parameters = parameters; - } - - public PredicateBuild3 clause(DNFAtom...constraints) { - return new PredicateBuild3(name,parameters,List.of(constraints)); - } - } - public static class PredicateBuild3 { - String name; - String[] parameters; - List clauses; - public PredicateBuild3(String name, String[] parameters, List clauses) { - super(); - this.name = name; - this.parameters = parameters; - this.clauses = clauses; - } - - public PredicateBuild3 clause(DNFAtom...constraints) { - List newClauses = new ArrayList<>(); - newClauses.addAll(clauses); - newClauses.add(constraints); - return new PredicateBuild3(name, parameters, newClauses); - } - public DNFPredicate build() { - List newParameters = new ArrayList<>(this.parameters.length); - for(int i = 0; i newClauses = new ArrayList<>(this.clauses.size()); - for(DNFAtom[] clause : this.clauses) { - List constraints = new ArrayList<>(clause.length); - Collections.addAll(constraints, clause); - newClauses.add(new DNFAnd(new HashSet<>(), constraints)); - } - - return new DNFPredicate(name,newParameters,newClauses); - } - } - - private static Variable stringToVariable(String name) { - if(name != null) { - return new Variable(name); - } else { - return null; - } - } - private static List stringToVariable(String[] names) { - List variables = new ArrayList<>(); - for(int i = 0; i view, String... variables) { - - return new RelationAtom(view, stringToVariable(variables)); - } - - public static PredicateAtom cInPredicate(DNFPredicate referred, String... variables) { - return new PredicateAtom(true, false, referred, stringToVariable(variables)); - } - public static PredicateAtom cInTransitivePredicate(DNFPredicate referred, String... variables) { - return new PredicateAtom(true, true, referred, stringToVariable(variables)); - } - public static PredicateAtom cNotInPredicate(DNFPredicate referred, String... 
variables) { - return new PredicateAtom(false, false, referred, stringToVariable(variables)); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/RelationAtom.java b/store/src/main/java/tools/refinery/data/query/building/RelationAtom.java deleted file mode 100644 index 315122fe..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/RelationAtom.java +++ /dev/null @@ -1,49 +0,0 @@ -package tools.refinery.data.query.building; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import tools.refinery.data.query.view.FilteredRelationView; -import tools.refinery.data.query.view.RelationView; - -public class RelationAtom implements DNFAtom { - RelationView view; - List substitution; - - public RelationAtom(RelationView view, List substitution) { - this.view = view; - this.substitution = substitution; - } - - public RelationView getView() { - return view; - } - - public void setView(FilteredRelationView view) { - this.view = view; - } - - public List getSubstitution() { - return substitution; - } - - public void setSubstitution(List substitution) { - this.substitution = substitution; - } - - @Override - public void unifyVariables(Map variables) { - for (int i = 0; i < this.substitution.size(); i++) { - final Object term = this.substitution.get(i); - if (term instanceof Variable variableReference) { - this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); - } - } - } - - @Override - public void collectAllVariables(Set variables) { - DNFAtom.addToCollection(variables, substitution); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/building/Variable.java b/store/src/main/java/tools/refinery/data/query/building/Variable.java deleted file mode 100644 index 5c2a2f1e..00000000 --- a/store/src/main/java/tools/refinery/data/query/building/Variable.java +++ /dev/null @@ -1,22 +0,0 @@ -package tools.refinery.data.query.building; - -public class Variable { - private final String name; - private final String uniqueName; - - public Variable(String name) { - super(); - this.name = name; - this.uniqueName = DNFPredicate.generateUniqueName(name, "variable"); - - } - public String getName() { - return name; - } - public String getUniqueName() { - return uniqueName; - } - public boolean isNamed() { - return name != null; - } -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/DummyBaseIndexer.java b/store/src/main/java/tools/refinery/data/query/internal/DummyBaseIndexer.java deleted file mode 100644 index f0fb7c95..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/DummyBaseIndexer.java +++ /dev/null @@ -1,59 +0,0 @@ -package tools.refinery.data.query.internal; - -import java.lang.reflect.InvocationTargetException; -import java.util.concurrent.Callable; - -import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; -import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; -import org.eclipse.viatra.query.runtime.api.scope.IInstanceObserver; -import org.eclipse.viatra.query.runtime.api.scope.ViatraBaseIndexChangeListener; - -/** - * copied from org.eclipse.viatra.query.runtime.tabular.TabularEngineContext; - */ -public class DummyBaseIndexer implements IBaseIndex{ - - @Override - public V coalesceTraversals(Callable callable) throws InvocationTargetException { - try { - return callable.call(); - } catch (Exception e) { - throw new InvocationTargetException(e); - } - } - - @Override - public void addBaseIndexChangeListener(ViatraBaseIndexChangeListener 
listener) { - // no notification support - } - - @Override - public void removeBaseIndexChangeListener(ViatraBaseIndexChangeListener listener) { - // no notification support - } - - @Override - public void resampleDerivedFeatures() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean addIndexingErrorListener(IIndexingErrorListener listener) { - return true; - } - - @Override - public boolean removeIndexingErrorListener(IIndexingErrorListener listener) { - return true; - } - - @Override - public boolean addInstanceObserver(IInstanceObserver observer, Object observedObject) { - return true; - } - - @Override - public boolean removeInstanceObserver(IInstanceObserver observer, Object observedObject) { - return true; - } -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/PredicateTranslator.java b/store/src/main/java/tools/refinery/data/query/internal/PredicateTranslator.java deleted file mode 100644 index f9b6c17f..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/PredicateTranslator.java +++ /dev/null @@ -1,210 +0,0 @@ -package tools.refinery.data.query.internal; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.eclipse.viatra.query.runtime.api.GenericPatternMatcher; -import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; -import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; -import org.eclipse.viatra.query.runtime.api.scope.QueryScope; -import org.eclipse.viatra.query.runtime.matchers.backend.QueryEvaluationHint; -import org.eclipse.viatra.query.runtime.matchers.psystem.PBody; -import org.eclipse.viatra.query.runtime.matchers.psystem.PVariable; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Equality; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.ExportedParameter; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Inequality; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.NegativePatternCall; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryReflexiveTransitiveClosure; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryTransitiveClosure; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.PositivePatternCall; -import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.TypeConstraint; -import org.eclipse.viatra.query.runtime.matchers.psystem.queries.BasePQuery; -import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PParameter; -import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery; -import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PVisibility; -import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; - -import tools.refinery.data.query.RelationalScope; -import tools.refinery.data.query.view.RelationView; - -public class PredicateTranslator extends BasePQuery { - - private final Map parameters = new HashMap(); - private String fullyQualifiedName; - private LinkedList bodies = new LinkedList(); - private List symbolicParameters; - - public PredicateTranslator(String fullyQualifiedName) { - super(PVisibility.PUBLIC); - this.fullyQualifiedName = fullyQualifiedName; - PBody body = new PBody(this); - bodies.add(body); - } - - @Override - public String getFullyQualifiedName() { - return fullyQualifiedName; - } - 
- public PredicateTranslator addParameter(String name, RelationView type) { - PParameter parameter = new PParameter(name); - parameters.put(name, parameter); - - PBody body = bodies.peekLast(); - List symbolicParameters = new ArrayList<>(); - parameters.forEach((pName, pParameter) -> { - PVariable var = body.getOrCreateVariableByName(pName); - symbolicParameters.add(new ExportedParameter(body, var, pParameter)); - }); - body.setSymbolicParameters(symbolicParameters); - - return this; - } - - @Override - public List getParameters() { - return new ArrayList(parameters.values()); - } - public PredicateTranslator addConstraint(RelationView view, String... name) { - if(name.length != view.getArity()) { - throw new IllegalArgumentException("Arity ("+view.getArity()+") does not match parameter numbers ("+name.length+")"); - } - PBody body = bodies.peekLast(); - Object[] variables = new Object[name.length]; - for(int i = 0; i symbolicParameters = new ArrayList<>(); - parameters.forEach((name, parameter) -> { - PVariable var = body.getOrCreateVariableByName(name); - symbolicParameters.add(new ExportedParameter(body, var, parameter)); - }); - body.setSymbolicParameters(symbolicParameters); - bodies.add(body); - return this; - } - - // Equality constraint - public PredicateTranslator addEquality(String sourceName, String targetName) { - PBody body = bodies.peekLast(); - PVariable var_source = body.getOrCreateVariableByName(sourceName); - PVariable var_target = body.getOrCreateVariableByName(targetName); - new Equality(body, var_source, var_target); - return this; - } - - // Inequality constraint - public PredicateTranslator addInequality(String sourceName, String targetName) { - PBody body = bodies.peekLast(); - PVariable var_source = body.getOrCreateVariableByName(sourceName); - PVariable var_target = body.getOrCreateVariableByName(targetName); - new Inequality(body, var_source, var_target); - return this; - } - - // Positive pattern call - public PredicateTranslator addPatternCall(PQuery query, String... names) { - PBody body = bodies.peekLast(); - PVariable[] vars = new PVariable[names.length]; - for (int i = 0; i < names.length; i++) { - vars[i] = body.getOrCreateVariableByName(names[i]); - } - new PositivePatternCall(body, Tuples.flatTupleOf(vars), query); - return this; - } - - // Negative pattern call - public PredicateTranslator addNegativePatternCall(PQuery query, String... 
names) { - PBody body = bodies.peekLast(); - PVariable[] vars = new PVariable[names.length]; - for (int i = 0; i < names.length; i++) { - vars[i] = body.getOrCreateVariableByName(names[i]); - } - new NegativePatternCall(body, Tuples.flatTupleOf(vars), query); - return this; - } - - // Binary transitive closure pattern call - public PredicateTranslator addBinaryTransitiveClosure(PQuery query, String sourceName, String targetName) { - PBody body = bodies.peekLast(); - PVariable var_source = body.getOrCreateVariableByName(sourceName); - PVariable var_target = body.getOrCreateVariableByName(targetName); - new BinaryTransitiveClosure(body, Tuples.flatTupleOf(var_source, var_target), query); - return this; - } - - // Binary reflexive transitive closure pattern call - public PredicateTranslator addBinaryReflexiveTransitiveClosure(PQuery query, String sourceName, String targetName) { - PBody body = bodies.peekLast(); - PVariable var_source = body.getOrCreateVariableByName(sourceName); - PVariable var_target = body.getOrCreateVariableByName(targetName); - new BinaryReflexiveTransitiveClosure(body, Tuples.flatTupleOf(var_source, var_target), query, - query.getParameters().get(0).getDeclaredUnaryType()); - return this; - } - - @Override - public Set doGetContainedBodies() { - setEvaluationHints(new QueryEvaluationHint(null, QueryEvaluationHint.BackendRequirement.UNSPECIFIED)); - return new LinkedHashSet(bodies); - } - - public void addSymbolicParameters(ExportedParameter symbolicParameter) { - checkMutability(); - if (symbolicParameters == null) { - symbolicParameters = new ArrayList<>(); - } - symbolicParameters.add(symbolicParameter); - } - - public GenericQuerySpecification build() { - return new GenericQuerySpecification(this) { - - @Override - public Class getPreferredScopeClass() { - return RelationalScope.class; - } - - @Override - protected GenericPatternMatcher instantiate(ViatraQueryEngine engine) { - return defaultInstantiate(engine); - } - - @Override - public GenericPatternMatcher instantiate() { - return new GenericPatternMatcher(this); - } - - }; - } -} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListener.java b/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListener.java deleted file mode 100644 index a3b319c8..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListener.java +++ /dev/null @@ -1,52 +0,0 @@ -package tools.refinery.data.query.internal; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; -import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; - -import tools.refinery.data.model.Tuple; -import tools.refinery.data.query.view.RelationView; - -public class RelationUpdateListener { - private final Map,Set>> view2Listeners; - - public RelationUpdateListener(Set> relationViews) { - view2Listeners = new HashMap<>(); - for(RelationView relationView : relationViews) { - view2Listeners.put(relationView, new HashSet<>()); - } - } - public boolean containsRelationalView(RelationView relationalKey) { - RelationView relationView = relationalKey.getWrappedKey(); - return view2Listeners.containsKey(relationView); - } - public void addListener(RelationView relationalKey, ITuple seed, IQueryRuntimeContextListener listener) { - RelationView relationView = relationalKey.getWrappedKey(); - 
if(view2Listeners.containsKey(relationView)) { - RelationUpdateListenerEntry entry = new RelationUpdateListenerEntry<>(relationalKey, seed, listener); - view2Listeners.get(relationView).add(entry); - } else throw new IllegalArgumentException(); - } - public void removeListener(RelationView relationalKey, ITuple seed, IQueryRuntimeContextListener listener) { - RelationView relationView = relationalKey.getWrappedKey(); - if(view2Listeners.containsKey(relationView)) { - RelationUpdateListenerEntry entry = new RelationUpdateListenerEntry<>(relationalKey, seed, listener); - view2Listeners.get(relationView).remove(entry); - } else throw new IllegalArgumentException(); - } - - public void processChange(RelationView relationView, Tuple tuple, D oldValue, D newValue) { - Set> listeners = view2Listeners.get(relationView); - if(listeners != null) { - for(RelationUpdateListenerEntry listener : listeners) { - @SuppressWarnings("unchecked") - RelationUpdateListenerEntry typeCorrectListener = (RelationUpdateListenerEntry) listener; - typeCorrectListener.processChange(tuple, oldValue, newValue); - } - } else throw new IllegalArgumentException("View was not indexed in constructor "+relationView); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListenerEntry.java b/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListenerEntry.java deleted file mode 100644 index 45be26b1..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/RelationUpdateListenerEntry.java +++ /dev/null @@ -1,64 +0,0 @@ -package tools.refinery.data.query.internal; - -import java.util.Arrays; -import java.util.Objects; - -import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; -import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; -import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; - -import tools.refinery.data.model.Tuple; -import tools.refinery.data.query.view.RelationView; - -public class RelationUpdateListenerEntry { - final RelationView key; - final ITuple filter; - final IQueryRuntimeContextListener listener; - - public RelationUpdateListenerEntry(RelationView key, ITuple filter, IQueryRuntimeContextListener listener) { - super(); - this.key = key; - this.filter = filter; - this.listener = listener; - } - - public void processChange(Tuple tuple, D oldValue, D newValue) { - Object[] oldTuple = isMatching(key.getWrappedKey().transform(tuple, oldValue), filter); - Object[] newTuple = isMatching(key.getWrappedKey().transform(tuple, newValue), filter); - - if(!Arrays.equals(oldTuple, newTuple)) { - if(oldTuple != null) { - listener.update(key, Tuples.flatTupleOf(oldTuple), false); - } - if(newTuple != null) { - listener.update(key, Tuples.flatTupleOf(newTuple), true); - } - } - } - - private Object[] isMatching(Object[] tuple, ITuple filter) { - for(int i = 0; i other = (RelationUpdateListenerEntry) obj; - return Objects.equals(filter, other.filter) && Objects.equals(key, other.key) - && Objects.equals(listener, other.listener); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/RelationalEngineContext.java b/store/src/main/java/tools/refinery/data/query/internal/RelationalEngineContext.java deleted file mode 100644 index 08ab8927..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/RelationalEngineContext.java +++ /dev/null @@ -1,33 +0,0 @@ -package tools.refinery.data.query.internal; - -import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; -import 
org.eclipse.viatra.query.runtime.api.scope.IEngineContext; -import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; - -import tools.refinery.data.model.Model; - -public class RelationalEngineContext implements IEngineContext{ - private final IBaseIndex baseIndex = new DummyBaseIndexer(); - private final RelationalRuntimeContext runtimeContext; - - - public RelationalEngineContext(Model model, RelationUpdateListener updateListener) { - runtimeContext = new RelationalRuntimeContext(model, updateListener); - } - - @Override - public IBaseIndex getBaseIndex() { - return this.baseIndex; - } - - @Override - public void dispose() { - //lifecycle not controlled by engine - } - - @Override - public IQueryRuntimeContext getQueryRuntimeContext() { - return runtimeContext; - } - -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/RelationalQueryMetaContext.java b/store/src/main/java/tools/refinery/data/query/internal/RelationalQueryMetaContext.java deleted file mode 100644 index 6226b483..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/RelationalQueryMetaContext.java +++ /dev/null @@ -1,58 +0,0 @@ -package tools.refinery.data.query.internal; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.eclipse.viatra.query.runtime.matchers.context.AbstractQueryMetaContext; -import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; -import org.eclipse.viatra.query.runtime.matchers.context.InputKeyImplication; - -import tools.refinery.data.query.view.RelationView; - -/** - * The meta context information for String scopes. - */ -public final class RelationalQueryMetaContext extends AbstractQueryMetaContext { - - @Override - public boolean isEnumerable(IInputKey key) { - ensureValidKey(key); - return key.isEnumerable(); - } - - @Override - public boolean isStateless(IInputKey key) { - ensureValidKey(key); - return key instanceof RelationView; - } - - @Override - public Collection getImplications(IInputKey implyingKey) { - ensureValidKey(implyingKey); - return new HashSet(); - } - - @Override - public Map, Set> getFunctionalDependencies(IInputKey key) { - ensureValidKey(key); - if (key instanceof RelationView) { - return new HashMap, Set>(); - } else { - return Collections.emptyMap(); - } - } - - public void ensureValidKey(IInputKey key) { - if (! 
(key instanceof RelationView)) - illegalInputKey(key); - } - - public void illegalInputKey(IInputKey key) { - throw new IllegalArgumentException("The input key " + key + " is not a valid input key."); - } - -} diff --git a/store/src/main/java/tools/refinery/data/query/internal/RelationalRuntimeContext.java b/store/src/main/java/tools/refinery/data/query/internal/RelationalRuntimeContext.java deleted file mode 100644 index 8ee185af..00000000 --- a/store/src/main/java/tools/refinery/data/query/internal/RelationalRuntimeContext.java +++ /dev/null @@ -1,186 +0,0 @@ -package tools.refinery.data.query.internal; - -import static tools.refinery.data.util.CollectionsUtil.filter; -import static tools.refinery.data.util.CollectionsUtil.map; - -import java.lang.reflect.InvocationTargetException; -import java.util.Iterator; -import java.util.Optional; -import java.util.concurrent.Callable; - -import org.eclipse.viatra.query.runtime.base.core.NavigationHelperImpl; -import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; -import org.eclipse.viatra.query.runtime.matchers.context.IQueryMetaContext; -import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; -import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; -import org.eclipse.viatra.query.runtime.matchers.context.IndexingService; -import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; -import org.eclipse.viatra.query.runtime.matchers.tuple.Tuple; -import org.eclipse.viatra.query.runtime.matchers.tuple.TupleMask; -import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; -import org.eclipse.viatra.query.runtime.matchers.util.Accuracy; - -import tools.refinery.data.model.Model; -import tools.refinery.data.query.view.RelationView; - -public class RelationalRuntimeContext implements IQueryRuntimeContext { - private final RelationalQueryMetaContext metaContext = new RelationalQueryMetaContext(); - private final RelationUpdateListener relationUpdateListener; - private final Model model; - - public RelationalRuntimeContext(Model model, RelationUpdateListener relationUpdateListener) { - this.model = model; - this.relationUpdateListener = relationUpdateListener; - } - - @Override - public IQueryMetaContext getMetaContext() { - return metaContext; - } - - /** - * TODO: check {@link NavigationHelperImpl#coalesceTraversals(Callable)} - */ - @Override - public V coalesceTraversals(Callable callable) throws InvocationTargetException { - try { - return callable.call(); - } catch (Exception e) { - throw new InvocationTargetException(e); - } - } - - @Override - public boolean isCoalescing() { - return true; - } - - @Override - public boolean isIndexed(IInputKey key, IndexingService service) { - if(key instanceof RelationView relationalKey) { - return this.relationUpdateListener.containsRelationalView(relationalKey); - } else { - return false; - } - } - - @Override - public void ensureIndexed(IInputKey key, IndexingService service) { - if(!isIndexed(key, service)) { - throw new IllegalStateException("Engine tries to index a new key " +key); - } - } - - RelationView checkKey(IInputKey key) { - if(key instanceof RelationView) { - RelationView relationViewKey = (RelationView) key; - if(relationUpdateListener.containsRelationalView(relationViewKey)) { - return relationViewKey; - } else { - throw new IllegalStateException("Query is asking for non-indexed key"); - } - } else { - throw new IllegalStateException("Query is asking for non-relational key"); - } - } - - @Override - public int 
countTuples(IInputKey key, TupleMask seedMask, ITuple seed) { - RelationView relationalViewKey = checkKey(key); - Iterable allObjects = relationalViewKey.getAll(model); - Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); - Iterator iterator = filteredBySeed.iterator(); - int result = 0; - while(iterator.hasNext()) { - iterator.next(); - result++; - } - return result; - } - - @Override - public Optional estimateCardinality(IInputKey key, TupleMask groupMask, Accuracy requiredAccuracy) { - return Optional.empty(); - } - - @Override - public Iterable enumerateTuples(IInputKey key, TupleMask seedMask, ITuple seed) { - RelationView relationalViewKey = checkKey(key); - Iterable allObjects = relationalViewKey.getAll(model); - Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); - return map(filteredBySeed,Tuples::flatTupleOf); - } - - private boolean isMatching(Object[] tuple, TupleMask seedMask, ITuple seed) { - for(int i=0; i relationalViewKey, TupleMask seedMask, ITuple seed) { -// final int arity = relationalViewKey.getArity(); -// Object[] result = new Object[arity]; -// for(int i = 0; i enumerateValues(IInputKey key, TupleMask seedMask, ITuple seed) { - return enumerateTuples(key, seedMask, seed); - } - - @Override - public boolean containsTuple(IInputKey key, ITuple seed) { - RelationView relationalViewKey = checkKey(key); - return relationalViewKey.get(model,seed.getElements()); - } - - @Override - public void addUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { - RelationView relationalKey = checkKey(key); - this.relationUpdateListener.addListener(relationalKey, seed, listener); - - } - - @Override - public void removeUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { - RelationView relationalKey = checkKey(key); - this.relationUpdateListener.removeListener(relationalKey, seed, listener); - } - - @Override - public Object wrapElement(Object externalElement) { - return externalElement; - } - - @Override - public Object unwrapElement(Object internalElement) { - return internalElement; - } - - @Override - public Tuple wrapTuple(Tuple externalElements) { - return externalElements; - } - - @Override - public Tuple unwrapTuple(Tuple internalElements) { - return internalElements; - } - - @Override - public void ensureWildcardIndexing(IndexingService service) { - throw new UnsupportedOperationException(); - } - - @Override - public void executeAfterTraversal(Runnable runnable) throws InvocationTargetException { - runnable.run(); - } -} diff --git a/store/src/main/java/tools/refinery/data/query/view/FilteredRelationView.java b/store/src/main/java/tools/refinery/data/query/view/FilteredRelationView.java deleted file mode 100644 index b33a47af..00000000 --- a/store/src/main/java/tools/refinery/data/query/view/FilteredRelationView.java +++ /dev/null @@ -1,48 +0,0 @@ -package tools.refinery.data.query.view; - -import java.util.function.BiPredicate; - -import tools.refinery.data.model.Model; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.Tuple.Tuple1; -import tools.refinery.data.model.representation.Relation; - -public class FilteredRelationView extends RelationView{ - private final BiPredicate predicate; - - public FilteredRelationView(Relation representation, BiPredicate predicate) { - super(representation); - this.predicate = predicate; - } - @Override - protected Object[] forwardMap(Tuple key, D value) { - return 
toTuple1Array(key); - } - @Override - public boolean get(Model model, Object[] tuple) { - int[] content = new int[tuple.length]; - for(int i = 0; i extends RelationView { - - public FunctionalRelationView(Relation representation) { - super(representation); - } - - @Override - protected boolean filter(Tuple key, D value) { - return true; - } - - @Override - protected Object[] forwardMap(Tuple key, D value) { - return toTuple1ArrayPlusValue(key, value); - } - - @Override - public boolean get(Model model, Object[] tuple) { - int[] content = new int[tuple.length-1]; - for(int i = 0; i Object[] toTuple1ArrayPlusValue(Tuple t, D value) { - Object[] result = new Object[t.getSize()+1]; - for(int i = 0; i{ - - public KeyOnlyRelationView(Relation representation) { - super(representation, (k,v)->true); - } - @Override - protected boolean filter(Tuple key, Boolean value) { - return true; - } - -} diff --git a/store/src/main/java/tools/refinery/data/query/view/RelationView.java b/store/src/main/java/tools/refinery/data/query/view/RelationView.java deleted file mode 100644 index 10b67221..00000000 --- a/store/src/main/java/tools/refinery/data/query/view/RelationView.java +++ /dev/null @@ -1,86 +0,0 @@ -package tools.refinery.data.query.view; - -import java.util.Objects; - -import org.eclipse.viatra.query.runtime.matchers.context.common.BaseInputKeyWrapper; - -import tools.refinery.data.map.CursorAsIterator; -import tools.refinery.data.model.Model; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.representation.Relation; - -/** - * Represents a view of a {@link Relation} that can be queried. - * - * @author Oszkar Semerath - * - * @param - */ -public abstract class RelationView extends BaseInputKeyWrapper> { - protected final Relation representation; - - protected RelationView(Relation representation) { - super(null); - this.wrappedKey = this; - this.representation = representation; - } - - @Override - public String getPrettyPrintableName() { - return representation.getName(); - } - - @Override - public String getStringID() { - return representation.getName() + this.getClass().getName(); - } - - public Relation getRepresentation() { - return representation; - } - - @Override - public boolean isEnumerable() { - return true; - } - - protected abstract boolean filter(Tuple key, D value); - - protected abstract Object[] forwardMap(Tuple key, D value); - - public abstract boolean get(Model model, Object[] tuple); - - public Object[] transform(Tuple tuple, D value) { - if (filter(tuple, value)) { - return forwardMap(tuple, value); - } else - return null; - } - - public Iterable getAll(Model model) { - return (() -> new CursorAsIterator<>(model.getAll(representation), (k, v) -> forwardMap(k, v), - (k, v) -> filter(k, v))); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Objects.hash(representation); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (!(obj instanceof RelationView)) - return false; - @SuppressWarnings("unchecked") - RelationView other = ((RelationView) obj); - return Objects.equals(representation, other.representation); - } - -} diff --git a/store/src/main/java/tools/refinery/data/util/CollectionsUtil.java b/store/src/main/java/tools/refinery/data/util/CollectionsUtil.java deleted file mode 100644 index bd813b20..00000000 --- a/store/src/main/java/tools/refinery/data/util/CollectionsUtil.java +++ 
/dev/null @@ -1,72 +0,0 @@ -package tools.refinery.data.util; - -import java.util.Iterator; -import java.util.NoSuchElementException; -import java.util.function.Function; -import java.util.function.Predicate; - -public final class CollectionsUtil { - private CollectionsUtil() { - throw new UnsupportedOperationException(); - } - - public static Iterator map(Iterator source, Function transformation) { - return new Iterator() { - - @Override - public boolean hasNext() { - return source.hasNext(); - } - - @Override - public T next() { - return transformation.apply(source.next()); - } - }; - } - - public static Iterable map(Iterable source, Function transformation) { - return (()->map(source.iterator(),transformation)); - } - - public static Iterator filter(Iterator source, Predicate condition) { - return new Iterator() { - T internalNext = move(); - boolean internalHasNext; - - private T move() { - internalHasNext = source.hasNext(); - if(internalHasNext) { - internalNext = source.next(); - } - while(internalHasNext && !condition.test(internalNext)) { - internalHasNext = source.hasNext(); - if(internalHasNext) { - internalNext = source.next(); - } - } - return internalNext; - } - - @Override - public boolean hasNext() { - return internalHasNext; - } - - @Override - public T next() { - if(!internalHasNext) { - throw new NoSuchElementException(); - } else { - T result = internalNext; - move(); - return result; - } - } - }; - } - - public static Iterable filter(Iterable source, Predicate condition) { - return (()->filter(source.iterator(),condition)); - } -} diff --git a/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java b/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java new file mode 100644 index 00000000..75f1e2ab --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/ContinousHashProvider.java @@ -0,0 +1,69 @@ +package tools.refinery.store.map; + +import tools.refinery.store.map.internal.Node; + +/** + * A class representing an equivalence relation for a type {@code K} with a + * continuous hash function. + * + * @author Oszkar Semerath + * + * @param Target java type. + */ +public interface ContinousHashProvider { + public static final int EFFECTIVE_BITS = Node.EFFECTIVE_BITS; + public static final int EFFECTIVE_BIT_MASK = (1 << (EFFECTIVE_BITS)) - 1; + + /** + * Maximal practical depth for differentiating keys. If two keys have the same + * hash code until that depth, the algorithm can stop. + */ + public static final int MAX_PRACTICAL_DEPTH = 500; + + /** + * Provides a hash code for a object {@code key} with a given {@code index}. It + * has the following contracts: + *
+ * <ul>
+ * <li>If {@link #equals}{@code (key1,key2)}, then
+ * {@code getHash(key1, index) == getHash(key2, index)} for all values of
+ * {@code index}.</li>
+ * <li>If {@code getHash(key1,index) == getHash(key2, index)} for all values of
+ * {@code index}, then {@link #equals}{@code (key1, key2)}</li>
+ * <li>In current implementation, we use only the least significant
+ * {@link #EFFECTIVE_BITS}</li>
+ * </ul>
+ * Check {@link #equals} for further details. + * + * @param key The target data object. + * @param index The depth of the the hash code. Needs to be non-negative. + * @return A hash code. + */ + public int getHash(K key, int index); + + public default int getEffectiveHash(K key, int index) { + return getHash(key, index) & EFFECTIVE_BIT_MASK; + } + + public default int compare(K key1, K key2) { + if (key1.equals(key2)) { + return 0; + } else { + for (int i = 0; i < ContinousHashProvider.MAX_PRACTICAL_DEPTH; i++) { + int hash1 = getEffectiveHash(key1, i); + int hash2 = getEffectiveHash(key2, i); + for(int j = 0; j>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; + int hashFragment2 = (hash2>>>j*Node.BRANCHING_FACTOR_BITS) & factorMask; + var result = Integer.compare(hashFragment1, hashFragment2); + if (result != 0) { + return result; + } + } + } + throw new IllegalArgumentException("Two different keys (" + key1 + " and " + key2 + + ") have the same hashcode over the practical depth limitation (" + + ContinousHashProvider.MAX_PRACTICAL_DEPTH + ")!"); + } + } +} diff --git a/store/src/main/java/tools/refinery/store/map/Cursor.java b/store/src/main/java/tools/refinery/store/map/Cursor.java new file mode 100644 index 00000000..9c465ddc --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/Cursor.java @@ -0,0 +1,14 @@ +package tools.refinery.store.map; + +import java.util.List; + +public interface Cursor { + public K getKey(); + public V getValue(); + public boolean isTerminated(); + public boolean move(); + public boolean isDirty(); + + @SuppressWarnings("squid:S1452") + public List> getDependingMaps(); +} diff --git a/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java b/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java new file mode 100644 index 00000000..65ae6648 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/CursorAsIterator.java @@ -0,0 +1,57 @@ +package tools.refinery.store.map; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; + +public class CursorAsIterator implements Iterator { + private final Cursor internal; + private final BiFunction entryTransformation; + private final BiPredicate filtering; + + D lastValidElement; + + public CursorAsIterator(Cursor internal, BiFunction entryTransformation, BiPredicate filtering) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = filtering; + + moveToNext(); + } + public CursorAsIterator(Cursor internal, BiFunction entryTransformation) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = ((k,v)->true); + + moveToNext(); + } + + private void moveToNext() { + internal.move(); + while(!internal.isTerminated() && !filtering.test(internal.getKey(), internal.getValue())) { + internal.move(); + } + if(!internal.isTerminated()) { + lastValidElement = entryTransformation.apply(internal.getKey(), internal.getValue()); + } + } + + + @Override + public boolean hasNext() { + return !internal.isTerminated(); + } + @Override + public D next() { + if(hasNext()) { + D last = lastValidElement; + moveToNext(); + return last; + } else { + throw new NoSuchElementException(); + } + + } + +} diff --git a/store/src/main/java/tools/refinery/store/map/DiffCursor.java b/store/src/main/java/tools/refinery/store/map/DiffCursor.java new file mode 100644 index 00000000..701f3ec8 --- /dev/null +++ 
b/store/src/main/java/tools/refinery/store/map/DiffCursor.java @@ -0,0 +1,6 @@ +package tools.refinery.store.map; + +public interface DiffCursor extends Cursor { + public V getFromValue(); + public V getToValue(); +} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/store/map/MapAsIterable.java b/store/src/main/java/tools/refinery/store/map/MapAsIterable.java new file mode 100644 index 00000000..6b986732 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/MapAsIterable.java @@ -0,0 +1,26 @@ +package tools.refinery.store.map; + +import java.util.Iterator; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; + +public class MapAsIterable implements Iterable { + private final VersionedMap internal; + private final BiFunction entryTransformation; + private final BiPredicate filtering; + + public MapAsIterable(VersionedMap internal, BiFunction entryTransformation, BiPredicate filtering) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = filtering; + } + public MapAsIterable(VersionedMap internal, BiFunction entryTransformation) { + this.internal = internal; + this.entryTransformation = entryTransformation; + this.filtering = ((k,v)->true); + } + @Override + public Iterator iterator() { + return new CursorAsIterator<>(internal.getAll(), entryTransformation, filtering); + } +} diff --git a/store/src/main/java/tools/refinery/store/map/Versioned.java b/store/src/main/java/tools/refinery/store/map/Versioned.java new file mode 100644 index 00000000..6a23e9d5 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/Versioned.java @@ -0,0 +1,7 @@ +package tools.refinery.store.map; + +public interface Versioned { + public long commit(); + //maybe revert()? 
+ public void restore(long state); +} diff --git a/store/src/main/java/tools/refinery/store/map/VersionedMap.java b/store/src/main/java/tools/refinery/store/map/VersionedMap.java new file mode 100644 index 00000000..a8a64d08 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/VersionedMap.java @@ -0,0 +1,13 @@ +package tools.refinery.store.map; + +public interface VersionedMap extends Versioned{ + public V get(K key); + public Cursor getAll(); + + public V put(K key, V value); + public void putAll(Cursor cursor); + + public long getSize(); + + public DiffCursor getDiffCursor(long state); +} diff --git a/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java b/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java new file mode 100644 index 00000000..a8d7fb1a --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/VersionedMapStore.java @@ -0,0 +1,14 @@ +package tools.refinery.store.map; + +import java.util.Set; + +public interface VersionedMapStore { + + public VersionedMap createMap(); + + public VersionedMap createMap(long state); + + public Set getStates(); + + public DiffCursor getDiffCursor(long fromState, long toState); +} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java b/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java new file mode 100644 index 00000000..723e5ec4 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/VersionedMapStoreConfiguration.java @@ -0,0 +1,48 @@ +package tools.refinery.store.map; + +public class VersionedMapStoreConfiguration { + + public VersionedMapStoreConfiguration() { + + } + public VersionedMapStoreConfiguration(boolean immutableWhenCommiting, boolean sharedNodeCacheInStore, + boolean sharedNodeCacheInStoreGroups) { + super(); + this.immutableWhenCommiting = immutableWhenCommiting; + this.sharedNodeCacheInStore = sharedNodeCacheInStore; + this.sharedNodeCacheInStoreGroups = sharedNodeCacheInStoreGroups; + } + + /** + * If true root is replaced with immutable node when committed. Frees up memory + * by releasing immutable nodes, but it may decrease performance by recreating + * immutable nodes upon changes (some evidence). + */ + private boolean immutableWhenCommiting = true; + public boolean isImmutableWhenCommiting() { + return immutableWhenCommiting; + } + + /** + * If true, all subnodes are cached within a {@link VersionedMapStore}. It + * decreases the memory requirements. It may increase performance by discovering + * existing immutable copy of a node (some evidence). Additional overhead may + * decrease performance (no example found). The option permits the efficient + * implementation of version deletion. + */ + private boolean sharedNodeCacheInStore = true; + public boolean isSharedNodeCacheInStore() { + return sharedNodeCacheInStore; + } + + /** + * If true, all subnodes are cached within a group of + * {@link VersionedMapStoreImpl#createSharedVersionedMapStores(int, ContinousHashProvider, Object, VersionedMapStoreConfiguration)}. + * If {@link VersionedMapStoreConfiguration#sharedNodeCacheInStore} is + * false, then it has currently no impact. 
+ */ + private boolean sharedNodeCacheInStoreGroups = true; + public boolean isSharedNodeCacheInStoreGroups() { + return sharedNodeCacheInStoreGroups; + } +} diff --git a/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java b/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java new file mode 100644 index 00000000..a626a5e8 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/VersionedMapStoreImpl.java @@ -0,0 +1,135 @@ +package tools.refinery.store.map; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.map.internal.ImmutableNode; +import tools.refinery.store.map.internal.MapDiffCursor; +import tools.refinery.store.map.internal.Node; +import tools.refinery.store.map.internal.VersionedMapImpl; + +public class VersionedMapStoreImpl implements VersionedMapStore { + // Configuration + private final boolean immutableWhenCommiting; + + // Static data + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + + // Dynamic data + protected final Map> states = new HashMap<>(); + protected final Map, ImmutableNode> nodeCache; + protected long nextID = 0; + + public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, + VersionedMapStoreConfiguration config) { + this.immutableWhenCommiting = config.isImmutableWhenCommiting(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + if (config.isSharedNodeCacheInStore()) { + nodeCache = new HashMap<>(); + } else { + nodeCache = null; + } + } + + private VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue, + Map, ImmutableNode> nodeCache, VersionedMapStoreConfiguration config) { + this.immutableWhenCommiting = config.isImmutableWhenCommiting(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.nodeCache = nodeCache; + } + + public VersionedMapStoreImpl(ContinousHashProvider hashProvider, V defaultValue) { + this(hashProvider, defaultValue, new VersionedMapStoreConfiguration()); + } + + public static List> createSharedVersionedMapStores(int amount, + ContinousHashProvider hashProvider, V defaultValue, + VersionedMapStoreConfiguration config) { + List> result = new ArrayList<>(amount); + if (config.isSharedNodeCacheInStoreGroups()) { + Map, ImmutableNode> nodeCache; + if (config.isSharedNodeCacheInStore()) { + nodeCache = new HashMap<>(); + } else { + nodeCache = null; + } + for (int i = 0; i < amount; i++) { + result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, nodeCache, config)); + } + } else { + for (int i = 0; i < amount; i++) { + result.add(new VersionedMapStoreImpl<>(hashProvider, defaultValue, config)); + } + } + return result; + } + + public static List> createSharedVersionedMapStores(int amount, + ContinousHashProvider hashProvider, V defaultValue) { + return createSharedVersionedMapStores(amount, hashProvider, defaultValue, new VersionedMapStoreConfiguration()); + } + + @Override + public synchronized Set getStates() { + return new HashSet<>(states.keySet()); + } + + @Override + public VersionedMap createMap() { + return new VersionedMapImpl<>(this, hashProvider, defaultValue); + } + + @Override + public VersionedMap createMap(long state) { + ImmutableNode data = revert(state); + return new VersionedMapImpl<>(this, hashProvider, defaultValue, data); + } + + + public 
synchronized ImmutableNode revert(long state) { + if (states.containsKey(state)) { + return states.get(state); + } else { + ArrayList existingKeys = new ArrayList<>(states.keySet()); + Collections.sort(existingKeys); + throw new IllegalArgumentException("Store does not contain state " + state + "! Avaliable states: " + + Arrays.toString(existingKeys.toArray())); + } + } + + public synchronized long commit(Node data, VersionedMapImpl mapToUpdateRoot) { + ImmutableNode immutable; + if (data != null) { + immutable = data.toImmutable(this.nodeCache); + } else { + immutable = null; + } + + if (nextID == Long.MAX_VALUE) + throw new IllegalStateException("Map store run out of Id-s"); + long id = nextID++; + this.states.put(id, immutable); + if (this.immutableWhenCommiting) { + mapToUpdateRoot.setRoot(immutable); + } + return id; + } + + @Override + public DiffCursor getDiffCursor(long fromState, long toState) { + VersionedMap map1 = createMap(fromState); + VersionedMap map2 = createMap(toState); + Cursor cursor1 = map1.getAll(); + Cursor cursor2 = map2.getAll(); + return new MapDiffCursor<>(this.hashProvider, this.defaultValue, cursor1, cursor2); + } +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/HashClash.java b/store/src/main/java/tools/refinery/store/map/internal/HashClash.java new file mode 100644 index 00000000..5402ed4a --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/HashClash.java @@ -0,0 +1,18 @@ +package tools.refinery.store.map.internal; + +enum HashClash { + /** + * Not stuck. + */ + NONE, + + /** + * Clashed, next we should return the key of cursor 1. + */ + STUCK_CURSOR_1, + + /** + * Clashed, next we should return the key of cursor 2. + */ + STUCK_CURSOR_2 +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java b/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java new file mode 100644 index 00000000..f68734ab --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/ImmutableNode.java @@ -0,0 +1,378 @@ +package tools.refinery.store.map.internal; + +import java.util.Arrays; +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public class ImmutableNode extends Node { + /** + * Bitmap defining the stored key and values. + */ + final int dataMap; + /** + * Bitmap defining the positions of further nodes. + */ + final int nodeMap; + /** + * Stores Keys, Values, and subnodes. Structure: (K,V)*,NODE; NODES are stored + * backwards. + */ + final Object[] content; + + /** + * Hash code derived from immutable hash code + */ + final int precalculatedHash; + + private ImmutableNode(int dataMap, int nodeMap, Object[] content, int precalculatedHash) { + super(); + this.dataMap = dataMap; + this.nodeMap = nodeMap; + this.content = content; + this.precalculatedHash = precalculatedHash; + } + + /** + * Constructor that copies a mutable node to an immutable. + * + * @param node A mutable node. + * @param cache A cache of existing immutable nodes. It can be used to search + * and place reference immutable nodes. It can be null, if no cache + * available. + * @return an immutable version of the input node. + */ + static ImmutableNode constructImmutable(MutableNode node, + Map, ImmutableNode> cache) { + // 1. try to return from cache + if (cache != null) { + ImmutableNode cachedResult = cache.get(node); + if (cachedResult != null) { + // 1.1 Already cached, return from cache. + return cachedResult; + } + } + + // 2. 
otherwise construct a new ImmutableNode + int size = 0; + for (int i = 0; i < node.content.length; i++) { + if (node.content[i] != null) { + size++; + } + } + + int datas = 0; + int nodes = 0; + int resultDataMap = 0; + int resultNodeMap = 0; + final Object[] resultContent = new Object[size]; + int bitposition = 1; + for (int i = 0; i < FACTOR; i++) { + Object key = node.content[i * 2]; + if (key != null) { + resultDataMap |= bitposition; + resultContent[datas * 2] = key; + resultContent[datas * 2 + 1] = node.content[i * 2 + 1]; + datas++; + } else { + @SuppressWarnings("unchecked") + var subnode = (Node) node.content[i * 2 + 1]; + if (subnode != null) { + ImmutableNode immutableSubnode = subnode.toImmutable(cache); + resultNodeMap |= bitposition; + resultContent[size - 1 - nodes] = immutableSubnode; + nodes++; + } + } + bitposition <<= 1; + } + final int resultHash = node.hashCode(); + var newImmutable = new ImmutableNode(resultDataMap, resultNodeMap, resultContent, resultHash); + + // 3. save new immutable. + if (cache != null) { + cache.put(newImmutable, newImmutable); + } + return newImmutable; + } + + private int index(int bitmap, int bitpos) { + return Integer.bitCount(bitmap & (bitpos - 1)); + } + + @Override + public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + int bitposition = 1 << selectedHashFragment; + // If the key is stored as a data + if ((dataMap & bitposition) != 0) { + int keyIndex = 2 * index(dataMap, bitposition); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[keyIndex]; + if (keyCandidate.equals(key)) { + @SuppressWarnings("unchecked") + V value = (V) content[keyIndex + 1]; + return value; + } else { + return defaultValue; + } + } + // the key is stored as a node + else if ((nodeMap & bitposition) != 0) { + int keyIndex = content.length - 1 - index(nodeMap, bitposition); + @SuppressWarnings("unchecked") + var subNode = (ImmutableNode) content[keyIndex]; + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + return subNode.getValue(key, hashProvider, defaultValue, newHash, newDepth); + } + // the key is not stored at all + else { + return defaultValue; + } + } + + @Override + public Node putValue(K key, V value, OldValueBox oldValue, ContinousHashProvider hashProvider, + V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + int bitposition = 1 << selectedHashFragment; + if ((dataMap & bitposition) != 0) { + int keyIndex = 2 * index(dataMap, bitposition); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[keyIndex]; + if (keyCandidate.equals(key)) { + if (value == defaultValue) { + // delete + MutableNode mutable = this.toMutable(); + return mutable.removeEntry(selectedHashFragment, oldValue); + } else if (value == content[keyIndex + 1]) { + // dont change + oldValue.setOldValue(value); + return this; + } else { + // update existing value + MutableNode mutable = this.toMutable(); + return mutable.updateValue(value, oldValue, selectedHashFragment); + } + } else { + if (value == defaultValue) { + // dont change + oldValue.setOldValue(defaultValue); + return this; + } else { + // add new key + value + MutableNode mutable = this.toMutable(); + return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); + } + } + } else if ((nodeMap & bitposition) != 0) { + int keyIndex = content.length - 1 - index(nodeMap, 
bitposition); + @SuppressWarnings("unchecked") + var subNode = (ImmutableNode) content[keyIndex]; + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + var newsubNode = subNode.putValue(key, value, oldValue, hashProvider, defaultValue, newHash, newDepth); + + if (subNode == newsubNode) { + // nothing changed + return this; + } else { + MutableNode mutable = toMutable(); + return mutable.updateWithSubNode(selectedHashFragment, newsubNode, value.equals(defaultValue)); + } + } else { + // add new key + value + MutableNode mutable = this.toMutable(); + return mutable.putValue(key, value, oldValue, hashProvider, defaultValue, hash, depth); + } + } + + @Override + public long getSize() { + int result = Integer.bitCount(this.dataMap); + for (int subnodeIndex = 0; subnodeIndex < Integer.bitCount(this.nodeMap); subnodeIndex++) { + @SuppressWarnings("unchecked") + var subnode = (ImmutableNode) this.content[this.content.length - 1 - subnodeIndex]; + result += subnode.getSize(); + } + return result; + } + + @Override + protected MutableNode toMutable() { + return new MutableNode<>(this); + } + + @Override + public ImmutableNode toImmutable(Map, ImmutableNode> cache) { + return this; + } + + @Override + protected MutableNode isMutable() { + return null; + } + + @SuppressWarnings("unchecked") + @Override + boolean moveToNext(MapCursor cursor) { + // 1. try to move to data + int datas = Integer.bitCount(this.dataMap); + if (cursor.dataIndex != MapCursor.INDEX_FINISH) { + int newDataIndex = cursor.dataIndex + 1; + if (newDataIndex < datas) { + cursor.dataIndex = newDataIndex; + cursor.key = (K) this.content[newDataIndex * 2]; + cursor.value = (V) this.content[newDataIndex * 2 + 1]; + return true; + } else { + cursor.dataIndex = MapCursor.INDEX_FINISH; + } + } + + // 2. look inside the subnodes + int nodes = Integer.bitCount(this.nodeMap); + int newNodeIndex = cursor.nodeIndexStack.peek() + 1; + if (newNodeIndex < nodes) { + // 2.1 found next subnode, move down to the subnode + Node subnode = (Node) this.content[this.content.length - 1 - newNodeIndex]; + cursor.dataIndex = MapCursor.INDEX_START; + cursor.nodeIndexStack.pop(); + cursor.nodeIndexStack.push(newNodeIndex); + cursor.nodeIndexStack.push(MapCursor.INDEX_START); + cursor.nodeStack.push(subnode); + return subnode.moveToNext(cursor); + } else { + // 3. 
no subnode found, move up + cursor.nodeStack.pop(); + cursor.nodeIndexStack.pop(); + if (!cursor.nodeStack.isEmpty()) { + Node supernode = cursor.nodeStack.peek(); + return supernode.moveToNext(cursor); + } else { + cursor.key = null; + cursor.value = null; + return false; + } + } + } + + @Override + public void prettyPrint(StringBuilder builder, int depth, int code) { + for (int i = 0; i < depth; i++) { + builder.append("\t"); + } + if (code >= 0) { + builder.append(code); + builder.append(":"); + } + builder.append("Immutable("); + boolean hadContent = false; + int dataMask = 1; + for (int i = 0; i < FACTOR; i++) { + if ((dataMask & dataMap) != 0) { + if (hadContent) { + builder.append(","); + } + builder.append(i); + builder.append(":["); + builder.append(content[2 * index(dataMap, dataMask)].toString()); + builder.append("]->["); + builder.append(content[2 * index(dataMap, dataMask) + 1].toString()); + builder.append("]"); + hadContent = true; + } + dataMask <<= 1; + } + builder.append(")"); + int nodeMask = 1; + for (int i = 0; i < FACTOR; i++) { + if ((nodeMask & nodeMap) != 0) { + @SuppressWarnings("unchecked") + Node subNode = (Node) content[content.length - 1 - index(nodeMap, nodeMask)]; + builder.append("\n"); + subNode.prettyPrint(builder, depth + 1, i); + } + nodeMask <<= 1; + } + } + + @Override + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { + if (depth > 0) { + boolean orphaned = Integer.bitCount(dataMap) == 1 && nodeMap == 0; + if (orphaned) { + throw new IllegalStateException("Orphaned node! " + dataMap + ": " + content[0]); + } + } + // check the place of data + + // check subnodes + for (int i = 0; i < Integer.bitCount(nodeMap); i++) { + @SuppressWarnings("unchecked") + var subnode = (Node) this.content[this.content.length - 1 - i]; + if (!(subnode instanceof ImmutableNode)) { + throw new IllegalStateException("Immutable node contains mutable subnodes!"); + } else { + subnode.checkIntegrity(hashProvider, defaultValue, depth + 1); + } + } + } + + @Override + public int hashCode() { + return this.precalculatedHash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (obj instanceof ImmutableNode other) { + return precalculatedHash == other.precalculatedHash && dataMap == other.dataMap && nodeMap == other.nodeMap + && Arrays.deepEquals(content, other.content); + } else if (obj instanceof MutableNode mutableObj) { + return ImmutableNode.compareImmutableMutable(this, mutableObj); + } else { + return false; + } + } + + public static boolean compareImmutableMutable(ImmutableNode immutable, MutableNode mutable) { + int datas = 0; + int nodes = 0; + final int immutableLength = immutable.content.length; + for (int i = 0; i < FACTOR; i++) { + Object key = mutable.content[i * 2]; + // For each key candidate + if (key != null) { + // Check whether a new Key-Value pair can fit into the immutable container + if (datas * 2 + nodes + 2 <= immutableLength) { + if (!immutable.content[datas * 2].equals(key) + || !immutable.content[datas * 2 + 1].equals(mutable.content[i * 2 + 1])) { + return false; + } + } else + return false; + datas++; + } else { + var mutableSubnode = (Node) mutable.content[i * 2 + 1]; + if (mutableSubnode != null) { + if (datas * 2 + nodes + 1 <= immutableLength) { + Object immutableSubnode = immutable.content[immutableLength - 1 - nodes]; + if (!mutableSubnode.equals(immutableSubnode)) { + return false; + } + nodes++; + } else { + return false; + 
} + } + } + } + return true; + } +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java b/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java new file mode 100644 index 00000000..b90f5b71 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/MapCursor.java @@ -0,0 +1,131 @@ +package tools.refinery.store.map.internal; + +import java.util.ArrayDeque; +import java.util.ConcurrentModificationException; +import java.util.Iterator; +import java.util.List; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; + +public class MapCursor implements Cursor { + // Constants + static final int INDEX_START = -1; + static final int INDEX_FINISH = -2; + + // Tree stack + ArrayDeque> nodeStack; + ArrayDeque nodeIndexStack; + int dataIndex; + + // Values + K key; + V value; + + // Hash code for checking concurrent modifications + final VersionedMap map; + final int creationHash; + + public MapCursor(Node root, VersionedMap map) { + // Initializing tree stack + super(); + this.nodeStack = new ArrayDeque<>(); + this.nodeIndexStack = new ArrayDeque<>(); + if(root != null) { + this.nodeStack.add(root); + this.nodeIndexStack.push(INDEX_START); + } + + this.dataIndex = INDEX_START; + + // Initializing cache + this.key = null; + this.value = null; + + // Initializing state + this.map=map; + this.creationHash = map.hashCode(); + } + + public K getKey() { + return key; + } + + public V getValue() { + return value; + } + + public boolean isTerminated() { + return this.nodeStack.isEmpty(); + } + + public boolean move() { + if(isDirty()) { + throw new ConcurrentModificationException(); + } + if(!isTerminated()) { + boolean result = this.nodeStack.peek().moveToNext(this); + if(this.nodeIndexStack.size() != this.nodeStack.size()) { + throw new IllegalArgumentException("Node stack is corrupted by illegal moves!"); + } + return result; + } + return false; + } + public boolean skipCurrentNode() { + nodeStack.pop(); + nodeIndexStack.pop(); + dataIndex = INDEX_FINISH; + return move(); + } + @Override + public boolean isDirty() { + return this.map.hashCode() != this.creationHash; + } + @Override + public List> getDependingMaps() { + return List.of(this.map); + } + + public static boolean sameSubnode(MapCursor cursor1, MapCursor cursor2) { + Node nodeOfCursor1 = cursor1.nodeStack.peek(); + Node nodeOfCursor2 = cursor2.nodeStack.peek(); + if(nodeOfCursor1 != null && nodeOfCursor2 != null) { + return nodeOfCursor1.equals(nodeOfCursor2); + } else { + return false; + } + } + + /** + * + * @param + * @param + * @param cursor1 + * @param cursor2 + * @return Positive number if cursor 1 is behind, negative number if cursor 2 is behind, and 0 if they are at the same position. 
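+ * The node index stacks are walked from the root downwards, and the data index of
+ * the current node serves as the final tiebreaker; {@link MapDiffCursor} uses this
+ * ordering to decide which of its two cursors has to be advanced next.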
+ */ + public static int compare(MapCursor cursor1, MapCursor cursor2) { + // two cursors are equally deep + Iterator stack1 = cursor1.nodeIndexStack.descendingIterator(); + Iterator stack2 = cursor2.nodeIndexStack.descendingIterator(); + if(stack1.hasNext()) { + if(!stack2.hasNext()) { + // stack 2 has no more element, thus stack 1 is deeper + return 1; + } + int val1 = stack1.next(); + int val2 = stack2.next(); + if(val1 < val2) { + return -1; + } else if(val2 < val1) { + return 1; + } + } + if(stack2.hasNext()) { + // stack 2 has more element, thus stack 2 is deeper + return 1; + } + return Integer.compare(cursor1.dataIndex, cursor2.dataIndex); + } +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java b/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java new file mode 100644 index 00000000..42333635 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/MapDiffCursor.java @@ -0,0 +1,221 @@ +package tools.refinery.store.map.internal; + +import java.util.List; +import java.util.stream.Stream; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; + +/** + * A cursor representing the difference between two states of a map. + * + * @author Oszkar Semerath + * + */ +public class MapDiffCursor implements DiffCursor, Cursor { + /** + * Default value representing missing elements. + */ + private V defaultValue; + private MapCursor cursor1; + private MapCursor cursor2; + private ContinousHashProvider hashProvider; + + // Values + private K key; + private V fromValue; + private V toValue; + + // State + /** + * Positive number if cursor 1 is behind, negative number if cursor 2 is behind, + * and 0 if they are at the same position. 
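+ * Recomputed via {@code MapCursor.compare} in {@code updateState()} and forced to 0
+ * while a hash clash between the two cursors is being resolved.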
+ */ + private int cursorRelation; + private HashClash hashClash = HashClash.NONE; + + public MapDiffCursor(ContinousHashProvider hashProvider, V defaultValue, Cursor cursor1, + Cursor cursor2) { + super(); + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.cursor1 = (MapCursor) cursor1; + this.cursor2 = (MapCursor) cursor2; + } + + @Override + public K getKey() { + return key; + } + + @Override + public V getFromValue() { + return fromValue; + } + + @Override + public V getToValue() { + return toValue; + } + + @Override + public V getValue() { + return getToValue(); + } + + public boolean isTerminated() { + return cursor1.isTerminated() && cursor2.isTerminated(); + } + + @Override + public boolean isDirty() { + return this.cursor1.isDirty() || this.cursor2.isDirty(); + } + + @Override + public List> getDependingMaps() { + return Stream.concat(cursor1.getDependingMaps().stream(), cursor2.getDependingMaps().stream()).toList(); + } + + protected void updateState() { + if (!isTerminated()) { + this.cursorRelation = MapCursor.compare(cursor1, cursor2); + if (cursorRelation > 0 || cursor2.isTerminated()) { + this.key = cursor1.getKey(); + this.fromValue = cursor1.getValue(); + this.toValue = defaultValue; + } else if (cursorRelation < 0 || cursor1.isTerminated()) { + this.key = cursor2.getKey(); + this.fromValue = defaultValue; + this.toValue = cursor1.getValue(); + } else { + // cursor1 = cursor2 + if (cursor1.getKey().equals(cursor2.getKey())) { + this.key = cursor1.getKey(); + this.fromValue = cursor1.getValue(); + this.toValue = defaultValue; + } else { + resolveHashClashWithFirstEntry(); + } + } + } + } + + protected void resolveHashClashWithFirstEntry() { + int compareResult = this.hashProvider.compare(cursor1.key, cursor2.key); + if (compareResult < 0) { + this.hashClash = HashClash.STUCK_CURSOR_2; + this.cursorRelation = 0; + this.key = cursor1.key; + this.fromValue = cursor1.value; + this.toValue = defaultValue; + } else if (compareResult > 0) { + this.hashClash = HashClash.STUCK_CURSOR_1; + this.cursorRelation = 0; + this.key = cursor2.key; + this.fromValue = defaultValue; + this.toValue = cursor2.value; + } else { + throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); + } + } + + protected boolean isInHashClash() { + return this.hashClash != HashClash.NONE; + } + + protected void resolveHashClashWithSecondEntry() { + switch (this.hashClash) { + case STUCK_CURSOR_1: + this.hashClash = HashClash.NONE; + this.cursorRelation = 0; + this.key = cursor1.key; + this.fromValue = cursor1.value; + this.toValue = defaultValue; + break; + case STUCK_CURSOR_2: + this.hashClash = HashClash.NONE; + this.cursorRelation = 0; + this.key = cursor2.key; + this.fromValue = defaultValue; + this.toValue = cursor2.value; + break; + default: + throw new IllegalArgumentException("Inconsistent compare result for diffcursor"); + } + } + + protected boolean sameValues() { + if (this.fromValue == null) { + return this.toValue == null; + } else { + return this.fromValue.equals(this.toValue); + } + } + + protected boolean moveOne() { + if (isTerminated()) { + return false; + } + if (this.cursorRelation > 0 || cursor2.isTerminated()) { + return cursor1.move(); + } else if (this.cursorRelation < 0 || cursor1.isTerminated()) { + return cursor2.move(); + } else { + boolean moved1 = cursor1.move(); + boolean moved2 = cursor2.move(); + return moved1 && moved2; + } + } + + private boolean skipNode() { + if (isTerminated()) { + throw new 
IllegalStateException("DiffCursor tries to skip when terminated!"); + } + boolean update1 = cursor1.skipCurrentNode(); + boolean update2 = cursor2.skipCurrentNode(); + updateState(); + return update1 && update2; + } + + protected boolean moveToConsistentState() { + if (!isTerminated()) { + boolean changed; + boolean lastResult = true; + do { + changed = false; + if (MapCursor.sameSubnode(cursor1, cursor2)) { + lastResult = skipNode(); + changed = true; + } + if (sameValues()) { + lastResult = moveOne(); + changed = true; + } + updateState(); + } while (changed && !isTerminated()); + return lastResult; + } else { + return false; + } + } + + public boolean move() { + if (!isTerminated()) { + if (isInHashClash()) { + this.resolveHashClashWithSecondEntry(); + return true; + } else { + if (moveOne()) { + return moveToConsistentState(); + } else { + return false; + } + } + + } else + return false; + } +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java b/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java new file mode 100644 index 00000000..7e94758c --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/MutableNode.java @@ -0,0 +1,456 @@ +package tools.refinery.store.map.internal; + +import java.util.Arrays; +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public class MutableNode extends Node { + int cachedHash; + protected Object[] content; + + protected MutableNode() { + this.content = new Object[2 * FACTOR]; + updateHash(); + } + + public static MutableNode initialize(K key, V value, ContinousHashProvider hashProvider, + V defaultValue) { + if (value == defaultValue) { + return null; + } else { + int hash = hashProvider.getHash(key, 0); + int fragment = hashFragment(hash, 0); + MutableNode res = new MutableNode<>(); + res.content[2 * fragment] = key; + res.content[2 * fragment + 1] = value; + res.updateHash(); + return res; + } + } + + /** + * Constructs a {@link MutableNode} as a copy of an {@link ImmutableNode} + * + * @param node + */ + protected MutableNode(ImmutableNode node) { + this.content = new Object[2 * FACTOR]; + int dataUsed = 0; + int nodeUsed = 0; + for (int i = 0; i < FACTOR; i++) { + int bitposition = 1 << i; + if ((node.dataMap & bitposition) != 0) { + content[2 * i] = node.content[dataUsed * 2]; + content[2 * i + 1] = node.content[dataUsed * 2 + 1]; + dataUsed++; + } else if ((node.nodeMap & bitposition) != 0) { + content[2 * i + 1] = node.content[node.content.length - 1 - nodeUsed]; + nodeUsed++; + } + } + this.cachedHash = node.hashCode(); + } + + @Override + public V getValue(K key, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + @SuppressWarnings("unchecked") + K keyCandidate = (K) this.content[2 * selectedHashFragment]; + if (keyCandidate != null) { + if (keyCandidate.equals(key)) { + @SuppressWarnings("unchecked") + V value = (V) this.content[2 * selectedHashFragment + 1]; + return value; + } else { + return defaultValue; + } + } else { + @SuppressWarnings("unchecked") + var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; + if (nodeCandidate != null) { + int newDepth = depth + 1; + int newHash = newHash(hashProvider, key, hash, newDepth); + return nodeCandidate.getValue(key, hashProvider, defaultValue, newHash, newDepth); + } else { + return defaultValue; + } + } + } + + @Override + public Node putValue(K key, V value, OldValueBox oldValue, 
ContinousHashProvider hashProvider, + V defaultValue, int hash, int depth) { + int selectedHashFragment = hashFragment(hash, shiftDepth(depth)); + @SuppressWarnings("unchecked") + K keyCandidate = (K) content[2 * selectedHashFragment]; + if (keyCandidate != null) { + // If has key + if (keyCandidate.equals(key)) { + // The key is equals to an existing key -> update entry + if (value == defaultValue) { + return removeEntry(selectedHashFragment, oldValue); + } else { + return updateValue(value, oldValue, selectedHashFragment); + } + } else { + // The key is not equivalent to an existing key on the same hash bin + // -> split entry if it is necessary + if (value == defaultValue) { + // Value is default -> do not need to add new node + oldValue.setOldValue(defaultValue); + return this; + } else { + // Value is not default -> Split entry data to a new node + oldValue.setOldValue(defaultValue); + return moveDownAndSplit(hashProvider, key, value, keyCandidate, hash, depth, selectedHashFragment); + } + } + } else { + // If it does not have key, check for value + @SuppressWarnings("unchecked") + var nodeCandidate = (Node) content[2 * selectedHashFragment + 1]; + if (nodeCandidate != null) { + // If it has value, it is a subnode -> upate that + var newNode = nodeCandidate.putValue(key, value, oldValue, hashProvider, defaultValue, + newHash(hashProvider, key, hash, depth + 1), depth + 1); + return updateWithSubNode(selectedHashFragment, newNode, value.equals(defaultValue)); + } else { + // If it does not have value, put it in the empty place + if (value == defaultValue) { + // dont need to add new key-value pair + oldValue.setOldValue(defaultValue); + return this; + } else { + return addEntry(key, value, oldValue, selectedHashFragment); + } + + } + } + } + + private Node addEntry(K key, V value, OldValueBox oldValueBox, int selectedHashFragment) { + content[2 * selectedHashFragment] = key; + @SuppressWarnings("unchecked") + V oldValue = (V) content[2 * selectedHashFragment + 1]; + oldValueBox.setOldValue(oldValue); + content[2 * selectedHashFragment + 1] = value; + updateHash(); + return this; + } + + /** + * Updates an entry in a selected hash-fragment to a non-default value. + * + * @param value + * @param selectedHashFragment + * @return + */ + @SuppressWarnings("unchecked") + Node updateValue(V value, OldValueBox oldValue, int selectedHashFragment) { + oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); + content[2 * selectedHashFragment + 1] = value; + updateHash(); + return this; + } + + /** + * + * @param selectedHashFragment + * @param newNode + * @return + */ + Node updateWithSubNode(int selectedHashFragment, Node newNode, boolean deletionHappened) { + if (deletionHappened) { + if (newNode == null) { + // Check whether this node become empty + content[2 * selectedHashFragment + 1] = null; // i.e. 
the new node + if (hasContent()) { + updateHash(); + return this; + } else { + return null; + } + } else { + // check whether newNode is orphan + MutableNode immutableNewNode = newNode.isMutable(); + if (immutableNewNode != null) { + int orphaned = immutableNewNode.isOrphaned(); + if (orphaned >= 0) { + // orphan subnode data is replaced with data + content[2 * selectedHashFragment] = immutableNewNode.content[orphaned * 2]; + content[2 * selectedHashFragment + 1] = immutableNewNode.content[orphaned * 2 + 1]; + updateHash(); + return this; + } + } + } + } + // normal behaviour + content[2 * selectedHashFragment + 1] = newNode; + updateHash(); + return this; + + } + + private boolean hasContent() { + for (Object element : this.content) { + if (element != null) + return true; + } + return false; + } + + @Override + protected MutableNode isMutable() { + return this; + } + + protected int isOrphaned() { + int dataFound = -2; + for (int i = 0; i < FACTOR; i++) { + if (content[i * 2] != null) { + if (dataFound >= 0) { + return -1; + } else { + dataFound = i; + } + } else if (content[i * 2 + 1] != null) { + return -3; + } + } + return dataFound; + } + + @SuppressWarnings("unchecked") + private Node moveDownAndSplit(ContinousHashProvider hashProvider, K newKey, V newValue, + K previousKey, int hashOfNewKey, int depth, int selectedHashFragmentOfCurrentDepth) { + V previousValue = (V) content[2 * selectedHashFragmentOfCurrentDepth + 1]; + + MutableNode newSubNode = newNodeWithTwoEntries(hashProvider, previousKey, previousValue, + hashProvider.getHash(previousKey, hashDepth(depth)), newKey, newValue, hashOfNewKey, depth + 1); + + content[2 * selectedHashFragmentOfCurrentDepth] = null; + content[2 * selectedHashFragmentOfCurrentDepth + 1] = newSubNode; + updateHash(); + return this; + } + + // Pass everything as parameters for performance. 
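+ // The squid:S107 warning about the parameter count is suppressed below instead of
+ // introducing a parameter object, so no extra carrier object is allocated while
+ // splitting a node.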
+ @SuppressWarnings("squid:S107") + private MutableNode newNodeWithTwoEntries(ContinousHashProvider hashProvider, K key1, V value1, + int oldHash1, K key2, V value2, int oldHash2, int newdepth) { + int newHash1 = newHash(hashProvider, key1, oldHash1, newdepth); + int newHash2 = newHash(hashProvider, key2, oldHash2, newdepth); + int newFragment1 = hashFragment(newHash1, shiftDepth(newdepth)); + int newFragment2 = hashFragment(newHash2, shiftDepth(newdepth)); + + MutableNode subNode = new MutableNode<>(); + if (newFragment1 != newFragment2) { + subNode.content[newFragment1 * 2] = key1; + subNode.content[newFragment1 * 2 + 1] = value1; + + subNode.content[newFragment2 * 2] = key2; + subNode.content[newFragment2 * 2 + 1] = value2; + } else { + MutableNode subSubNode = newNodeWithTwoEntries(hashProvider, key1, value1, newHash1, key2, value2, + newHash2, newdepth + 1); + subNode.content[newFragment1 * 2 + 1] = subSubNode; + } + subNode.updateHash(); + return subNode; + } + + @SuppressWarnings("unchecked") + Node removeEntry(int selectedHashFragment, OldValueBox oldValue) { + content[2 * selectedHashFragment] = null; + oldValue.setOldValue((V) content[2 * selectedHashFragment + 1]); + content[2 * selectedHashFragment + 1] = null; + if (hasContent()) { + updateHash(); + return this; + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + @Override + public long getSize() { + int size = 0; + for (int i = 0; i < FACTOR; i++) { + if (content[i * 2] != null) { + size++; + } else { + Node nodeCandidate = (Node) content[i * 2 + 1]; + if (nodeCandidate != null) { + size += nodeCandidate.getSize(); + } + } + } + return size; + } + + @Override + protected MutableNode toMutable() { + return this; + } + + @Override + public ImmutableNode toImmutable(Map, ImmutableNode> cache) { + return ImmutableNode.constructImmutable(this, cache); + } + + @SuppressWarnings("unchecked") + @Override + boolean moveToNext(MapCursor cursor) { + // 1. try to move to data + if (cursor.dataIndex != MapCursor.INDEX_FINISH) { + for (int index = cursor.dataIndex + 1; index < FACTOR; index++) { + if (this.content[index * 2] != null) { + // 1.1 found next data + cursor.dataIndex = index; + cursor.key = (K) this.content[index * 2]; + cursor.value = (V) this.content[index * 2 + 1]; + return true; + } + } + cursor.dataIndex = MapCursor.INDEX_FINISH; + } + + // 2. look inside the subnodes + for (int index = cursor.nodeIndexStack.peek() + 1; index < FACTOR; index++) { + if (this.content[index * 2] == null && this.content[index * 2 + 1] != null) { + // 2.1 found next subnode, move down to the subnode + Node subnode = (Node) this.content[index * 2 + 1]; + + cursor.dataIndex = MapCursor.INDEX_START; + cursor.nodeIndexStack.pop(); + cursor.nodeIndexStack.push(index); + cursor.nodeIndexStack.push(MapCursor.INDEX_START); + cursor.nodeStack.push(subnode); + + return subnode.moveToNext(cursor); + } + } + // 3. 
no subnode found, move up + cursor.nodeStack.pop(); + cursor.nodeIndexStack.pop(); + if (!cursor.nodeStack.isEmpty()) { + Node supernode = cursor.nodeStack.peek(); + return supernode.moveToNext(cursor); + } else { + cursor.key = null; + cursor.value = null; + return false; + } + } + + @Override + public void prettyPrint(StringBuilder builder, int depth, int code) { + for (int i = 0; i < depth; i++) { + builder.append("\t"); + } + if (code >= 0) { + builder.append(code); + builder.append(":"); + } + builder.append("Mutable("); + // print content + boolean hadContent = false; + for (int i = 0; i < FACTOR; i++) { + if (content[2 * i] != null) { + if (hadContent) { + builder.append(","); + } + builder.append(i); + builder.append(":["); + builder.append(content[2 * i].toString()); + builder.append("]->["); + builder.append(content[2 * i + 1].toString()); + builder.append("]"); + hadContent = true; + } + } + builder.append(")"); + // print subnodes + for (int i = 0; i < FACTOR; i++) { + if (content[2 * i] == null && content[2 * i + 1] != null) { + @SuppressWarnings("unchecked") + Node subNode = (Node) content[2 * i + 1]; + builder.append("\n"); + subNode.prettyPrint(builder, depth + 1, i); + } + } + } + + @Override + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) { + // check for orphan nodes + if (depth > 0) { + int orphaned = isOrphaned(); + if (orphaned >= 0) { + throw new IllegalStateException("Orphaned node! " + orphaned + ": " + content[2 * orphaned]); + } + } + // check the place of data + for (int i = 0; i < FACTOR; i++) { + if (this.content[2 * i] != null) { + @SuppressWarnings("unchecked") + K key = (K) this.content[2 * i]; + @SuppressWarnings("unchecked") + V value = (V) this.content[2 * i + 1]; + + if (value == defaultValue) { + throw new IllegalStateException("Node contains default value!"); + } + int hashCode = hashProvider.getHash(key, hashDepth(depth)); + int shiftDepth = shiftDepth(depth); + int selectedHashFragment = hashFragment(hashCode, shiftDepth); + if (i != selectedHashFragment) { + throw new IllegalStateException("Key " + key + " with hash code " + hashCode + + " is in bad place! Fragment=" + selectedHashFragment + ", Place=" + i); + } + } + } + // check subnodes + for (int i = 0; i < FACTOR; i++) { + if (this.content[2 * i + 1] != null && this.content[2 * i] == null) { + @SuppressWarnings("unchecked") + var subNode = (Node) this.content[2 * i + 1]; + subNode.checkIntegrity(hashProvider, defaultValue, depth + 1); + } + } + // check the hash + int oldHash = this.cachedHash; + updateHash(); + int newHash = this.cachedHash; + if (oldHash != newHash) { + throw new IllegalStateException("Hash code was not up to date! 
(old=" + oldHash + ",new=" + newHash + ")"); + } + } + + protected void updateHash() { + this.cachedHash = Arrays.hashCode(content); + } + + @Override + public int hashCode() { + return this.cachedHash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (obj instanceof MutableNode mutableObj) { + return Arrays.deepEquals(this.content, mutableObj.content); + } else if (obj instanceof ImmutableNode immutableObj) { + return ImmutableNode.compareImmutableMutable(immutableObj, this); + } else { + return false; + } + } +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/Node.java b/store/src/main/java/tools/refinery/store/map/internal/Node.java new file mode 100644 index 00000000..234a4ff3 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/Node.java @@ -0,0 +1,85 @@ +package tools.refinery.store.map.internal; + +import java.util.Map; + +import tools.refinery.store.map.ContinousHashProvider; + +public abstract class Node{ + public static final int BRANCHING_FACTOR_BITS = 5; + public static final int FACTOR = 1< hashProvider, V defaultValue, int hash, int depth); + public abstract Node putValue(K key, V value, OldValueBox old, ContinousHashProvider hashProvider, V defaultValue, int hash, int depth); + public abstract long getSize(); + + abstract MutableNode toMutable(); + public abstract ImmutableNode toImmutable( + Map,ImmutableNode> cache); + protected abstract MutableNode isMutable(); + /** + * Moves a {@link MapCursor} to its next position. + * @param cursor the cursor + * @return Whether there was a next value to move on. + */ + abstract boolean moveToNext(MapCursor cursor); + + ///////// FOR printing + public abstract void prettyPrint(StringBuilder builder, int depth, int code); + @Override + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + prettyPrint(stringBuilder, 0, -1); + return stringBuilder.toString(); + } + public void checkIntegrity(ContinousHashProvider hashProvider, V defaultValue, int depth) {} + +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java b/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java new file mode 100644 index 00000000..5534c703 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/OldValueBox.java @@ -0,0 +1,19 @@ +package tools.refinery.store.map.internal; + +public class OldValueBox{ + V oldValue; + boolean isSet = false; + + public V getOldValue() { + if(!isSet) throw new IllegalStateException(); + isSet = false; + return oldValue; + } + + public void setOldValue(V ouldValue) { + if(isSet) throw new IllegalStateException(); + this.oldValue = ouldValue; + isSet = true; + } + +} diff --git a/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java b/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java new file mode 100644 index 00000000..346fe596 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/map/internal/VersionedMapImpl.java @@ -0,0 +1,171 @@ +package tools.refinery.store.map.internal; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.VersionedMapStoreImpl; + +/** + * Not threadSafe in itself + * @author Oszkar Semerath + * + * 
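+ * <p>
+ * A minimal usage sketch; {@code hashProvider} below stands for any suitable
+ * {@link ContinousHashProvider} implementation for the key type (it is assumed to
+ * exist and is not defined here):
+ * <pre>{@code
+ * VersionedMapStoreImpl<String, Boolean> store =
+ *     new VersionedMapStoreImpl<>(hashProvider, false);
+ * VersionedMap<String, Boolean> map = store.createMap();
+ * map.put("present", true);
+ * long version = map.commit();
+ * map.put("present", false); // writing the default value removes the entry
+ * map.restore(version);      // map.get("present") is true again
+ * }</pre>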
@param + * @param + */ +public class VersionedMapImpl implements VersionedMap{ + protected final VersionedMapStoreImpl store; + + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + protected Node root; + + private OldValueBox oldValueBox = new OldValueBox<>(); + + public VersionedMapImpl( + VersionedMapStoreImpl store, + ContinousHashProvider hashProvider, + V defaultValue) + { + this.store = store; + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.root = null; + } + public VersionedMapImpl( + VersionedMapStoreImpl store, + ContinousHashProvider hashProvider, + V defaultValue, Node data) + { + this.store = store; + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + this.root = data; + } + + public V getDefaultValue() { + return defaultValue; + } + public ContinousHashProvider getHashProvider() { + return hashProvider; + } + @Override + public V put(K key, V value) { + if(root!=null) { + root = root.putValue(key, value, oldValueBox, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); + return oldValueBox.getOldValue(); + } else { + root = MutableNode.initialize(key, value, hashProvider, defaultValue); + return defaultValue; + } + } + + @Override + public void putAll(Cursor cursor) { + if(cursor.getDependingMaps().contains(this)) { + List keys = new LinkedList<>(); + List values = new LinkedList<>(); + while(cursor.move()) { + keys.add(cursor.getKey()); + values.add(cursor.getValue()); + } + Iterator keyIterator = keys.iterator(); + Iterator valueIterator = values.iterator(); + while(keyIterator.hasNext()) { + this.put(keyIterator.next(), valueIterator.next()); + } + } else { + while(cursor.move()) { + this.put(cursor.getKey(), cursor.getValue()); + } + } + } + + @Override + public V get(K key) { + if(root!=null) { + return root.getValue(key, hashProvider, defaultValue, hashProvider.getHash(key, 0), 0); + } else { + return defaultValue; + } + } + @Override + public long getSize() { + if(root == null) { + return 0; + } else { + return root.getSize(); + } + } + + @Override + public Cursor getAll() { + return new MapCursor<>(this.root,this); + } + @Override + public DiffCursor getDiffCursor(long toVersion) { + Cursor fromCursor = this.getAll(); + VersionedMap toMap = this.store.createMap(toVersion); + Cursor toCursor = toMap.getAll(); + return new MapDiffCursor<>(this.hashProvider,this.defaultValue, fromCursor, toCursor); + + } + + + @Override + public long commit() { + return this.store.commit(root,this); + } + public void setRoot(Node root) { + this.root = root; + } + + @Override + public void restore(long state) { + root = this.store.revert(state); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((root == null) ? 
0 : root.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + VersionedMapImpl other = (VersionedMapImpl) obj; + if (root == null) { + if (other.root != null) + return false; + } else if (!root.equals(other.root)) + return false; + return true; + } + public void prettyPrint() { + StringBuilder s = new StringBuilder(); + if(this.root != null) { + this.root.prettyPrint(s, 0, -1); + System.out.println(s.toString()); + } else { + System.out.println("empty tree"); + } + } + public void checkIntegrity() { + if(this.root != null) { + this.root.checkIntegrity(hashProvider, defaultValue, 0); + } + } + +} diff --git a/store/src/main/java/tools/refinery/store/model/Model.java b/store/src/main/java/tools/refinery/store/model/Model.java new file mode 100644 index 00000000..a42d711a --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/Model.java @@ -0,0 +1,20 @@ +package tools.refinery.store.model; + +import java.util.Set; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.Versioned; +import tools.refinery.store.model.representation.DataRepresentation; + +public interface Model extends Versioned{ + @SuppressWarnings("squid:S1452") + Set> getDataRepresentations(); + + V get(DataRepresentation representation, K key); + Cursor getAll(DataRepresentation representation); + V put(DataRepresentation representation, K key, V value); + void putAll(DataRepresentation representation, Cursor cursor); + long getSize(DataRepresentation representation); + + ModelDiffCursor getDiffCursor(long to); +} diff --git a/store/src/main/java/tools/refinery/store/model/ModelCursor.java b/store/src/main/java/tools/refinery/store/model/ModelCursor.java new file mode 100644 index 00000000..a835cf69 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/ModelCursor.java @@ -0,0 +1,25 @@ +package tools.refinery.store.model; + +import java.util.Map; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelCursor { + final Map,Cursor> cursors; + + public ModelCursor(Map, Cursor> cursors) { + super(); + this.cursors = cursors; + } + + @SuppressWarnings("unchecked") + public Cursor getCursor(DataRepresentation representation) { + Cursor cursor = cursors.get(representation); + if(cursor != null) { + return (Cursor) cursor; + } else { + throw new IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); + } + } +} diff --git a/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java b/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java new file mode 100644 index 00000000..91990fa6 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/ModelDiffCursor.java @@ -0,0 +1,26 @@ +package tools.refinery.store.model; + +import java.util.Map; + +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelDiffCursor { + final Map,DiffCursor> diffcursors; + + public ModelDiffCursor(Map, DiffCursor> diffcursors) { + super(); + this.diffcursors = diffcursors; + } + + @SuppressWarnings("unchecked") + public DiffCursor getCursor(DataRepresentation representation) { + Cursor cursor = diffcursors.get(representation); + if(cursor != null) { + return (DiffCursor) cursor; + } else { + 
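+ // The available diff cursors are fixed when the ModelDiffCursor is constructed,
+ // so an unknown representation can only be reported as an error.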
throw new IllegalArgumentException("ModelCursor does not contain cursor for representation "+representation); + } + } +} diff --git a/store/src/main/java/tools/refinery/store/model/ModelStore.java b/store/src/main/java/tools/refinery/store/model/ModelStore.java new file mode 100644 index 00000000..682a0e78 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/ModelStore.java @@ -0,0 +1,16 @@ +package tools.refinery.store.model; + +import java.util.Set; + +import tools.refinery.store.model.representation.DataRepresentation; + +public interface ModelStore { + @SuppressWarnings("squid:S1452") + Set> getDataRepresentations(); + + Model createModel(); + Model createModel(long state); + + Set getStates(); + ModelDiffCursor getDiffCursor(long from, long to); +} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java b/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java new file mode 100644 index 00000000..97406cbb --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/ModelStoreImpl.java @@ -0,0 +1,122 @@ +package tools.refinery.store.model; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.model.internal.ModelImpl; +import tools.refinery.store.model.internal.SimilarRelationEquivalenceClass; +import tools.refinery.store.model.representation.AuxilaryData; +import tools.refinery.store.model.representation.DataRepresentation; +import tools.refinery.store.model.representation.Relation; + +import java.util.Set; + +public class ModelStoreImpl implements ModelStore { + + private final Map, VersionedMapStore> stores; + + public ModelStoreImpl(Set> dataRepresentations) { + stores = initStores(dataRepresentations); + } + + private Map, VersionedMapStore> initStores( + Set> dataRepresentations) { + Map, VersionedMapStore> result = new HashMap<>(); + + Map>> symbolRepresentationsPerHashPerArity = new HashMap<>(); + + for (DataRepresentation dataRepresentation : dataRepresentations) { + if (dataRepresentation instanceof Relation symbolRepresentation) { + addOrCreate(symbolRepresentationsPerHashPerArity, + new SimilarRelationEquivalenceClass(symbolRepresentation), symbolRepresentation); + } else if (dataRepresentation instanceof AuxilaryData) { + VersionedMapStoreImpl store = new VersionedMapStoreImpl<>(dataRepresentation.getHashProvider(), + dataRepresentation.getDefaultValue()); + result.put(dataRepresentation, store); + } else { + throw new UnsupportedOperationException( + "Model store does not have strategy to use " + dataRepresentation.getClass() + "!"); + } + } + for (List> symbolGroup : symbolRepresentationsPerHashPerArity.values()) { + initRepresentationGroup(result, symbolGroup); + } + + return result; + } + + private void initRepresentationGroup(Map, VersionedMapStore> result, + List> symbolGroup) { + final ContinousHashProvider hashProvider = symbolGroup.get(0).getHashProvider(); + final Object defaultValue = symbolGroup.get(0).getDefaultValue(); + + List> maps = VersionedMapStoreImpl + .createSharedVersionedMapStores(symbolGroup.size(), hashProvider, defaultValue); + + for (int i = 0; i < symbolGroup.size(); i++) { + 
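+ // Each relation of the group gets its own store; because the stores were created
+ // together, structurally equal immutable nodes can be shared between them.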
result.put(symbolGroup.get(i), maps.get(i)); + } + } + + private static void addOrCreate(Map> map, K key, V value) { + List list; + if (map.containsKey(key)) { + list = map.get(key); + } else { + list = new LinkedList<>(); + map.put(key, list); + } + list.add(value); + } + + @Override + public Set> getDataRepresentations() { + return this.stores.keySet(); + } + + @Override + public ModelImpl createModel() { + Map, VersionedMap> maps = new HashMap<>(); + for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { + maps.put(entry.getKey(), entry.getValue().createMap()); + } + return new ModelImpl(this, maps); + } + + @Override + public synchronized ModelImpl createModel(long state) { + Map, VersionedMap> maps = new HashMap<>(); + for (Entry, VersionedMapStore> entry : this.stores.entrySet()) { + maps.put(entry.getKey(), entry.getValue().createMap(state)); + } + return new ModelImpl(this, maps); + } + + @Override + public synchronized Set getStates() { + var iterator = stores.values().iterator(); + if (iterator.hasNext()) { + return Set.copyOf(iterator.next().getStates()); + } + return Set.of(0l); + } + + @Override + public synchronized ModelDiffCursor getDiffCursor(long from, long to) { + Map, DiffCursor> diffcursors = new HashMap<>(); + for (Entry, VersionedMapStore> entry : stores.entrySet()) { + DataRepresentation representation = entry.getKey(); + DiffCursor diffCursor = entry.getValue().getDiffCursor(from, to); + diffcursors.put(representation, diffCursor); + } + return new ModelDiffCursor(diffcursors); + } +} diff --git a/store/src/main/java/tools/refinery/store/model/Tuple.java b/store/src/main/java/tools/refinery/store/model/Tuple.java new file mode 100644 index 00000000..0aae3727 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/Tuple.java @@ -0,0 +1,148 @@ +package tools.refinery.store.model; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public abstract class Tuple { + private static final int CUSTOMTUPLESIZE = 2; + protected static final List tuple1Cash = new ArrayList<>(1024); + + public abstract int getSize(); + public abstract int get(int element); + public abstract int[] toArray(); + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + b.append("["); + for(int i = 0; i= tuple1Cash.size()) { + newlyCreated = new Tuple1(tuple1Cash.size()); + tuple1Cash.add(newlyCreated); + } + return newlyCreated; + } + } + + public static Tuple of(int... 
values) { + if(values.length == 0) { + return new Tuple0(); + } else if(values.length == 1) { + return of1(values[0]); + } else if(values.length == 2) { + return new Tuple2(values[0],values[1]); + } else return new TupleN(values); + } + + protected IllegalArgumentException doesNotContain(int element) { + return new IllegalArgumentException("Tuple does not contain element "+element); + } + + public static class Tuple0 extends Tuple{ + protected Tuple0() { } + @Override public int getSize() { return 0; } + @Override public int get(int element) { + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{};} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + return true; + } + } + public static class Tuple1 extends Tuple{ + final int value0; + protected Tuple1(int value0) { this.value0 = value0; } + @Override public int getSize() { return 1; } + @Override public int get(int element) { + if(element == 0) return value0; + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{ value0 };} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Tuple1 other = (Tuple1) obj; + return value0 == other.value0; + } + } + public static class Tuple2 extends Tuple{ + final int value0; + final int value1; + protected Tuple2(int value0, int value1) { this.value0 = value0; this.value1 = value1; } + @Override public int getSize() { return 2; } + @Override public int get(int element) { + if(element == 0) return value0; + else if(element == 1) return value1; + throw doesNotContain(element); + } + @Override public int[] toArray() {return new int[]{ value0,value1 };} + @Override public int hashCode() { return TupleHashProvider.singleton().getHash(this, 0); } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Tuple2 other = (Tuple2) obj; + return value0 == other.value0 && value1 == other.value1; + } + } + public static class TupleN extends Tuple{ + final int[] values; + protected TupleN(int[] values) { + if(values.length { + protected static TupleHashProvider instance; + + public static TupleHashProvider singleton() { + if (instance == null) { + instance = new TupleHashProvider(); + } + return instance; + } + + protected static final int[] primes = new int[] { 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, + 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, + 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, + 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, + 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, + 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, + 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 
977, 983, 991, 997, 1009, 1013, 1019, 1021, + 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, + 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409, 1423, 1427, 1429, + 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523, 1531, 1543, 1549, + 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, 1663, 1667, + 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, + 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, 2063, 2069, 2081, 2083, 2087, 2089, + 2099, 2111, 2113, 2129, 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221, 2237, 2239, 2243, + 2251, 2267, 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, 2371, 2377, + 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, + 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, 2753, 2767, 2777, 2789, 2791, + 2797, 2801, 2803, 2819, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, 3209, 3217, + 3221, 3229, 3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, 3343, 3347, 3359, + 3361, 3371, 3373, 3389, 3391, 3407, 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, + 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, + 3643, 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, 3733, 3739, 3761, 3767, 3769, 3779, 3793, + 3797, 3803, 3821, 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, 3911 }; + + protected static final long LARGESTPRIME30BITS = 1073741789; + + public TupleHashProvider() { + if (primes.length < MAX_PRACTICAL_DEPTH) { + throw new UnsupportedOperationException( + "Not enough prime numbers to support the practical depth of continuous hash!"); + } + } + + @Override + public int getHash(Tuple key, int index) { + if (index >= primes.length) { + throw new IllegalArgumentException("Not enough prime numbers to support index"); + } + long accumulator = 0; + final int prime = primes[index]; + for (int i = 0; i < key.getSize(); i++) { + accumulator = (prime * accumulator + key.get(i)) % LARGESTPRIME30BITS; + } + + return (int) accumulator; + } +} diff --git a/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java b/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java new file mode 100644 index 00000000..5b053229 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/TupleHashProviderBitMagic.java @@ -0,0 +1,28 @@ +package tools.refinery.store.model; + +import tools.refinery.store.map.ContinousHashProvider; + +public class TupleHashProviderBitMagic implements ContinousHashProvider { + + @Override + public int getHash(Tuple key, int index) { + if(key.getSize() == 1) { + return key.get(0); + } + + int result = 0; + final int startBitIndex = index*30; + final int finalBitIndex = startBitIndex+30; + final int arity = key.getSize(); + + for(int i = 
startBitIndex; i<=finalBitIndex; i++) { + final int selectedKey = key.get(i%arity); + final int selectedPosition = 1<<(i/arity); + if((selectedKey&selectedPosition) != 0) { + result |= 1<<(i%30); + } + } + + return result; + } +} diff --git a/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java b/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java new file mode 100644 index 00000000..2a5f2925 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/internal/ModelImpl.java @@ -0,0 +1,124 @@ +package tools.refinery.store.model.internal; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.internal.MapDiffCursor; +import tools.refinery.store.model.Model; +import tools.refinery.store.model.ModelDiffCursor; +import tools.refinery.store.model.ModelStore; +import tools.refinery.store.model.representation.DataRepresentation; + +public class ModelImpl implements Model { + private final ModelStore store; + private final Map, VersionedMap> maps; + + public ModelImpl(ModelStore store, Map, VersionedMap> maps) { + this.store = store; + this.maps = maps; + } + + @Override + public Set> getDataRepresentations() { + return maps.keySet(); + } + + @SuppressWarnings("unchecked") + private VersionedMap getMap(DataRepresentation representation) { + if (maps.containsKey(representation)) { + return (VersionedMap) maps.get(representation); + } else { + throw new IllegalArgumentException("Model does have representation " + representation); + } + } + + private VersionedMap getMapValidateKey(DataRepresentation representation, K key) { + if (representation.isValidKey(key)) { + return getMap(representation); + } else { + throw new IllegalArgumentException( + "Key is not valid for representation! 
(representation=" + representation + ", key=" + key + ");"); + } + } + + @Override + public V get(DataRepresentation representation, K key) { + return getMapValidateKey(representation, key).get(key); + } + + @Override + public Cursor getAll(DataRepresentation representation) { + return getMap(representation).getAll(); + } + + @Override + public V put(DataRepresentation representation, K key, V value) { + return getMapValidateKey(representation, key).put(key, value); + } + + @Override + public void putAll(DataRepresentation representation, Cursor cursor) { + getMap(representation).putAll(cursor); + } + + @Override + public long getSize(DataRepresentation representation) { + return getMap(representation).getSize(); + } + + @Override + public ModelDiffCursor getDiffCursor(long to) { + Model toModel = store.createModel(to); + Map, DiffCursor> diffCursors = new HashMap<>(); + for (DataRepresentation representation : this.maps.keySet()) { + MapDiffCursor diffCursor = constructDiffCursor(toModel, representation); + diffCursors.put(representation, diffCursor); + } + return new ModelDiffCursor(diffCursors); + } + + private MapDiffCursor constructDiffCursor(Model toModel, DataRepresentation representation) { + @SuppressWarnings("unchecked") + Cursor fromCursor = (Cursor) this.maps.get(representation).getAll(); + Cursor toCursor = toModel.getAll(representation); + + ContinousHashProvider hashProvider = representation.getHashProvider(); + V defaultValue = representation.getDefaultValue(); + return new MapDiffCursor<>(hashProvider, defaultValue, fromCursor, toCursor); + } + + @Override + public long commit() { + long version = 0; + boolean versionSet = false; + for (VersionedMap map : maps.values()) { + long newVersion = map.commit(); + if (versionSet) { + if (version != newVersion) { + throw new IllegalStateException( + "Maps in model have different versions! 
(" + version + " and" + newVersion + ")"); + } + } else { + version = newVersion; + versionSet = true; + } + } + return version; + } + + @Override + public void restore(long state) { + if(store.getStates().contains(state)) { + for (VersionedMap map : maps.values()) { + map.restore(state); + } + } else { + throw new IllegalArgumentException("Map does not contain state "+state+"!"); + } + } +} diff --git a/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java b/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java new file mode 100644 index 00000000..9d1b1dd0 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/internal/SimilarRelationEquivalenceClass.java @@ -0,0 +1,33 @@ +package tools.refinery.store.model.internal; + +import java.util.Objects; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; + +public class SimilarRelationEquivalenceClass { + final ContinousHashProvider hashProvider; + final Object defaultValue; + final int arity; + public SimilarRelationEquivalenceClass(Relation representation) { + this.hashProvider = representation.getHashProvider(); + this.defaultValue = representation.getDefaultValue(); + this.arity = representation.getArity(); + } + @Override + public int hashCode() { + return Objects.hash(arity, defaultValue, hashProvider); + } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!(obj instanceof SimilarRelationEquivalenceClass)) + return false; + SimilarRelationEquivalenceClass other = (SimilarRelationEquivalenceClass) obj; + return arity == other.arity && Objects.equals(defaultValue, other.defaultValue) + && Objects.equals(hashProvider, other.hashProvider); + } + +} diff --git a/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java b/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java new file mode 100644 index 00000000..ddd8a5f2 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/representation/AuxilaryData.java @@ -0,0 +1,22 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.map.ContinousHashProvider; + +public class AuxilaryData extends DataRepresentation { + private final String name; + + public AuxilaryData(String name, ContinousHashProvider hashProvider, V defaultValue) { + super(hashProvider, defaultValue); + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public boolean isValidKey(K key) { + return true; + } +} diff --git a/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java b/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java new file mode 100644 index 00000000..585e7b88 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/representation/DataRepresentation.java @@ -0,0 +1,24 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.map.ContinousHashProvider; + +public abstract class DataRepresentation { + protected final ContinousHashProvider hashProvider; + protected final V defaultValue; + + protected DataRepresentation(ContinousHashProvider hashProvider, V defaultValue) { + this.hashProvider = hashProvider; + this.defaultValue = defaultValue; + } + + public abstract String getName(); + + public ContinousHashProvider getHashProvider() { + return 
hashProvider; + } + public abstract boolean isValidKey(K key); + + public V getDefaultValue() { + return defaultValue; + } +} diff --git a/store/src/main/java/tools/refinery/store/model/representation/Relation.java b/store/src/main/java/tools/refinery/store/model/representation/Relation.java new file mode 100644 index 00000000..fc2a3185 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/representation/Relation.java @@ -0,0 +1,31 @@ +package tools.refinery.store.model.representation; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.TupleHashProvider; + +public class Relation extends DataRepresentation { + private final String name; + private final int arity; + + public Relation(String name, int arity, D defaultValue) { + super(TupleHashProvider.singleton(), defaultValue); + this.name = name; + this.arity = arity; + } + + @Override + public String getName() { + return name; + } + + public int getArity() { + return arity; + } + + @Override + public boolean isValidKey(Tuple key) { + if(key == null) { + return false; + } else return key.getSize() == getArity(); + } +} diff --git a/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java b/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java new file mode 100644 index 00000000..610713f3 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/model/representation/TruthValue.java @@ -0,0 +1,51 @@ +package tools.refinery.store.model.representation; + +public enum TruthValue { + TRUE("true"), + + FALSE("false"), + + UNKNOWN("unknown"), + + ERROR("error"); + + private final String name; + + private TruthValue(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public static TruthValue toTruthValue(boolean value) { + return value ? 
TRUE : FALSE; + } + + public boolean isConsistent() { + return this != ERROR; + } + + public boolean isComplete() { + return this != UNKNOWN; + } + + public boolean must() { + return this == TRUE || this == ERROR; + } + + public boolean may() { + return this == TRUE || this == UNKNOWN; + } + + public TruthValue not() { + if (this == TRUE) { + return FALSE; + } else if (this == FALSE) { + return TRUE; + } else { + return this; + } + } +} diff --git a/store/src/main/java/tools/refinery/store/query/RelationalScope.java b/store/src/main/java/tools/refinery/store/query/RelationalScope.java new file mode 100644 index 00000000..5fe8083a --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/RelationalScope.java @@ -0,0 +1,35 @@ +package tools.refinery.store.query; + +import java.util.Set; + +import org.apache.log4j.Logger; +import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.scope.IEngineContext; +import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; +import org.eclipse.viatra.query.runtime.api.scope.QueryScope; + +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.query.internal.RelationUpdateListener; +import tools.refinery.store.query.internal.RelationalEngineContext; +import tools.refinery.store.query.view.RelationView; + +public class RelationalScope extends QueryScope{ + private final Model model; + private final RelationUpdateListener updateListener; + + public RelationalScope(Model model, Set> relationViews) { + this.model = model; + updateListener = new RelationUpdateListener(relationViews); + } + + public void processUpdate(RelationView relationView, Tuple key, D oldValue, D newValue) { + updateListener.processChange(relationView, key, oldValue, newValue); + } + + @Override + protected IEngineContext createEngineContext(ViatraQueryEngine engine, IIndexingErrorListener errorListener, + Logger logger) { + return new RelationalEngineContext(model, updateListener); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java b/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java new file mode 100644 index 00000000..48dabce2 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/DNFAnd.java @@ -0,0 +1,37 @@ +package tools.refinery.store.query.building; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class DNFAnd { + private Set existentiallyQuantified; + private List constraints; + public DNFAnd(Set quantifiedVariables, List constraints) { + super(); + this.existentiallyQuantified = quantifiedVariables; + this.constraints = constraints; + } + public Set getExistentiallyQuantified() { + return existentiallyQuantified; + } + public List getConstraints() { + return constraints; + } + void unifyVariables(Map uniqueVariableMap) { + Map uniqueVariableMapForClause = new HashMap<>(uniqueVariableMap); + for(DNFAtom atom : constraints) { + atom.unifyVariables(uniqueVariableMapForClause); + } + } + void collectQuantifiedVariables(Set parameters) { + Set result = new HashSet<>(); + for(DNFAtom constraint : constraints) { + constraint.collectAllVariables(result); + } + result.removeAll(parameters); + existentiallyQuantified = result; + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java b/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java new file mode 
100644 index 00000000..b047d7c8 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/DNFAtom.java @@ -0,0 +1,33 @@ +package tools.refinery.store.query.building; + +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +public interface DNFAtom { + void unifyVariables(Map variables); + static Variable unifyVariables(Map unifiedVariables, Variable variable) { + if(variable != null) { + if(variable.isNamed() && unifiedVariables.containsKey(variable.getName())) { + return unifiedVariables.get(variable.getName()); + } + return variable; + } else { + return null; + } + } + void collectAllVariables(Set variables); + static void addToCollection(Set variables, Variable variable) { + if(variable != null) { + variables.add(variable); + } + } + static void addToCollection(Set variables, Collection variableCollection) { + Iterator iterator = variableCollection.iterator(); + while(iterator.hasNext()) { + Variable variable = iterator.next(); + addToCollection(variables, variable); + } + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java b/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java new file mode 100644 index 00000000..f0c9ac42 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/DNFPredicate.java @@ -0,0 +1,72 @@ +package tools.refinery.store.query.building; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +public class DNFPredicate { + private final String name; + private final String uniqueName; + private final List parameters; + private final List clauses; + + public DNFPredicate(String name, List parameters, List clauses) { + this.name = name; + this.uniqueName = generateUniqueName(name,"predicate"); + this.parameters = parameters; + this.clauses = clauses; + + postProcess(); + } + + public static String generateUniqueName(String originalName, String defaultPrefix) { + UUID uuid = UUID.randomUUID(); + String uniqueString = uuid.toString().replace('-', '_'); + if(originalName == null) { + return defaultPrefix+uniqueString; + } else { + return originalName+uniqueString; + } + } + + public String getName() { + return name; + } + public String getUniqueName() { + return uniqueName; + } + public List getVariables() { + return parameters; + } + public List getClauses() { + return clauses; + } + + public void unifyVariables() { + Map uniqueVariableMap = new HashMap<>(); + for(Variable parameter : this.parameters) { + if(parameter.isNamed()) { + String parameterName = parameter.getName(); + if(uniqueVariableMap.containsKey(parameterName)) { + throw new IllegalArgumentException("Multiple parameters has the name "+parameterName); + } else { + uniqueVariableMap.put(parameterName, parameter); + } + } + } + for(DNFAnd clause : this.clauses) { + clause.unifyVariables(uniqueVariableMap); + } + } + public void collectQuantifiedVariables() { + for(DNFAnd clause : this.clauses) { + clause.collectQuantifiedVariables(new HashSet<>(parameters)); + } + } + public void postProcess() { + unifyVariables(); + collectQuantifiedVariables(); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java b/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java new file mode 100644 index 00000000..fede2518 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/EquivalenceAtom.java @@ -0,0 +1,44 @@ +package 
tools.refinery.store.query.building; + +import java.util.Map; +import java.util.Set; + +public class EquivalenceAtom implements DNFAtom{ + private boolean positive; + private Variable left; + private Variable right; + public EquivalenceAtom(boolean positive, Variable left, Variable right) { + this.positive = positive; + this.left = left; + this.right = right; + } + public boolean isPositive() { + return positive; + } + public void setPositive(boolean positive) { + this.positive = positive; + } + public Variable getLeft() { + return left; + } + public void setLeft(Variable left) { + this.left = left; + } + public Variable getRight() { + return right; + } + public void setRight(Variable right) { + this.right = right; + } + + @Override + public void unifyVariables(Map variables) { + this.left = DNFAtom.unifyVariables(variables,left); + this.right = DNFAtom.unifyVariables(variables,right); + } + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, left); + DNFAtom.addToCollection(variables, right); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java b/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java new file mode 100644 index 00000000..42394922 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/PredicateAtom.java @@ -0,0 +1,66 @@ +package tools.refinery.store.query.building; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class PredicateAtom implements DNFAtom { + private DNFPredicate referred; + private List substitution; + private boolean positive; + private boolean transitive; + + public PredicateAtom(boolean positive, boolean transitive, DNFPredicate referred, List substitution) { + this.positive = positive; + this.referred = referred; + this.substitution = substitution; + this.transitive = transitive; + } + + public DNFPredicate getReferred() { + return referred; + } + + public void setReferred(DNFPredicate referred) { + this.referred = referred; + } + + public List getSubstitution() { + return substitution; + } + + public void setSubstitution(List substitution) { + this.substitution = substitution; + } + + public boolean isPositive() { + return positive; + } + + public void setPositive(boolean positive) { + this.positive = positive; + } + + public boolean isTransitive() { + return transitive; + } + + public void setTransitive(boolean transitive) { + this.transitive = transitive; + } + + @Override + public void unifyVariables(Map variables) { + for (int i = 0; i < this.substitution.size(); i++) { + final Object term = this.substitution.get(i); + if (term instanceof Variable variableReference) { + this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); + } + } + } + + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, substitution); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/PredicateBuilder_string.java b/store/src/main/java/tools/refinery/store/query/building/PredicateBuilder_string.java new file mode 100644 index 00000000..b99407a3 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/PredicateBuilder_string.java @@ -0,0 +1,107 @@ +package tools.refinery.store.query.building; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; + +import tools.refinery.store.query.view.RelationView; + +public class PredicateBuilder_string { + private 
PredicateBuilder_string() {} + + public static PredicateBuild1 predicate(String name) { + return new PredicateBuild1(name); + } + public static class PredicateBuild1 { + private String name; + public PredicateBuild1(String name) { + this.name = name; + } + public PredicateBuild2 parameters(String... parameters) { + return new PredicateBuild2(name, parameters); + } + } + public static class PredicateBuild2 { + private String name; + private String[] parameters; + public PredicateBuild2(String name, String[] parameters) { + this.name = name; + this.parameters = parameters; + } + + public PredicateBuild3 clause(DNFAtom...constraints) { + return new PredicateBuild3(name,parameters,List.of(constraints)); + } + } + public static class PredicateBuild3 { + String name; + String[] parameters; + List clauses; + public PredicateBuild3(String name, String[] parameters, List clauses) { + super(); + this.name = name; + this.parameters = parameters; + this.clauses = clauses; + } + + public PredicateBuild3 clause(DNFAtom...constraints) { + List newClauses = new ArrayList<>(); + newClauses.addAll(clauses); + newClauses.add(constraints); + return new PredicateBuild3(name, parameters, newClauses); + } + public DNFPredicate build() { + List newParameters = new ArrayList<>(this.parameters.length); + for(int i = 0; i newClauses = new ArrayList<>(this.clauses.size()); + for(DNFAtom[] clause : this.clauses) { + List constraints = new ArrayList<>(clause.length); + Collections.addAll(constraints, clause); + newClauses.add(new DNFAnd(new HashSet<>(), constraints)); + } + + return new DNFPredicate(name,newParameters,newClauses); + } + } + + private static Variable stringToVariable(String name) { + if(name != null) { + return new Variable(name); + } else { + return null; + } + } + private static List stringToVariable(String[] names) { + List variables = new ArrayList<>(); + for(int i = 0; i view, String... variables) { + + return new RelationAtom(view, stringToVariable(variables)); + } + + public static PredicateAtom cInPredicate(DNFPredicate referred, String... variables) { + return new PredicateAtom(true, false, referred, stringToVariable(variables)); + } + public static PredicateAtom cInTransitivePredicate(DNFPredicate referred, String... variables) { + return new PredicateAtom(true, true, referred, stringToVariable(variables)); + } + public static PredicateAtom cNotInPredicate(DNFPredicate referred, String... 
variables) { + return new PredicateAtom(false, false, referred, stringToVariable(variables)); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java b/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java new file mode 100644 index 00000000..1238f1d7 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/RelationAtom.java @@ -0,0 +1,49 @@ +package tools.refinery.store.query.building; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import tools.refinery.store.query.view.FilteredRelationView; +import tools.refinery.store.query.view.RelationView; + +public class RelationAtom implements DNFAtom { + RelationView view; + List substitution; + + public RelationAtom(RelationView view, List substitution) { + this.view = view; + this.substitution = substitution; + } + + public RelationView getView() { + return view; + } + + public void setView(FilteredRelationView view) { + this.view = view; + } + + public List getSubstitution() { + return substitution; + } + + public void setSubstitution(List substitution) { + this.substitution = substitution; + } + + @Override + public void unifyVariables(Map variables) { + for (int i = 0; i < this.substitution.size(); i++) { + final Object term = this.substitution.get(i); + if (term instanceof Variable variableReference) { + this.substitution.set(i, DNFAtom.unifyVariables(variables, variableReference)); + } + } + } + + @Override + public void collectAllVariables(Set variables) { + DNFAtom.addToCollection(variables, substitution); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/building/Variable.java b/store/src/main/java/tools/refinery/store/query/building/Variable.java new file mode 100644 index 00000000..9ea7ce83 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/building/Variable.java @@ -0,0 +1,22 @@ +package tools.refinery.store.query.building; + +public class Variable { + private final String name; + private final String uniqueName; + + public Variable(String name) { + super(); + this.name = name; + this.uniqueName = DNFPredicate.generateUniqueName(name, "variable"); + + } + public String getName() { + return name; + } + public String getUniqueName() { + return uniqueName; + } + public boolean isNamed() { + return name != null; + } +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java b/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java new file mode 100644 index 00000000..49637071 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/DummyBaseIndexer.java @@ -0,0 +1,59 @@ +package tools.refinery.store.query.internal; + +import java.lang.reflect.InvocationTargetException; +import java.util.concurrent.Callable; + +import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; +import org.eclipse.viatra.query.runtime.api.scope.IIndexingErrorListener; +import org.eclipse.viatra.query.runtime.api.scope.IInstanceObserver; +import org.eclipse.viatra.query.runtime.api.scope.ViatraBaseIndexChangeListener; + +/** + * copied from org.eclipse.viatra.query.runtime.tabular.TabularEngineContext; + */ +public class DummyBaseIndexer implements IBaseIndex{ + + @Override + public V coalesceTraversals(Callable callable) throws InvocationTargetException { + try { + return callable.call(); + } catch (Exception e) { + throw new InvocationTargetException(e); + } + } + + @Override + public void addBaseIndexChangeListener(ViatraBaseIndexChangeListener 
listener) { + // no notification support + } + + @Override + public void removeBaseIndexChangeListener(ViatraBaseIndexChangeListener listener) { + // no notification support + } + + @Override + public void resampleDerivedFeatures() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean addIndexingErrorListener(IIndexingErrorListener listener) { + return true; + } + + @Override + public boolean removeIndexingErrorListener(IIndexingErrorListener listener) { + return true; + } + + @Override + public boolean addInstanceObserver(IInstanceObserver observer, Object observedObject) { + return true; + } + + @Override + public boolean removeInstanceObserver(IInstanceObserver observer, Object observedObject) { + return true; + } +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/PredicateTranslator.java b/store/src/main/java/tools/refinery/store/query/internal/PredicateTranslator.java new file mode 100644 index 00000000..6b050182 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/PredicateTranslator.java @@ -0,0 +1,210 @@ +package tools.refinery.store.query.internal; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.api.GenericPatternMatcher; +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; +import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.scope.QueryScope; +import org.eclipse.viatra.query.runtime.matchers.backend.QueryEvaluationHint; +import org.eclipse.viatra.query.runtime.matchers.psystem.PBody; +import org.eclipse.viatra.query.runtime.matchers.psystem.PVariable; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Equality; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.ExportedParameter; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Inequality; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.NegativePatternCall; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryReflexiveTransitiveClosure; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryTransitiveClosure; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.PositivePatternCall; +import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.TypeConstraint; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.BasePQuery; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PParameter; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery; +import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PVisibility; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; + +import tools.refinery.store.query.RelationalScope; +import tools.refinery.store.query.view.RelationView; + +public class PredicateTranslator extends BasePQuery { + + private final Map parameters = new HashMap(); + private String fullyQualifiedName; + private LinkedList bodies = new LinkedList(); + private List symbolicParameters; + + public PredicateTranslator(String fullyQualifiedName) { + super(PVisibility.PUBLIC); + this.fullyQualifiedName = fullyQualifiedName; + PBody body = new PBody(this); + bodies.add(body); + } + + @Override + public String getFullyQualifiedName() { + return fullyQualifiedName; + } 
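+ // Usage sketch (illustrative only; "friendMustView" and the parameter names below are assumed,
+ // not part of this commit): the translator acts as a fluent builder that accumulates parameters
+ // and constraints into the current PBody and is finally turned into a query specification, e.g.
+ //   GenericQuerySpecification<GenericPatternMatcher> spec =
+ //       new PredicateTranslator("example.friend")
+ //           .addParameter("p1", friendMustView)
+ //           .addParameter("p2", friendMustView)
+ //           .addConstraint(friendMustView, "p1", "p2")
+ //           .build();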
+ + public PredicateTranslator addParameter(String name, RelationView type) { + PParameter parameter = new PParameter(name); + parameters.put(name, parameter); + + PBody body = bodies.peekLast(); + List symbolicParameters = new ArrayList<>(); + parameters.forEach((pName, pParameter) -> { + PVariable var = body.getOrCreateVariableByName(pName); + symbolicParameters.add(new ExportedParameter(body, var, pParameter)); + }); + body.setSymbolicParameters(symbolicParameters); + + return this; + } + + @Override + public List getParameters() { + return new ArrayList(parameters.values()); + } + public PredicateTranslator addConstraint(RelationView view, String... name) { + if(name.length != view.getArity()) { + throw new IllegalArgumentException("Arity ("+view.getArity()+") does not match parameter numbers ("+name.length+")"); + } + PBody body = bodies.peekLast(); + Object[] variables = new Object[name.length]; + for(int i = 0; i symbolicParameters = new ArrayList<>(); + parameters.forEach((name, parameter) -> { + PVariable var = body.getOrCreateVariableByName(name); + symbolicParameters.add(new ExportedParameter(body, var, parameter)); + }); + body.setSymbolicParameters(symbolicParameters); + bodies.add(body); + return this; + } + + // Equality constraint + public PredicateTranslator addEquality(String sourceName, String targetName) { + PBody body = bodies.peekLast(); + PVariable var_source = body.getOrCreateVariableByName(sourceName); + PVariable var_target = body.getOrCreateVariableByName(targetName); + new Equality(body, var_source, var_target); + return this; + } + + // Inequality constraint + public PredicateTranslator addInequality(String sourceName, String targetName) { + PBody body = bodies.peekLast(); + PVariable var_source = body.getOrCreateVariableByName(sourceName); + PVariable var_target = body.getOrCreateVariableByName(targetName); + new Inequality(body, var_source, var_target); + return this; + } + + // Positive pattern call + public PredicateTranslator addPatternCall(PQuery query, String... names) { + PBody body = bodies.peekLast(); + PVariable[] vars = new PVariable[names.length]; + for (int i = 0; i < names.length; i++) { + vars[i] = body.getOrCreateVariableByName(names[i]); + } + new PositivePatternCall(body, Tuples.flatTupleOf(vars), query); + return this; + } + + // Negative pattern call + public PredicateTranslator addNegativePatternCall(PQuery query, String... 
names) { + PBody body = bodies.peekLast(); + PVariable[] vars = new PVariable[names.length]; + for (int i = 0; i < names.length; i++) { + vars[i] = body.getOrCreateVariableByName(names[i]); + } + new NegativePatternCall(body, Tuples.flatTupleOf(vars), query); + return this; + } + + // Binary transitive closure pattern call + public PredicateTranslator addBinaryTransitiveClosure(PQuery query, String sourceName, String targetName) { + PBody body = bodies.peekLast(); + PVariable var_source = body.getOrCreateVariableByName(sourceName); + PVariable var_target = body.getOrCreateVariableByName(targetName); + new BinaryTransitiveClosure(body, Tuples.flatTupleOf(var_source, var_target), query); + return this; + } + + // Binary reflexive transitive closure pattern call + public PredicateTranslator addBinaryReflexiveTransitiveClosure(PQuery query, String sourceName, String targetName) { + PBody body = bodies.peekLast(); + PVariable var_source = body.getOrCreateVariableByName(sourceName); + PVariable var_target = body.getOrCreateVariableByName(targetName); + new BinaryReflexiveTransitiveClosure(body, Tuples.flatTupleOf(var_source, var_target), query, + query.getParameters().get(0).getDeclaredUnaryType()); + return this; + } + + @Override + public Set doGetContainedBodies() { + setEvaluationHints(new QueryEvaluationHint(null, QueryEvaluationHint.BackendRequirement.UNSPECIFIED)); + return new LinkedHashSet(bodies); + } + + public void addSymbolicParameters(ExportedParameter symbolicParameter) { + checkMutability(); + if (symbolicParameters == null) { + symbolicParameters = new ArrayList<>(); + } + symbolicParameters.add(symbolicParameter); + } + + public GenericQuerySpecification build() { + return new GenericQuerySpecification(this) { + + @Override + public Class getPreferredScopeClass() { + return RelationalScope.class; + } + + @Override + protected GenericPatternMatcher instantiate(ViatraQueryEngine engine) { + return defaultInstantiate(engine); + } + + @Override + public GenericPatternMatcher instantiate() { + return new GenericPatternMatcher(this); + } + + }; + } +} \ No newline at end of file diff --git a/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListener.java b/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListener.java new file mode 100644 index 00000000..cf5260f6 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListener.java @@ -0,0 +1,52 @@ +package tools.refinery.store.query.internal; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.query.view.RelationView; + +public class RelationUpdateListener { + private final Map,Set>> view2Listeners; + + public RelationUpdateListener(Set> relationViews) { + view2Listeners = new HashMap<>(); + for(RelationView relationView : relationViews) { + view2Listeners.put(relationView, new HashSet<>()); + } + } + public boolean containsRelationalView(RelationView relationalKey) { + RelationView relationView = relationalKey.getWrappedKey(); + return view2Listeners.containsKey(relationView); + } + public void addListener(RelationView relationalKey, ITuple seed, IQueryRuntimeContextListener listener) { + RelationView relationView = relationalKey.getWrappedKey(); + 
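+ // Listener registrations are keyed by the wrapped (canonical) RelationView instance; only views
+ // handed to the constructor are present in view2Listeners, so an unknown view is rejected below.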
if(view2Listeners.containsKey(relationView)) { + RelationUpdateListenerEntry entry = new RelationUpdateListenerEntry<>(relationalKey, seed, listener); + view2Listeners.get(relationView).add(entry); + } else throw new IllegalArgumentException(); + } + public void removeListener(RelationView relationalKey, ITuple seed, IQueryRuntimeContextListener listener) { + RelationView relationView = relationalKey.getWrappedKey(); + if(view2Listeners.containsKey(relationView)) { + RelationUpdateListenerEntry entry = new RelationUpdateListenerEntry<>(relationalKey, seed, listener); + view2Listeners.get(relationView).remove(entry); + } else throw new IllegalArgumentException(); + } + + public void processChange(RelationView relationView, Tuple tuple, D oldValue, D newValue) { + Set> listeners = view2Listeners.get(relationView); + if(listeners != null) { + for(RelationUpdateListenerEntry listener : listeners) { + @SuppressWarnings("unchecked") + RelationUpdateListenerEntry typeCorrectListener = (RelationUpdateListenerEntry) listener; + typeCorrectListener.processChange(tuple, oldValue, newValue); + } + } else throw new IllegalArgumentException("View was not indexed in constructor "+relationView); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListenerEntry.java b/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListenerEntry.java new file mode 100644 index 00000000..860a80b7 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/RelationUpdateListenerEntry.java @@ -0,0 +1,64 @@ +package tools.refinery.store.query.internal; + +import java.util.Arrays; +import java.util.Objects; + +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; + +import tools.refinery.store.model.Tuple; +import tools.refinery.store.query.view.RelationView; + +public class RelationUpdateListenerEntry { + final RelationView key; + final ITuple filter; + final IQueryRuntimeContextListener listener; + + public RelationUpdateListenerEntry(RelationView key, ITuple filter, IQueryRuntimeContextListener listener) { + super(); + this.key = key; + this.filter = filter; + this.listener = listener; + } + + public void processChange(Tuple tuple, D oldValue, D newValue) { + Object[] oldTuple = isMatching(key.getWrappedKey().transform(tuple, oldValue), filter); + Object[] newTuple = isMatching(key.getWrappedKey().transform(tuple, newValue), filter); + + if(!Arrays.equals(oldTuple, newTuple)) { + if(oldTuple != null) { + listener.update(key, Tuples.flatTupleOf(oldTuple), false); + } + if(newTuple != null) { + listener.update(key, Tuples.flatTupleOf(newTuple), true); + } + } + } + + private Object[] isMatching(Object[] tuple, ITuple filter) { + for(int i = 0; i other = (RelationUpdateListenerEntry) obj; + return Objects.equals(filter, other.filter) && Objects.equals(key, other.key) + && Objects.equals(listener, other.listener); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java b/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java new file mode 100644 index 00000000..691baf81 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/RelationalEngineContext.java @@ -0,0 +1,33 @@ +package tools.refinery.store.query.internal; + +import org.eclipse.viatra.query.runtime.api.scope.IBaseIndex; +import 
org.eclipse.viatra.query.runtime.api.scope.IEngineContext; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; + +import tools.refinery.store.model.Model; + +public class RelationalEngineContext implements IEngineContext{ + private final IBaseIndex baseIndex = new DummyBaseIndexer(); + private final RelationalRuntimeContext runtimeContext; + + + public RelationalEngineContext(Model model, RelationUpdateListener updateListener) { + runtimeContext = new RelationalRuntimeContext(model, updateListener); + } + + @Override + public IBaseIndex getBaseIndex() { + return this.baseIndex; + } + + @Override + public void dispose() { + //lifecycle not controlled by engine + } + + @Override + public IQueryRuntimeContext getQueryRuntimeContext() { + return runtimeContext; + } + +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java b/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java new file mode 100644 index 00000000..05fb0904 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/RelationalQueryMetaContext.java @@ -0,0 +1,58 @@ +package tools.refinery.store.query.internal; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.matchers.context.AbstractQueryMetaContext; +import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; +import org.eclipse.viatra.query.runtime.matchers.context.InputKeyImplication; + +import tools.refinery.store.query.view.RelationView; + +/** + * The meta context information for String scopes. + */ +public final class RelationalQueryMetaContext extends AbstractQueryMetaContext { + + @Override + public boolean isEnumerable(IInputKey key) { + ensureValidKey(key); + return key.isEnumerable(); + } + + @Override + public boolean isStateless(IInputKey key) { + ensureValidKey(key); + return key instanceof RelationView; + } + + @Override + public Collection getImplications(IInputKey implyingKey) { + ensureValidKey(implyingKey); + return new HashSet(); + } + + @Override + public Map, Set> getFunctionalDependencies(IInputKey key) { + ensureValidKey(key); + if (key instanceof RelationView) { + return new HashMap, Set>(); + } else { + return Collections.emptyMap(); + } + } + + public void ensureValidKey(IInputKey key) { + if (! 
(key instanceof RelationView)) + illegalInputKey(key); + } + + public void illegalInputKey(IInputKey key) { + throw new IllegalArgumentException("The input key " + key + " is not a valid input key."); + } + +} diff --git a/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java b/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java new file mode 100644 index 00000000..da118f26 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/internal/RelationalRuntimeContext.java @@ -0,0 +1,186 @@ +package tools.refinery.store.query.internal; + +import static tools.refinery.store.util.CollectionsUtil.filter; +import static tools.refinery.store.util.CollectionsUtil.map; + +import java.lang.reflect.InvocationTargetException; +import java.util.Iterator; +import java.util.Optional; +import java.util.concurrent.Callable; + +import org.eclipse.viatra.query.runtime.base.core.NavigationHelperImpl; +import org.eclipse.viatra.query.runtime.matchers.context.IInputKey; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryMetaContext; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContext; +import org.eclipse.viatra.query.runtime.matchers.context.IQueryRuntimeContextListener; +import org.eclipse.viatra.query.runtime.matchers.context.IndexingService; +import org.eclipse.viatra.query.runtime.matchers.tuple.ITuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuple; +import org.eclipse.viatra.query.runtime.matchers.tuple.TupleMask; +import org.eclipse.viatra.query.runtime.matchers.tuple.Tuples; +import org.eclipse.viatra.query.runtime.matchers.util.Accuracy; + +import tools.refinery.store.model.Model; +import tools.refinery.store.query.view.RelationView; + +public class RelationalRuntimeContext implements IQueryRuntimeContext { + private final RelationalQueryMetaContext metaContext = new RelationalQueryMetaContext(); + private final RelationUpdateListener relationUpdateListener; + private final Model model; + + public RelationalRuntimeContext(Model model, RelationUpdateListener relationUpdateListener) { + this.model = model; + this.relationUpdateListener = relationUpdateListener; + } + + @Override + public IQueryMetaContext getMetaContext() { + return metaContext; + } + + /** + * TODO: check {@link NavigationHelperImpl#coalesceTraversals(Callable)} + */ + @Override + public V coalesceTraversals(Callable callable) throws InvocationTargetException { + try { + return callable.call(); + } catch (Exception e) { + throw new InvocationTargetException(e); + } + } + + @Override + public boolean isCoalescing() { + return true; + } + + @Override + public boolean isIndexed(IInputKey key, IndexingService service) { + if(key instanceof RelationView relationalKey) { + return this.relationUpdateListener.containsRelationalView(relationalKey); + } else { + return false; + } + } + + @Override + public void ensureIndexed(IInputKey key, IndexingService service) { + if(!isIndexed(key, service)) { + throw new IllegalStateException("Engine tries to index a new key " +key); + } + } + + RelationView checkKey(IInputKey key) { + if(key instanceof RelationView) { + RelationView relationViewKey = (RelationView) key; + if(relationUpdateListener.containsRelationalView(relationViewKey)) { + return relationViewKey; + } else { + throw new IllegalStateException("Query is asking for non-indexed key"); + } + } else { + throw new IllegalStateException("Query is asking for non-relational key"); + } + } + + @Override + public int 
countTuples(IInputKey key, TupleMask seedMask, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + Iterable allObjects = relationalViewKey.getAll(model); + Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); + Iterator iterator = filteredBySeed.iterator(); + int result = 0; + while(iterator.hasNext()) { + iterator.next(); + result++; + } + return result; + } + + @Override + public Optional estimateCardinality(IInputKey key, TupleMask groupMask, Accuracy requiredAccuracy) { + return Optional.empty(); + } + + @Override + public Iterable enumerateTuples(IInputKey key, TupleMask seedMask, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + Iterable allObjects = relationalViewKey.getAll(model); + Iterable filteredBySeed = filter(allObjects,objectArray -> isMatching(objectArray,seedMask,seed)); + return map(filteredBySeed,Tuples::flatTupleOf); + } + + private boolean isMatching(Object[] tuple, TupleMask seedMask, ITuple seed) { + for(int i=0; i relationalViewKey, TupleMask seedMask, ITuple seed) { +// final int arity = relationalViewKey.getArity(); +// Object[] result = new Object[arity]; +// for(int i = 0; i enumerateValues(IInputKey key, TupleMask seedMask, ITuple seed) { + return enumerateTuples(key, seedMask, seed); + } + + @Override + public boolean containsTuple(IInputKey key, ITuple seed) { + RelationView relationalViewKey = checkKey(key); + return relationalViewKey.get(model,seed.getElements()); + } + + @Override + public void addUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { + RelationView relationalKey = checkKey(key); + this.relationUpdateListener.addListener(relationalKey, seed, listener); + + } + + @Override + public void removeUpdateListener(IInputKey key, Tuple seed, IQueryRuntimeContextListener listener) { + RelationView relationalKey = checkKey(key); + this.relationUpdateListener.removeListener(relationalKey, seed, listener); + } + + @Override + public Object wrapElement(Object externalElement) { + return externalElement; + } + + @Override + public Object unwrapElement(Object internalElement) { + return internalElement; + } + + @Override + public Tuple wrapTuple(Tuple externalElements) { + return externalElements; + } + + @Override + public Tuple unwrapTuple(Tuple internalElements) { + return internalElements; + } + + @Override + public void ensureWildcardIndexing(IndexingService service) { + throw new UnsupportedOperationException(); + } + + @Override + public void executeAfterTraversal(Runnable runnable) throws InvocationTargetException { + runnable.run(); + } +} diff --git a/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java b/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java new file mode 100644 index 00000000..2e264c44 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/view/FilteredRelationView.java @@ -0,0 +1,48 @@ +package tools.refinery.store.query.view; + +import java.util.function.BiPredicate; + +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.Tuple.Tuple1; +import tools.refinery.store.model.representation.Relation; + +public class FilteredRelationView extends RelationView{ + private final BiPredicate predicate; + + public FilteredRelationView(Relation representation, BiPredicate predicate) { + super(representation); + this.predicate = predicate; + } + @Override + protected Object[] forwardMap(Tuple key, D value) { + 
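+ // The produced match tuple contains only the key elements (wrapped as Tuple1 objects); the value
+ // parameter is ignored here and only matters for filtering.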
return toTuple1Array(key); + } + @Override + public boolean get(Model model, Object[] tuple) { + int[] content = new int[tuple.length]; + for(int i = 0; i extends RelationView { + + public FunctionalRelationView(Relation representation) { + super(representation); + } + + @Override + protected boolean filter(Tuple key, D value) { + return true; + } + + @Override + protected Object[] forwardMap(Tuple key, D value) { + return toTuple1ArrayPlusValue(key, value); + } + + @Override + public boolean get(Model model, Object[] tuple) { + int[] content = new int[tuple.length-1]; + for(int i = 0; i Object[] toTuple1ArrayPlusValue(Tuple t, D value) { + Object[] result = new Object[t.getSize()+1]; + for(int i = 0; i{ + + public KeyOnlyRelationView(Relation representation) { + super(representation, (k,v)->true); + } + @Override + protected boolean filter(Tuple key, Boolean value) { + return true; + } + +} diff --git a/store/src/main/java/tools/refinery/store/query/view/RelationView.java b/store/src/main/java/tools/refinery/store/query/view/RelationView.java new file mode 100644 index 00000000..2c2a37c4 --- /dev/null +++ b/store/src/main/java/tools/refinery/store/query/view/RelationView.java @@ -0,0 +1,86 @@ +package tools.refinery.store.query.view; + +import java.util.Objects; + +import org.eclipse.viatra.query.runtime.matchers.context.common.BaseInputKeyWrapper; + +import tools.refinery.store.map.CursorAsIterator; +import tools.refinery.store.model.Model; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; + +/** + * Represents a view of a {@link Relation} that can be queried. + * + * @author Oszkar Semerath + * + * @param + */ +public abstract class RelationView extends BaseInputKeyWrapper> { + protected final Relation representation; + + protected RelationView(Relation representation) { + super(null); + this.wrappedKey = this; + this.representation = representation; + } + + @Override + public String getPrettyPrintableName() { + return representation.getName(); + } + + @Override + public String getStringID() { + return representation.getName() + this.getClass().getName(); + } + + public Relation getRepresentation() { + return representation; + } + + @Override + public boolean isEnumerable() { + return true; + } + + protected abstract boolean filter(Tuple key, D value); + + protected abstract Object[] forwardMap(Tuple key, D value); + + public abstract boolean get(Model model, Object[] tuple); + + public Object[] transform(Tuple tuple, D value) { + if (filter(tuple, value)) { + return forwardMap(tuple, value); + } else + return null; + } + + public Iterable getAll(Model model) { + return (() -> new CursorAsIterator<>(model.getAll(representation), (k, v) -> forwardMap(k, v), + (k, v) -> filter(k, v))); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Objects.hash(representation); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (!(obj instanceof RelationView)) + return false; + @SuppressWarnings("unchecked") + RelationView other = ((RelationView) obj); + return Objects.equals(representation, other.representation); + } + +} diff --git a/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java b/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java new file mode 100644 index 00000000..841d0dfa --- /dev/null +++ 
b/store/src/main/java/tools/refinery/store/util/CollectionsUtil.java @@ -0,0 +1,72 @@ +package tools.refinery.store.util; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.function.Function; +import java.util.function.Predicate; + +public final class CollectionsUtil { + private CollectionsUtil() { + throw new UnsupportedOperationException(); + } + + public static Iterator map(Iterator source, Function transformation) { + return new Iterator() { + + @Override + public boolean hasNext() { + return source.hasNext(); + } + + @Override + public T next() { + return transformation.apply(source.next()); + } + }; + } + + public static Iterable map(Iterable source, Function transformation) { + return (()->map(source.iterator(),transformation)); + } + + public static Iterator filter(Iterator source, Predicate condition) { + return new Iterator() { + T internalNext = move(); + boolean internalHasNext; + + private T move() { + internalHasNext = source.hasNext(); + if(internalHasNext) { + internalNext = source.next(); + } + while(internalHasNext && !condition.test(internalNext)) { + internalHasNext = source.hasNext(); + if(internalHasNext) { + internalNext = source.next(); + } + } + return internalNext; + } + + @Override + public boolean hasNext() { + return internalHasNext; + } + + @Override + public T next() { + if(!internalHasNext) { + throw new NoSuchElementException(); + } else { + T result = internalNext; + move(); + return result; + } + } + }; + } + + public static Iterable filter(Iterable source, Predicate condition) { + return (()->filter(source.iterator(),condition)); + } +} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/CommitFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/CommitFuzzTest.java deleted file mode 100644 index d744a79d..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/CommitFuzzTest.java +++ /dev/null @@ -1,96 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class CommitFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - VersionedMapImpl sut = (VersionedMapImpl) store.createMap(); - MapTestEnvironment e = new MapTestEnvironment(sut); - - Random r = new Random(seed); - - iterativeRandomPutsAndCommits(scenario, steps, maxKey, values, e, r, commitFrequency); - } - - private void iterativeRandomPutsAndCommits(String scenario, int steps, int maxKey, String[] values, - MapTestEnvironment e, Random r, int commitFrequency) { - int stopAt = -1; - for (int i = 0; i < steps; i++) { - 
int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - if (index == stopAt) { - System.out.println("issue!"); - System.out.println("State before:"); - e.printComparison(); - e.sut.prettyPrint(); - System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); - } - try { - e.put(nextKey, nextValue); - if (index == stopAt) { - e.sut.prettyPrint(); - } - e.checkEquivalence(scenario + ":" + index); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - MapTestEnvironment.printStatus(scenario, index, steps, null); - if (index % commitFrequency == 0) { - e.sut.commit(); - } - } - } - - @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Timeout(value = 10) - @Tag("fuzz") - void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/ContentEqualsFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/ContentEqualsFuzzTest.java deleted file mode 100644 index 1f6f9609..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/ContentEqualsFuzzTest.java +++ /dev/null @@ -1,143 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.AbstractMap.SimpleEntry; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.Cursor; -import tools.refinery.data.map.VersionedMap; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class ContentEqualsFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - 
boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - Random r = new Random(seed); - - iterativeRandomPutsAndCommitsThenCompare(scenario, chp, steps, maxKey, values, r, commitFrequency); - } - - private void iterativeRandomPutsAndCommitsThenCompare(String scenario, ContinousHashProvider chp, int steps, int maxKey, String[] values, Random r, int commitFrequency) { - - VersionedMapStore store1 = new VersionedMapStoreImpl(chp, values[0]); - VersionedMap sut1 = store1.createMap(); - - // Fill one map - for (int i = 0; i < steps; i++) { - int index1 = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - sut1.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index1 + ": exception happened: " + exception); - } - MapTestEnvironment.printStatus(scenario, index1, steps, "Fill"); - if (index1 % commitFrequency == 0) { - sut1.commit(); - } - } - - // Get the content of the first map - List> content = new LinkedList<>(); - Cursor cursor = sut1.getAll(); - while (cursor.move()) { - content.add(new SimpleEntry<>(cursor.getKey(), cursor.getValue())); - } - - // Randomize the order of the content - Collections.shuffle(content, r); - - VersionedMapStore store2 = new VersionedMapStoreImpl(chp, values[0]); - VersionedMap sut2 = store2.createMap(); - int index2 = 1; - for (SimpleEntry entry : content) { - sut2.put(entry.getKey(), entry.getValue()); - if(index2++%commitFrequency == 0) - sut2.commit(); - } - - // Check the integrity of the maps - ((VersionedMapImpl) sut1).checkIntegrity(); - ((VersionedMapImpl) sut2).checkIntegrity(); - -// // Compare the two maps - // By size - assertEquals(sut1.getSize(), content.size()); - assertEquals(sut2.getSize(), content.size()); - - - - // By cursors - Cursor cursor1 = sut1.getAll(); - Cursor cursor2 = sut2.getAll(); - int index3 = 1; - boolean canMove = true; - do{ - boolean canMove1 = cursor1.move(); - boolean canMove2 = cursor2.move(); - assertEquals(canMove1, canMove2, scenario + ":" + index3 +" Cursors stopped at different times!"); - assertEquals(cursor1.getKey(), cursor2.getKey(), scenario + ":" + index3 +" Cursors have different keys!"); - assertEquals(cursor1.getValue(), cursor2.getValue(), scenario + ":" + index3 +" Cursors have different values!"); - - canMove = canMove1; - MapTestEnvironment.printStatus(scenario, index3++, content.size(), "Compare"); - } while (canMove); - - // By hashcode - assertEquals(sut1.hashCode(), sut2.hashCode(), "Hash codes are not equal!"); - - // By equals - assertEquals(sut1, sut2, "Maps are not equals"); - } - - @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Timeout(value = 10) - @Tag("fuzz") - void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} 
Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/DiffCursorFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/DiffCursorFuzzTest.java deleted file mode 100644 index fd663a7c..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/DiffCursorFuzzTest.java +++ /dev/null @@ -1,117 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.DiffCursor; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class DiffCursorFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - iterativeRandomPutsAndCommitsThenDiffcursor(scenario, store, steps, maxKey, values, seed, commitFrequency); - } - - private void iterativeRandomPutsAndCommitsThenDiffcursor(String scenario, VersionedMapStore store, - int steps, int maxKey, String[] values, int seed, int commitFrequency) { - // 1. build a map with versions - Random r = new Random(seed); - VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); - int largestCommit = -1; - - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - versioned.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - long version = versioned.commit(); - largestCommit = (int) version; - } - if (index % 10000 == 0) - System.out.println(scenario + ":" + index + "/" + steps + " building finished"); - } - // 2. 
create a non-versioned map, - VersionedMapImpl moving = (VersionedMapImpl) store.createMap(); - Random r2 = new Random(seed + 1); - - final int diffTravelFrequency = commitFrequency * 2; - for (int i = 0; i < steps; i++) { - int index = i + 1; - if (index % diffTravelFrequency == 0) { - // difftravel - long travelToVersion = r2.nextInt(largestCommit + 1); - DiffCursor diffCursor = moving.getDiffCursor(travelToVersion); - moving.putAll(diffCursor); - - } else { - // random puts - int nextKey = r2.nextInt(maxKey); - String nextValue = values[r2.nextInt(values.length)]; - try { - moving.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - versioned.commit(); - } - if (index % 10000 == 0) - System.out.println(scenario + ":" + index + "/" + steps + " building finished"); - } - } - - } - - @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Timeout(value = 10) - @Tag("fuzz") - void parametrizedFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, - noKeys, noValues, commitFrequency, evilHash); - } - - static Stream parametrizedFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadFuzzTest.java deleted file mode 100644 index e6af13bf..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadFuzzTest.java +++ /dev/null @@ -1,97 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class MultiThreadFuzzTest { - public static final int noThreads = 32; - - private void 
runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - - // initialize runnables - MultiThreadTestRunnable[] runnables = new MultiThreadTestRunnable[noThreads]; - for(int i = 0; i errors = new LinkedList<>(); - for(int i = 0; i parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Multithread {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadTestRunnable.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadTestRunnable.java deleted file mode 100644 index 922178c6..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MultiThreadTestRunnable.java +++ /dev/null @@ -1,101 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Random; - -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -public class MultiThreadTestRunnable implements Runnable { - String scenario; - VersionedMapStore store; - int steps; - int maxKey; - String[] values; - int seed; - int commitFrequency; - List errors = new LinkedList<>(); - - public MultiThreadTestRunnable(String scenario, VersionedMapStore store, int steps, - int maxKey, String[] values, int seed, int commitFrequency) { - super(); - this.scenario = scenario; - this.store = store; - this.steps = steps; - this.maxKey = maxKey; - this.values = values; - this.seed = seed; - this.commitFrequency = commitFrequency; - } - - private void logAndThrowError(String message) { - AssertionError error = new AssertionError(message); - errors.add(error); - } - - public List getErrors() { - return errors; - } - - @Override - public void run() { - // 1. 
build a map with versions - Random r = new Random(seed); - VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); - Map index2Version = new HashMap<>(); - - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - versioned.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - long version = versioned.commit(); - index2Version.put(i, version); - } - MapTestEnvironment.printStatus(scenario, index, steps, "building"); - } - // 2. create a non-versioned - VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); - r = new Random(seed); - Random r2 = new Random(seed+1); - - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - reference.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); - } - // go back to an existing state and compare to the reference - if (index % (commitFrequency) == 0) { - versioned.restore(index2Version.get(i)); - MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned,errors); - - // go back to a random state (probably created by another thread) - List states = new ArrayList<>(store.getStates()); - Collections.shuffle(states, r2); - for(Long state : states.subList(0, Math.min(states.size(), 100))) { - versioned.restore(state); - } - versioned.restore(index2Version.get(i)); - } - - MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); - } - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableFuzzTest.java deleted file mode 100644 index a16cb8f5..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableFuzzTest.java +++ /dev/null @@ -1,92 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class MutableFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - VersionedMapImpl sut = (VersionedMapImpl) store.createMap(); - MapTestEnvironment e = new MapTestEnvironment(sut); - - Random r = new Random(seed); - - iterativeRandomPuts(scenario, steps, maxKey, values, e, r); - } - - private void iterativeRandomPuts(String scenario, int steps, int 
maxKey, String[] values, - MapTestEnvironment e, Random r) { - int stopAt = -1; - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - if (index == stopAt) { - System.out.println("issue!"); - System.out.println("State before:"); - e.printComparison(); - e.sut.prettyPrint(); - System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); - } - try { - e.put(nextKey, nextValue); - if (index == stopAt) { - e.sut.prettyPrint(); - } - e.checkEquivalence(scenario + ":" + index); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - MapTestEnvironment.printStatus(scenario, index, steps, null); - } - } - - @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") - @MethodSource - @Timeout(value = 10) - @Tag("fuzz") - void parametrizedFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { - runFuzzTest( - "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? "Evil" : "Normal"), - seed, steps, noKeys, noValues, evilHash); - } - - static Stream parametrizedFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, - new Object[] { 3, 32, 32 * 32, 32 * 32 * 32 * 32 }, new Object[] { 2, 3 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { - runFuzzTest( - "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? 
"Evil" : "Normal"), - seed, steps, noKeys, noValues, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableImmutableCompareFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableImmutableCompareFuzzTest.java deleted file mode 100644 index 45308892..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/MutableImmutableCompareFuzzTest.java +++ /dev/null @@ -1,89 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class MutableImmutableCompareFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - VersionedMapImpl immutable = (VersionedMapImpl) store.createMap(); - VersionedMapImpl mutable = (VersionedMapImpl) store.createMap(); - - Random r = new Random(seed); - - iterativeRandomPutsAndCommitsAndCompare(scenario, immutable, mutable, steps, maxKey, values, r, - commitFrequency); - } - - private void iterativeRandomPutsAndCommitsAndCompare(String scenario, VersionedMapImpl immutable, - VersionedMapImpl mutable, int steps, int maxKey, String[] values, Random r, - int commitFrequency) { - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - immutable.put(nextKey, nextValue); - mutable.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - immutable.commit(); - } - MapTestEnvironment.compareTwoMaps(scenario + ":" + index, immutable, mutable); - - MapTestEnvironment.printStatus(scenario, index, steps, null); - } - } - - @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Timeout(value = 10) - @Tag("fuzz") - void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, - noKeys, noValues, commitFrequency, evilHash); - } - - static Stream parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); 
- } - - @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("fuzz") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, - noKeys, noValues, commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(MutableImmutableCompareFuzzTest.parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/RestoreFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/RestoreFuzzTest.java deleted file mode 100644 index 1b8b38c4..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/RestoreFuzzTest.java +++ /dev/null @@ -1,109 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import static org.junit.jupiter.api.Assertions.fail; - -import java.util.HashMap; -import java.util.Map; -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class RestoreFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); - - iterativeRandomPutsAndCommitsThenRestore(scenario, store, steps, maxKey, values, seed, commitFrequency); - } - - private void iterativeRandomPutsAndCommitsThenRestore(String scenario, VersionedMapStore store, - int steps, int maxKey, String[] values, int seed, int commitFrequency) { - // 1. build a map with versions - Random r = new Random(seed); - VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); - Map index2Version = new HashMap<>(); - - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - versioned.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - long version = versioned.commit(); - index2Version.put(i, version); - } - MapTestEnvironment.printStatus(scenario, index, steps, "building"); - } - // 2. 
create a non-versioned and - VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); - r = new Random(seed); - - for (int i = 0; i < steps; i++) { - int index = i + 1; - int nextKey = r.nextInt(maxKey); - String nextValue = values[r.nextInt(values.length)]; - try { - reference.put(nextKey, nextValue); - } catch (Exception exception) { - exception.printStackTrace(); - fail(scenario + ":" + index + ": exception happened: " + exception); - } - if (index % commitFrequency == 0) { - versioned.restore(index2Version.get(i)); - MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned); - } - MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); - } - - } - - @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Timeout(value = 10) - @Tag("smoke") - void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("smoke") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/SharedStoreFuzzTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/SharedStoreFuzzTest.java deleted file mode 100644 index 1703a732..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/SharedStoreFuzzTest.java +++ /dev/null @@ -1,113 +0,0 @@ -package tools.refinery.data.map.tests.fuzz; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.stream.Stream; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.map.VersionedMapStore; -import tools.refinery.data.map.VersionedMapStoreImpl; -import tools.refinery.data.map.internal.VersionedMapImpl; -import tools.refinery.data.map.tests.fuzz.utils.FuzzTestUtils; -import tools.refinery.data.map.tests.utils.MapTestEnvironment; - -class SharedStoreFuzzTest { - private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, - boolean evilHash) { - String[] values = MapTestEnvironment.prepareValues(maxValue); - ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); - - List> stores = VersionedMapStoreImpl.createSharedVersionedMapStores(5, chp, 
values[0]); - - iterativeRandomPutsAndCommitsThenRestore(scenario, stores, steps, maxKey, values, seed, commitFrequency); - } - - private void iterativeRandomPutsAndCommitsThenRestore(String scenario, List> stores, - int steps, int maxKey, String[] values, int seed, int commitFrequency) { - // 1. maps with versions - Random r = new Random(seed); - List> versioneds = new LinkedList<>(); - for(VersionedMapStore store : stores) { - versioneds.add((VersionedMapImpl) store.createMap()); - } - - List> index2Version = new LinkedList<>(); - for(int i = 0; i()); - } - - for (int i = 0; i < steps; i++) { - int stepIndex = i + 1; - for (int storeIndex = 0; storeIndex> reference = new LinkedList<>(); - for(VersionedMapStore store : stores) { - reference.add((VersionedMapImpl) store.createMap()); - } - r = new Random(seed); - - for (int i = 0; i < steps; i++) { - int index = i + 1; - for (int storeIndex = 0; storeIndex parametrizedFastFuzz() { - return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, - new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, - new Object[] { false, true }); - } - - @ParameterizedTest(name = "Shared Store {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") - @MethodSource - @Tag("smoke") - @Tag("slow") - void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, - boolean evilHash) { - runFuzzTest("SharedS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, - commitFrequency, evilHash); - } - - static Stream parametrizedSlowFuzz() { - return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); - } -} diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtils.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtils.java deleted file mode 100644 index 23df4aef..00000000 --- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtils.java +++ /dev/null @@ -1,64 +0,0 @@ -package tools.refinery.data.map.tests.fuzz.utils; - -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.stream.Stream; - -import org.junit.jupiter.params.provider.Arguments; - -public final class FuzzTestUtils { - public static final int FAST_STEP_COUNT = 500; - public static final int SLOW_STEP_COUNT = 32 * 32 * 32 * 32; - - private FuzzTestUtils() { - throw new IllegalStateException("This is a static utility class and should not be instantiated directly"); - } - - public static Stream changeStepCount(Stream arguments, int parameterIndex) { - return arguments.map(x -> Arguments.of(updatedStepCount(x.get(), parameterIndex))); - } - - public static Object[] updatedStepCount(Object[] arguments, int parameterIndex) { - Object[] copy = Arrays.copyOf(arguments, arguments.length); - copy[parameterIndex] = SLOW_STEP_COUNT; - return copy; - } - - static List> permutationInternal(int from, Object[]... 
valueOption) {
-		if (valueOption.length == from) {
-			return List.of(List.of());
-		} else {
-			Object[] permuteThis = valueOption[from];
-			List<List<Object>> otherCombination = permutationInternal(from + 1, valueOption);
-			List<List<Object>> result = new LinkedList<>();
-			for (Object permuteThisElement : permuteThis) {
-				for (List<Object> otherCombinationList : otherCombination) {
-					List<Object> newResult = new LinkedList<>();
-					newResult.add(permuteThisElement);
-					newResult.addAll(otherCombinationList);
-					result.add(newResult);
-				}
-			}
-			return result;
-		}
-	}
-
-	public static Stream<Arguments> permutation(Object[]... valueOption) {
-		List<List<Object>> permutations = permutationInternal(0, valueOption);
-		return permutations.stream().map(x -> Arguments.of(x.toArray()));
-	}
-
-	public static Stream<Arguments> permutationWithSize(Object[]... valueOption) {
-		int size = 1;
-		for (int i = 0; i < valueOption.length; i++) {
-			size *= valueOption[i].length;
-		}
-		Object[][] newValueOption = new Object[valueOption.length + 1][];
-		newValueOption[0] = new Object[] { size };
-		for (int i = 1; i < newValueOption.length; i++) {
-			newValueOption[i] = valueOption[i - 1];
-		}
-		return permutation(newValueOption);
-	}
-}
diff --git a/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtilsTest.java b/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtilsTest.java
deleted file mode 100644
index abf8be3c..00000000
--- a/store/src/test/java/tools/refinery/data/map/tests/fuzz/utils/FuzzTestUtilsTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package tools.refinery.data.map.tests.fuzz.utils;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.List;
-
-import org.junit.jupiter.api.Test;
-
-class FuzzTestUtilsTest {
-	@Test
-	void permutationInternalTest() {
-		List<List<Object>> res = FuzzTestUtils.permutationInternal(0, new Object[] { 1, 2, 3 },
-				new Object[] { 'a', 'b', 'c' }, new Object[] { "alpha", "beta", "gamma", "delta" });
-		assertEquals(3 * 3 * 4, res.size());
-	}
-
-	@Test
-	void permutationTest1() {
-		var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' },
-				new Object[] { "alpha", "beta", "gamma", "delta" });
-		assertEquals(3 * 3 * 4, res.count());
-	}
-
-	@Test
-	void permutationTest2() {
-		var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' },
-				new Object[] { "alpha", "beta", "gamma", "delta" });
-		var arguments = res.findFirst().get().get();
-		assertEquals(1, arguments[0]);
-		assertEquals('a', arguments[1]);
-		assertEquals("alpha", arguments[2]);
-	}
-}
diff --git a/store/src/test/java/tools/refinery/data/map/tests/utils/MapTestEnvironment.java b/store/src/test/java/tools/refinery/data/map/tests/utils/MapTestEnvironment.java
deleted file mode 100644
index e1cfc2e2..00000000
--- a/store/src/test/java/tools/refinery/data/map/tests/utils/MapTestEnvironment.java
+++ /dev/null
@@ -1,213 +0,0 @@
-package tools.refinery.data.map.tests.utils;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assertions.fail;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TreeMap;
-
-import tools.refinery.data.map.ContinousHashProvider;
-import tools.refinery.data.map.Cursor;
-import tools.refinery.data.map.VersionedMap;
-import tools.refinery.data.map.internal.VersionedMapImpl;
-
-public class MapTestEnvironment<K, V> {
-	public static String[] prepareValues(int
maxValue) { - String[] values = new String[maxValue]; - values[0] = "DEFAULT"; - for (int i = 1; i < values.length; i++) { - values[i] = "VAL" + i; - } - return values; - } - - public static ContinousHashProvider prepareHashProvider(final boolean evil) { - // Use maxPrime = 2147483629 - - ContinousHashProvider chp = new ContinousHashProvider() { - - @Override - public int getHash(Integer key, int index) { - if (evil && index < 15 && index < key / 3) { - return 7; - } - int result = 1; - final int prime = 31; - - result = prime * result + key; - result = prime * result + index; - - return result; - } - }; - return chp; - } - - public static void printStatus(String scenario, int actual, int max, String stepName) { - if (actual % 10000 == 0) { - String printStepName = stepName == null ? "" : stepName; - System.out.format(scenario + ":%d/%d (%d%%) " + printStepName + "%n", actual, max, actual * 100 / max); - } - - } - - public static void compareTwoMaps(String title, VersionedMapImpl map1, - VersionedMapImpl map2) { - compareTwoMaps(title, map1, map2, null); - } - public static void compareTwoMaps(String title, VersionedMapImpl map1, - VersionedMapImpl map2, List errors) { - // 1. Comparing cursors. - Cursor cursor1 = map1.getAll(); - Cursor cursor2 = map2.getAll(); - while (!cursor1.isTerminated()) { - if (cursor2.isTerminated()) { - fail("cursor 2 terminated before cursor1"); - } - assertEqualsList(cursor1.getKey(), cursor2.getKey(),"Keys not equal", errors); - assertEqualsList(cursor2.getValue(), cursor2.getValue(), "Values not equal", errors); - cursor1.move(); - cursor2.move(); - } - if (!cursor2.isTerminated()) - fail("cursor 1 terminated before cursor 2"); - - // 2.1. comparing hash codes - assertEqualsList(map1.hashCode(), map2.hashCode(), title + ": hash code check",errors); - assertEqualsList(map1, map2, title + ": 1.equals(2)",errors); - assertEqualsList(map2, map1, title + ": 2.equals(1)",errors); - } - private static void assertEqualsList(Object o1, Object o2, String message, List errors) { - if(errors == null) { - assertEquals(o1, o2, message); - } else { - if(o1 != null) { - if(!(o1.equals(o2))) { - AssertionError error = new AssertionError((message != null ? message+" " : "") + "expected: " + o1 + " but was : " + o2); - errors.add(error); - } - } - } - } - - public VersionedMapImpl sut; - Map oracle = new HashMap(); - - public MapTestEnvironment(VersionedMapImpl sut) { - this.sut = sut; - } - - public void put(K key, V value) { - V oldSutValue = sut.put(key, value); - V oldOracleValue; - if (value != sut.getDefaultValue()) { - oldOracleValue = oracle.put(key, value); - } else { - oldOracleValue = oracle.remove(key); - } - if(oldSutValue == sut.getDefaultValue() && oldOracleValue != null) { - fail("After put, SUT old value was default, but oracle old walue was " + oldOracleValue); - } - if(oldSutValue != sut.getDefaultValue()) { - assertEquals(oldOracleValue, oldSutValue); - } - } - - public void checkEquivalence(String title) { - // 0. Checking integrity - try { - sut.checkIntegrity(); - } catch (IllegalStateException e) { - fail(title + ": " + e.getMessage()); - } - - // 1. Checking: if Reference contains pair, then SUT contains - // pair. - // Tests get functions - for (Entry entry : oracle.entrySet()) { - V sutValue = sut.get(entry.getKey()); - V oracleValue = entry.getValue(); - if (sutValue != oracleValue) { - printComparison(); - fail(title + ": Non-equivalent get(" + entry.getKey() + ") results: SUT=" + sutValue + ", Oracle=" - + oracleValue + "!"); - } - } - - // 2. 
Checking: if SUT contains pair, then Reference contains - // pair. - // Tests iterators - int elementsInSutEntrySet = 0; - Cursor cursor = sut.getAll(); - while (cursor.move()) { - elementsInSutEntrySet++; - K key = cursor.getKey(); - V sutValue = cursor.getValue(); - // System.out.println(key + " -> " + sutValue); - V oracleValue = oracle.get(key); - if (sutValue != oracleValue) { - printComparison(); - fail(title + ": Non-equivalent entry in iterator: SUT=<" + key + "," + sutValue + ">, Oracle=<" + key - + "," + oracleValue + ">!"); - } - - } - - // 3. Checking sizes - // Counting of non-default value pairs. - int oracleSize = oracle.entrySet().size(); - long sutSize = sut.getSize(); - if (oracleSize != sutSize || oracleSize != elementsInSutEntrySet) { - printComparison(); - fail(title + ": Non-eqivalent size() result: SUT.getSize()=" + sutSize + ", SUT.entryset.size=" - + elementsInSutEntrySet + ", Oracle=" + oracleSize + "!"); - } - } - - public static void checkOrder(String scenario, VersionedMap versionedMap) { - K previous = null; - Cursor cursor = versionedMap.getAll(); - while(cursor.move()) { - System.out.println(cursor.getKey() + " " + ((VersionedMapImpl) versionedMap).getHashProvider().getHash(cursor.getKey(), 0)); - if(previous != null) { - int comparisonResult = ((VersionedMapImpl) versionedMap).getHashProvider().compare(previous, cursor.getKey()); - assertTrue(comparisonResult<0,scenario+" Cursor order is not incremental!"); - } - previous = cursor.getKey(); - } - System.out.println(); - } - - public void printComparison() { - System.out.println("SUT:"); - printEntrySet(sut.getAll()); - System.out.println("Oracle:"); - printEntrySet(oracle.entrySet().iterator()); - } - - private void printEntrySet(Iterator> iterator) { - TreeMap treemap = new TreeMap<>(); - while (iterator.hasNext()) { - Entry entry = iterator.next(); - treemap.put(entry.getKey(), entry.getValue()); - } - for (Entry e : treemap.entrySet()) { - System.out.println("\t" + e.getKey() + " -> " + e.getValue()); - } - } - - private void printEntrySet(Cursor cursor) { - TreeMap treemap = new TreeMap<>(); - while (cursor.move()) { - treemap.put(cursor.getKey(), cursor.getValue()); - } - for (Entry e : treemap.entrySet()) { - System.out.println("\t" + e.getKey() + " -> " + e.getValue()); - } - } -} diff --git a/store/src/test/java/tools/refinery/data/model/hashTests/HashEfficiencyTest.java b/store/src/test/java/tools/refinery/data/model/hashTests/HashEfficiencyTest.java deleted file mode 100644 index f3479846..00000000 --- a/store/src/test/java/tools/refinery/data/model/hashTests/HashEfficiencyTest.java +++ /dev/null @@ -1,161 +0,0 @@ -package tools.refinery.data.model.hashTests; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; -import java.util.Random; - -import org.junit.jupiter.api.Test; - -import tools.refinery.data.map.ContinousHashProvider; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.TupleHashProvider; -import tools.refinery.data.model.TupleHashProviderBitMagic; - -class HashEfficiencyTest { - - private static List permutations(int range, int arity) { - if(arity == 1) { - List result = new ArrayList<>(range); - for(int i=0; i 1) { - List smallers = permutations(range, arity-1); - List result = new ArrayList<>(range*smallers.size()); - for(Tuple smaller : smallers) { - for(int i=0; i nPermutations(int arity, int n) { - int range = amountToRange(arity, n); - List permutations = 
permutations(range, arity); - return permutations.subList(0, n); - } - - public static List nRandoms(int arity, int n, int seed) { - int range = amountToRange(arity, n); - List permutations = new ArrayList<>(n); - Random r = new Random(seed); - for(int i = 0; i p = permutations(10, 2); - assertEquals(p.size(),10*10); - } -// private void printTuples(List p) { -// for(Tuple element : p) { -// System.out.println(element); -// } -// } - @Test - void nPermutationTest() { - final int amount = 500; - List p = nPermutations(2, amount); - assertEquals(amount,p.size()); - } - @Test - void nRandomTest() { - final int amount = 500; - List p = nRandoms(2, amount, 1);; - assertEquals(amount,p.size()); - } - private static double calculateHashClashes(List tuples, ContinousHashProvider chp) { - int sumClashes = 0; - - for(int i = 0; i chp, Tuple a, Tuple b) { - if(a.equals(b)) return 0; - final int bits = 5; - final int segments = Integer.SIZE/bits; - final int mask = (1<>(depth*5))&mask; - int bHash = (chp.getHash(b, index)>>(depth*5))&mask; - if(aHash != bHash) { - return i+1; - } - if(i>400) { - throw new IllegalStateException(a+" vs "+b); - } - } - } - private static double caclulateOptimalHashClash(int size) { - return (Math.log(size)/Math.log(32)); - } - public static void main(String[] args) { - List hashNames = new LinkedList<>(); - List> hashes = new LinkedList<>(); - hashNames.add("PrimeGroup"); - hashes.add(new TupleHashProvider()); - hashNames.add("BitMagic"); - hashes.add(new TupleHashProviderBitMagic()); - - int[] arities = new int[] {2,3,4,5}; - int[] sizes = new int[] {32*32,32*32*8}; - - System.out.println("Size,Arity,DataSource,Hash,Chashes,Optimal,Badness"); - for(int size : sizes) { - double optimalClashes = caclulateOptimalHashClash(size); - for(int arity : arities) { - List dataSourceNames = new LinkedList<>(); - List> dataSources = new LinkedList<>(); - -// dataSourceNames.add("Permutation"); -// dataSources.add(nPermutations(arity, size)); - dataSourceNames.add("Random"); - dataSources.add(nRandoms(arity, size, 0)); - - for(int dataSourceIndex = 0; dataSourceIndex person = new Relation<>("Person", 1, false); - Relation friend = new Relation<>("friend", 2, false); - - ModelStore store = new ModelStoreImpl(Set.of(person, friend)); - Model model = store.createModel(); - - assertTrue(store.getDataRepresentations().contains(person)); - assertTrue(store.getDataRepresentations().contains(friend)); - assertTrue(model.getDataRepresentations().contains(person)); - assertTrue(model.getDataRepresentations().contains(friend)); - - Relation other = new Relation("other", 2, null); - assertFalse(model.getDataRepresentations().contains(other)); - } - - @Test - void modelBuildingTest() { - Relation person = new Relation<>("Person", 1, false); - Relation age = new Relation("age", 1, null); - Relation friend = new Relation<>("friend", 2, false); - - ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); - Model model = store.createModel(); - - model.put(person, Tuple.of(0), true); - model.put(person, Tuple.of(1), true); - model.put(age, Tuple.of(0), 3); - model.put(age, Tuple.of(1), 1); - model.put(friend, Tuple.of(0, 1), true); - model.put(friend, Tuple.of(1, 0), true); - - assertTrue(model.get(person, Tuple.of(0))); - assertTrue(model.get(person, Tuple.of(1))); - assertFalse(model.get(person, Tuple.of(2))); - - assertEquals(3, model.get(age, Tuple.of(0))); - assertEquals(1, model.get(age, Tuple.of(1))); - assertEquals(null, model.get(age, Tuple.of(2))); - - assertTrue(model.get(friend, 
Tuple.of(0, 1))); - assertFalse(model.get(friend, Tuple.of(0, 5))); - } - - @Test - void modelBuildingArityFailTest() { - Relation person = new Relation<>("Person", 1, false); - ModelStore store = new ModelStoreImpl(Set.of(person)); - Model model = store.createModel(); - - final Tuple tuple3 = Tuple.of(1, 1, 1); - Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(person, tuple3, true)); - Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(person, tuple3)); - } - - @Test - void modelBuildingNullFailTest() { - Relation age = new Relation("age", 1, null); - ModelStore store = new ModelStoreImpl(Set.of(age)); - Model model = store.createModel(); - - model.put(age, Tuple.of(1), null); // valid - Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(age, null, 1)); - Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(age, null)); - - } - - @Test - void modelUpdateTest() { - Relation person = new Relation<>("Person", 1, false); - Relation age = new Relation("age", 1, null); - Relation friend = new Relation<>("friend", 2, false); - - ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); - Model model = store.createModel(); - - model.put(person, Tuple.of(0), true); - model.put(person, Tuple.of(1), true); - model.put(age, Tuple.of(0), 3); - model.put(age, Tuple.of(1), 1); - model.put(friend, Tuple.of(0, 1), true); - model.put(friend, Tuple.of(1, 0), true); - - assertEquals(3, model.get(age, Tuple.of(0))); - assertTrue(model.get(friend, Tuple.of(0, 1))); - - model.put(age, Tuple.of(0), 4); - model.put(friend, Tuple.of(0, 1), false); - - assertEquals(4, model.get(age, Tuple.of(0))); - assertFalse(model.get(friend, Tuple.of(0, 1))); - } - - @Test - void restoreTest() { - Relation person = new Relation("Person", 1, false); - Relation friend = new Relation("friend", 2, false); - - ModelStore store = new ModelStoreImpl(Set.of(person, friend)); - Model model = store.createModel(); - - model.put(person, Tuple.of(0), true); - model.put(person, Tuple.of(1), true); - model.put(friend, Tuple.of(0, 1), true); - model.put(friend, Tuple.of(1, 0), true); - long state1 = model.commit(); - - assertFalse(model.get(person, Tuple.of(2))); - assertFalse(model.get(friend, Tuple.of(0, 2))); - - model.put(person, Tuple.of(2), true); - model.put(friend, Tuple.of(0, 2), true); - long state2 = model.commit(); - - assertTrue(model.get(person, Tuple.of(2))); - assertTrue(model.get(friend, Tuple.of(0, 2))); - - model.restore(state1); - - assertFalse(model.get(person, Tuple.of(2))); - assertFalse(model.get(friend, Tuple.of(0, 2))); - - model.restore(state2); - - assertTrue(model.get(person, Tuple.of(2))); - assertTrue(model.get(friend, Tuple.of(0, 2))); - } -} diff --git a/store/src/test/java/tools/refinery/data/query/test/QueryTest.java b/store/src/test/java/tools/refinery/data/query/test/QueryTest.java deleted file mode 100644 index faf3f111..00000000 --- a/store/src/test/java/tools/refinery/data/query/test/QueryTest.java +++ /dev/null @@ -1,90 +0,0 @@ -package tools.refinery.data.query.test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.Collection; -import java.util.Set; - -import org.eclipse.viatra.query.runtime.api.AdvancedViatraQueryEngine; -import org.eclipse.viatra.query.runtime.api.GenericPatternMatch; -import org.eclipse.viatra.query.runtime.api.GenericPatternMatcher; 
-import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; -import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; -import org.junit.jupiter.api.Test; - -import tools.refinery.data.model.Model; -import tools.refinery.data.model.ModelStore; -import tools.refinery.data.model.ModelStoreImpl; -import tools.refinery.data.model.Tuple; -import tools.refinery.data.model.representation.Relation; -import tools.refinery.data.model.representation.TruthValue; -import tools.refinery.data.query.RelationalScope; -import tools.refinery.data.query.internal.PredicateTranslator; -import tools.refinery.data.query.view.FilteredRelationView; -import tools.refinery.data.query.view.FunctionalRelationView; -import tools.refinery.data.query.view.KeyOnlyRelationView; -import tools.refinery.data.query.view.RelationView; - -class QueryTest { - @Test - void minimalTest() { - Relation person = new Relation<>("Person", 1, false); - - RelationView persionView = new KeyOnlyRelationView(person); - GenericQuerySpecification personQuery = (new PredicateTranslator("PersonQuery")) - .addParameter("p", persionView).addConstraint(persionView, "p").build(); - - ModelStore store = new ModelStoreImpl(Set.of(person)); - Model model = store.createModel(); - - model.put(person, Tuple.of(0), true); - model.put(person, Tuple.of(1), true); - - RelationalScope scope = new RelationalScope(model, Set.of(persionView)); - - ViatraQueryEngine engine = AdvancedViatraQueryEngine.on(scope); - GenericPatternMatcher personMatcher = engine.getMatcher(personQuery); - - assertEquals(2, personMatcher.countMatches()); - } - - void modelBuildingTest() { - Relation person = new Relation<>("Person", 1, false); - Relation age = new Relation("age", 1, null); - Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); - - ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); - Model model = store.createModel(); - - model.put(person, Tuple.of(0), true); - model.put(person, Tuple.of(1), true); - model.put(age, Tuple.of(0), 3); - model.put(age, Tuple.of(1), 1); - model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); - model.put(friend, Tuple.of(1, 0), TruthValue.UNKNOWN); - - // Sanity check - assertTrue(model.get(person, Tuple.of(0))); - assertTrue(model.get(person, Tuple.of(1))); - assertFalse(model.get(person, Tuple.of(2))); - - RelationView persionView = new KeyOnlyRelationView(person); - RelationView ageView = new FunctionalRelationView<>(age); - RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); - RelationView friendMayView = new FilteredRelationView(friend, (k, v) -> v.may()); - - RelationalScope scope = new RelationalScope(model, Set.of(persionView, ageView, friendMustView, friendMayView)); - - GenericQuerySpecification personQuery = (new PredicateTranslator("PersonQuery")) - .addParameter("p", persionView).addConstraint(persionView, "p").build(); - - ViatraQueryEngine engine = AdvancedViatraQueryEngine.on(scope); - GenericPatternMatcher personMatcher = engine.getMatcher(personQuery); - Collection personMatches = personMatcher.getAllMatches(); - for (GenericPatternMatch personMatch : personMatches) { - System.out.println(personMatch); - } - } -} \ No newline at end of file diff --git a/store/src/test/java/tools/refinery/data/util/CollectionsUtilTests.java b/store/src/test/java/tools/refinery/data/util/CollectionsUtilTests.java deleted file mode 100644 index 39ff4aca..00000000 --- a/store/src/test/java/tools/refinery/data/util/CollectionsUtilTests.java +++ /dev/null @@ -1,78 +0,0 @@ 
-package tools.refinery.data.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static tools.refinery.data.util.CollectionsUtil.filter; -import static tools.refinery.data.util.CollectionsUtil.map; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class CollectionsUtilTests { - List list10 = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); - List listTen = List.of("1", "2", "3", "4", "5", "6", "7", "8", "9", "10"); - - private static void compare(Iterable a, Iterable b) { - List listA = toList(a); - List listB = toList(b); - assertEquals(listA, listB); - } - - private static List toList(Iterable a) { - List result = new ArrayList(); - Iterator iterator = a.iterator(); - while (iterator.hasNext()) { - result.add(iterator.next()); - } - return result; - } - - @Test - void testFilterEven() { - compare(List.of(2, 4, 6, 8, 10), filter(list10, (x -> x % 2 == 0))); - } - - @Test - void testFilterOdd() { - compare(List.of(1, 3, 5, 7, 9), filter(list10, (x -> x % 2 == 1))); - } - - @Test - void testFilterFalse() { - compare(List.of(), filter(list10, (x -> false))); - } - - @Test - void testFilterTrue() { - compare(list10, filter(list10, (x -> true))); - } - - @Test - void testFilterEmpty() { - compare(List.of(), filter(List.of(), (x -> true))); - } - - @Test() - void testNoSuchElement() { - Iterable iterable = filter(list10, (x -> x % 2 == 0)); - Iterator iterator = iterable.iterator(); - while (iterator.hasNext()) { - iterator.next(); - } - Assertions.assertThrows(NoSuchElementException.class, () -> iterator.next()); - } - - @Test() - void mapTest() { - compare(listTen, map(list10, x -> x.toString())); - } - - @Test() - void mapEmtyTest() { - compare(List.of(), map(List.of(), x -> x.toString())); - } -} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java new file mode 100644 index 00000000..1f9d022f --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/CommitFuzzTest.java @@ -0,0 +1,96 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class CommitFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl sut = (VersionedMapImpl) store.createMap(); + MapTestEnvironment e = new MapTestEnvironment(sut); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommits(scenario, 
steps, maxKey, values, e, r, commitFrequency); + } + + private void iterativeRandomPutsAndCommits(String scenario, int steps, int maxKey, String[] values, + MapTestEnvironment e, Random r, int commitFrequency) { + int stopAt = -1; + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + if (index == stopAt) { + System.out.println("issue!"); + System.out.println("State before:"); + e.printComparison(); + e.sut.prettyPrint(); + System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); + } + try { + e.put(nextKey, nextValue); + if (index == stopAt) { + e.sut.prettyPrint(); + } + e.checkEquivalence(scenario + ":" + index); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index, steps, null); + if (index % commitFrequency == 0) { + e.sut.commit(); + } + } + } + + @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Commit {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CommitS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java new file mode 100644 index 00000000..263cb2cd --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/ContentEqualsFuzzTest.java @@ -0,0 +1,143 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.AbstractMap.SimpleEntry; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; 
+import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class ContentEqualsFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommitsThenCompare(scenario, chp, steps, maxKey, values, r, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenCompare(String scenario, ContinousHashProvider chp, int steps, int maxKey, String[] values, Random r, int commitFrequency) { + + VersionedMapStore store1 = new VersionedMapStoreImpl(chp, values[0]); + VersionedMap sut1 = store1.createMap(); + + // Fill one map + for (int i = 0; i < steps; i++) { + int index1 = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + sut1.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index1 + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index1, steps, "Fill"); + if (index1 % commitFrequency == 0) { + sut1.commit(); + } + } + + // Get the content of the first map + List> content = new LinkedList<>(); + Cursor cursor = sut1.getAll(); + while (cursor.move()) { + content.add(new SimpleEntry<>(cursor.getKey(), cursor.getValue())); + } + + // Randomize the order of the content + Collections.shuffle(content, r); + + VersionedMapStore store2 = new VersionedMapStoreImpl(chp, values[0]); + VersionedMap sut2 = store2.createMap(); + int index2 = 1; + for (SimpleEntry entry : content) { + sut2.put(entry.getKey(), entry.getValue()); + if(index2++%commitFrequency == 0) + sut2.commit(); + } + + // Check the integrity of the maps + ((VersionedMapImpl) sut1).checkIntegrity(); + ((VersionedMapImpl) sut2).checkIntegrity(); + +// // Compare the two maps + // By size + assertEquals(sut1.getSize(), content.size()); + assertEquals(sut2.getSize(), content.size()); + + + + // By cursors + Cursor cursor1 = sut1.getAll(); + Cursor cursor2 = sut2.getAll(); + int index3 = 1; + boolean canMove = true; + do{ + boolean canMove1 = cursor1.move(); + boolean canMove2 = cursor2.move(); + assertEquals(canMove1, canMove2, scenario + ":" + index3 +" Cursors stopped at different times!"); + assertEquals(cursor1.getKey(), cursor2.getKey(), scenario + ":" + index3 +" Cursors have different keys!"); + assertEquals(cursor1.getValue(), cursor2.getValue(), scenario + ":" + index3 +" Cursors have different values!"); + + canMove = canMove1; + MapTestEnvironment.printStatus(scenario, index3++, content.size(), "Compare"); + } while (canMove); + + // By hashcode + assertEquals(sut1.hashCode(), sut2.hashCode(), "Hash codes are not equal!"); + + // By equals + assertEquals(sut1, sut2, "Maps are not equals"); + } + + @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return 
FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("CompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java new file mode 100644 index 00000000..e6334224 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/DiffCursorFuzzTest.java @@ -0,0 +1,117 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.DiffCursor; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class DiffCursorFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + iterativeRandomPutsAndCommitsThenDiffcursor(scenario, store, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenDiffcursor(String scenario, VersionedMapStore store, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + int largestCommit = -1; + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + largestCommit = (int) version; + } + if (index % 10000 == 0) + System.out.println(scenario + ":" + index + "/" + steps + " building finished"); + } + // 2. 
create a non-versioned map, + VersionedMapImpl moving = (VersionedMapImpl) store.createMap(); + Random r2 = new Random(seed + 1); + + final int diffTravelFrequency = commitFrequency * 2; + for (int i = 0; i < steps; i++) { + int index = i + 1; + if (index % diffTravelFrequency == 0) { + // difftravel + long travelToVersion = r2.nextInt(largestCommit + 1); + DiffCursor diffCursor = moving.getDiffCursor(travelToVersion); + moving.putAll(diffCursor); + + } else { + // random puts + int nextKey = r2.nextInt(maxKey); + String nextValue = values[r2.nextInt(values.length)]; + try { + moving.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + versioned.commit(); + } + if (index % 10000 == 0) + System.out.println(scenario + ":" + index + "/" + steps + " building finished"); + } + } + + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java new file mode 100644 index 00000000..1ab431a8 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadFuzzTest.java @@ -0,0 +1,97 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MultiThreadFuzzTest { + public static final int noThreads = 32; + + private void 
runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + + // initialize runnables + MultiThreadTestRunnable[] runnables = new MultiThreadTestRunnable[noThreads]; + for(int i = 0; i errors = new LinkedList<>(); + for(int i = 0; i parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Multithread {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java new file mode 100644 index 00000000..f77f9ee5 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MultiThreadTestRunnable.java @@ -0,0 +1,101 @@ +package tools.refinery.store.map.tests.fuzz; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; + +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +public class MultiThreadTestRunnable implements Runnable { + String scenario; + VersionedMapStore store; + int steps; + int maxKey; + String[] values; + int seed; + int commitFrequency; + List errors = new LinkedList<>(); + + public MultiThreadTestRunnable(String scenario, VersionedMapStore store, int steps, + int maxKey, String[] values, int seed, int commitFrequency) { + super(); + this.scenario = scenario; + this.store = store; + this.steps = steps; + this.maxKey = maxKey; + this.values = values; + this.seed = seed; + this.commitFrequency = commitFrequency; + } + + private void logAndThrowError(String message) { + AssertionError error = new AssertionError(message); + errors.add(error); + } + + public List getErrors() { + return errors; + } + + @Override + public void run() { + // 1. 
build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + Map index2Version = new HashMap<>(); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + index2Version.put(i, version); + } + MapTestEnvironment.printStatus(scenario, index, steps, "building"); + } + // 2. create a non-versioned + VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); + r = new Random(seed); + Random r2 = new Random(seed+1); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + reference.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + logAndThrowError(scenario + ":" + index + ": exception happened: " + exception); + } + // go back to an existing state and compare to the reference + if (index % (commitFrequency) == 0) { + versioned.restore(index2Version.get(i)); + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned,errors); + + // go back to a random state (probably created by another thread) + List states = new ArrayList<>(store.getStates()); + Collections.shuffle(states, r2); + for(Long state : states.subList(0, Math.min(states.size(), 100))) { + versioned.restore(state); + } + versioned.restore(index2Version.get(i)); + } + + MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); + } + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java new file mode 100644 index 00000000..d40c49c4 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableFuzzTest.java @@ -0,0 +1,92 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MutableFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl sut = (VersionedMapImpl) store.createMap(); + MapTestEnvironment e = new MapTestEnvironment(sut); + + Random r = new Random(seed); + + iterativeRandomPuts(scenario, steps, maxKey, values, e, r); + } + + private void iterativeRandomPuts(String scenario, int steps, int 
maxKey, String[] values, + MapTestEnvironment e, Random r) { + int stopAt = -1; + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + if (index == stopAt) { + System.out.println("issue!"); + System.out.println("State before:"); + e.printComparison(); + e.sut.prettyPrint(); + System.out.println("Next: put(" + nextKey + "," + nextValue + ")"); + } + try { + e.put(nextKey, nextValue); + if (index == stopAt) { + e.sut.prettyPrint(); + } + e.checkEquivalence(scenario + ":" + index); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + MapTestEnvironment.printStatus(scenario, index, steps, null); + } + } + + @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { + runFuzzTest( + "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? "Evil" : "Normal"), + seed, steps, noKeys, noValues, evilHash); + } + + static Stream parametrizedFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, + new Object[] { 3, 32, 32 * 32, 32 * 32 * 32 * 32 }, new Object[] { 2, 3 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Mutable {index}/{0} Steps={1} Keys={2} Values={3} seed={4} evil-hash={5}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int test, int steps, int noKeys, int noValues, int seed, boolean evilHash) { + runFuzzTest( + "MutableS" + steps + "K" + noKeys + "V" + noValues + "s" + seed + "H" + (evilHash ? 
"Evil" : "Normal"), + seed, steps, noKeys, noValues, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(parametrizedFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java new file mode 100644 index 00000000..410705a2 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/MutableImmutableCompareFuzzTest.java @@ -0,0 +1,89 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class MutableImmutableCompareFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + VersionedMapImpl immutable = (VersionedMapImpl) store.createMap(); + VersionedMapImpl mutable = (VersionedMapImpl) store.createMap(); + + Random r = new Random(seed); + + iterativeRandomPutsAndCommitsAndCompare(scenario, immutable, mutable, steps, maxKey, values, r, + commitFrequency); + } + + private void iterativeRandomPutsAndCommitsAndCompare(String scenario, VersionedMapImpl immutable, + VersionedMapImpl mutable, int steps, int maxKey, String[] values, Random r, + int commitFrequency) { + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + immutable.put(nextKey, nextValue); + mutable.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + immutable.commit(); + } + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, immutable, mutable); + + MapTestEnvironment.printStatus(scenario, index, steps, null); + } + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("fuzz") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, 
true }); + } + + @ParameterizedTest(name = "Mutable-Immutable Compare {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("fuzz") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("MutableImmutableCompareS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, + noKeys, noValues, commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(MutableImmutableCompareFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java new file mode 100644 index 00000000..2e29a03f --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/RestoreFuzzTest.java @@ -0,0 +1,109 @@ +package tools.refinery.store.map.tests.fuzz; + +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class RestoreFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + VersionedMapStore store = new VersionedMapStoreImpl(chp, values[0]); + + iterativeRandomPutsAndCommitsThenRestore(scenario, store, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenRestore(String scenario, VersionedMapStore store, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. build a map with versions + Random r = new Random(seed); + VersionedMapImpl versioned = (VersionedMapImpl) store.createMap(); + Map index2Version = new HashMap<>(); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + versioned.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + long version = versioned.commit(); + index2Version.put(i, version); + } + MapTestEnvironment.printStatus(scenario, index, steps, "building"); + } + // 2. 
create a non-versioned and + VersionedMapImpl reference = (VersionedMapImpl) store.createMap(); + r = new Random(seed); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + int nextKey = r.nextInt(maxKey); + String nextValue = values[r.nextInt(values.length)]; + try { + reference.put(nextKey, nextValue); + } catch (Exception exception) { + exception.printStackTrace(); + fail(scenario + ":" + index + ": exception happened: " + exception); + } + if (index % commitFrequency == 0) { + versioned.restore(index2Version.get(i)); + MapTestEnvironment.compareTwoMaps(scenario + ":" + index, reference, versioned); + } + MapTestEnvironment.printStatus(scenario, index, steps, "comparison"); + } + + } + + @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Timeout(value = 10) + @Tag("smoke") + void parametrizedFastFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Restore {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("smoke") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("RestoreS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java new file mode 100644 index 00000000..914a0f63 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/SharedStoreFuzzTest.java @@ -0,0 +1,113 @@ +package tools.refinery.store.map.tests.fuzz; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.VersionedMapStore; +import tools.refinery.store.map.VersionedMapStoreImpl; +import tools.refinery.store.map.internal.VersionedMapImpl; +import tools.refinery.store.map.tests.fuzz.utils.FuzzTestUtils; +import tools.refinery.store.map.tests.utils.MapTestEnvironment; + +class SharedStoreFuzzTest { + private void runFuzzTest(String scenario, int seed, int steps, int maxKey, int maxValue, int commitFrequency, + boolean evilHash) { + String[] values = MapTestEnvironment.prepareValues(maxValue); + ContinousHashProvider chp = MapTestEnvironment.prepareHashProvider(evilHash); + + List> stores = VersionedMapStoreImpl.createSharedVersionedMapStores(5, 
chp, values[0]); + + iterativeRandomPutsAndCommitsThenRestore(scenario, stores, steps, maxKey, values, seed, commitFrequency); + } + + private void iterativeRandomPutsAndCommitsThenRestore(String scenario, List> stores, + int steps, int maxKey, String[] values, int seed, int commitFrequency) { + // 1. maps with versions + Random r = new Random(seed); + List> versioneds = new LinkedList<>(); + for(VersionedMapStore store : stores) { + versioneds.add((VersionedMapImpl) store.createMap()); + } + + List> index2Version = new LinkedList<>(); + for(int i = 0; i()); + } + + for (int i = 0; i < steps; i++) { + int stepIndex = i + 1; + for (int storeIndex = 0; storeIndex> reference = new LinkedList<>(); + for(VersionedMapStore store : stores) { + reference.add((VersionedMapImpl) store.createMap()); + } + r = new Random(seed); + + for (int i = 0; i < steps; i++) { + int index = i + 1; + for (int storeIndex = 0; storeIndex parametrizedFastFuzz() { + return FuzzTestUtils.permutationWithSize(new Object[] { FuzzTestUtils.FAST_STEP_COUNT }, new Object[] { 3, 32, 32 * 32 }, + new Object[] { 2, 3 }, new Object[] { 1, 10, 100 }, new Object[] { 1, 2, 3 }, + new Object[] { false, true }); + } + + @ParameterizedTest(name = "Shared Store {index}/{0} Steps={1} Keys={2} Values={3} commit frequency={4} seed={5} evil-hash={6}") + @MethodSource + @Tag("smoke") + @Tag("slow") + void parametrizedSlowFuzz(int tests, int steps, int noKeys, int noValues, int commitFrequency, int seed, + boolean evilHash) { + runFuzzTest("SharedS" + steps + "K" + noKeys + "V" + noValues + "s" + seed, seed, steps, noKeys, noValues, + commitFrequency, evilHash); + } + + static Stream parametrizedSlowFuzz() { + return FuzzTestUtils.changeStepCount(RestoreFuzzTest.parametrizedFastFuzz(), 1); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java new file mode 100644 index 00000000..e75d7f5a --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtils.java @@ -0,0 +1,64 @@ +package tools.refinery.store.map.tests.fuzz.utils; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.params.provider.Arguments; + +public final class FuzzTestUtils { + public static final int FAST_STEP_COUNT = 500; + public static final int SLOW_STEP_COUNT = 32 * 32 * 32 * 32; + + private FuzzTestUtils() { + throw new IllegalStateException("This is a static utility class and should not be instantiated directly"); + } + + public static Stream changeStepCount(Stream arguments, int parameterIndex) { + return arguments.map(x -> Arguments.of(updatedStepCount(x.get(), parameterIndex))); + } + + public static Object[] updatedStepCount(Object[] arguments, int parameterIndex) { + Object[] copy = Arrays.copyOf(arguments, arguments.length); + copy[parameterIndex] = SLOW_STEP_COUNT; + return copy; + } + + static List> permutationInternal(int from, Object[]... 
valueOption) { + if (valueOption.length == from) { + return List.of(List.of()); + } else { + Object[] permuteThis = valueOption[from]; + List> otherCombination = permutationInternal(from + 1, valueOption); + List> result = new LinkedList<>(); + for (Object permuteThisElement : permuteThis) { + for (List otherCombinationList : otherCombination) { + List newResult = new LinkedList<>(); + newResult.add(permuteThisElement); + newResult.addAll(otherCombinationList); + result.add(newResult); + } + } + return result; + } + } + + public static Stream permutation(Object[]... valueOption) { + List> permutations = permutationInternal(0, valueOption); + return permutations.stream().map(x -> Arguments.of(x.toArray())); + } + + public static Stream permutationWithSize(Object[]... valueOption) { + int size = 1; + for (int i = 0; i < valueOption.length; i++) { + size *= valueOption[i].length; + } + Object[][] newValueOption = new Object[valueOption.length + 1][]; + newValueOption[0] = new Object[] { size }; + for (int i = 1; i < newValueOption.length; i++) { + newValueOption[i] = valueOption[i - 1]; + } + return permutation(newValueOption); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java b/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java new file mode 100644 index 00000000..72f2a46c --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/fuzz/utils/FuzzTestUtilsTest.java @@ -0,0 +1,33 @@ +package tools.refinery.store.map.tests.fuzz.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +class FuzzTestUtilsTest { + @Test + void permutationInternalTest() { + List> res = FuzzTestUtils.permutationInternal(0, new Object[] { 1, 2, 3 }, + new Object[] { 'a', 'b', 'c' }, new Object[] { "alpha", "beta", "gamma", "delta" }); + assertEquals(3 * 3 * 4, res.size()); + } + + @Test + void permutationTest1() { + var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' }, + new Object[] { "alpha", "beta", "gamma", "delta" }); + assertEquals(3 * 3 * 4, res.count()); + } + + @Test + void permutationTest2() { + var res = FuzzTestUtils.permutation(new Object[] { 1, 2, 3 }, new Object[] { 'a', 'b', 'c' }, + new Object[] { "alpha", "beta", "gamma", "delta" }); + var arguments = res.findFirst().get().get(); + assertEquals(1, arguments[0]); + assertEquals('a', arguments[1]); + assertEquals("alpha", arguments[2]); + } +} diff --git a/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java b/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java new file mode 100644 index 00000000..991b4f51 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/map/tests/utils/MapTestEnvironment.java @@ -0,0 +1,214 @@ +package tools.refinery.store.map.tests.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.map.Cursor; +import tools.refinery.store.map.VersionedMap; +import tools.refinery.store.map.internal.VersionedMapImpl; + +import java.util.TreeMap; + +public class MapTestEnvironment { + public static String[] 
prepareValues(int maxValue) { + String[] values = new String[maxValue]; + values[0] = "DEFAULT"; + for (int i = 1; i < values.length; i++) { + values[i] = "VAL" + i; + } + return values; + } + + public static ContinousHashProvider prepareHashProvider(final boolean evil) { + // Use maxPrime = 2147483629 + + ContinousHashProvider chp = new ContinousHashProvider() { + + @Override + public int getHash(Integer key, int index) { + if (evil && index < 15 && index < key / 3) { + return 7; + } + int result = 1; + final int prime = 31; + + result = prime * result + key; + result = prime * result + index; + + return result; + } + }; + return chp; + } + + public static void printStatus(String scenario, int actual, int max, String stepName) { + if (actual % 10000 == 0) { + String printStepName = stepName == null ? "" : stepName; + System.out.format(scenario + ":%d/%d (%d%%) " + printStepName + "%n", actual, max, actual * 100 / max); + } + + } + + public static void compareTwoMaps(String title, VersionedMapImpl map1, + VersionedMapImpl map2) { + compareTwoMaps(title, map1, map2, null); + } + public static void compareTwoMaps(String title, VersionedMapImpl map1, + VersionedMapImpl map2, List errors) { + // 1. Comparing cursors. + Cursor cursor1 = map1.getAll(); + Cursor cursor2 = map2.getAll(); + while (!cursor1.isTerminated()) { + if (cursor2.isTerminated()) { + fail("cursor 2 terminated before cursor1"); + } + assertEqualsList(cursor1.getKey(), cursor2.getKey(),"Keys not equal", errors); + assertEqualsList(cursor2.getValue(), cursor2.getValue(), "Values not equal", errors); + cursor1.move(); + cursor2.move(); + } + if (!cursor2.isTerminated()) + fail("cursor 1 terminated before cursor 2"); + + // 2.1. comparing hash codes + assertEqualsList(map1.hashCode(), map2.hashCode(), title + ": hash code check",errors); + assertEqualsList(map1, map2, title + ": 1.equals(2)",errors); + assertEqualsList(map2, map1, title + ": 2.equals(1)",errors); + } + private static void assertEqualsList(Object o1, Object o2, String message, List errors) { + if(errors == null) { + assertEquals(o1, o2, message); + } else { + if(o1 != null) { + if(!(o1.equals(o2))) { + AssertionError error = new AssertionError((message != null ? message+" " : "") + "expected: " + o1 + " but was : " + o2); + errors.add(error); + } + } + } + } + + public VersionedMapImpl sut; + Map oracle = new HashMap(); + + public MapTestEnvironment(VersionedMapImpl sut) { + this.sut = sut; + } + + public void put(K key, V value) { + V oldSutValue = sut.put(key, value); + V oldOracleValue; + if (value != sut.getDefaultValue()) { + oldOracleValue = oracle.put(key, value); + } else { + oldOracleValue = oracle.remove(key); + } + if(oldSutValue == sut.getDefaultValue() && oldOracleValue != null) { + fail("After put, SUT old value was default, but oracle old walue was " + oldOracleValue); + } + if(oldSutValue != sut.getDefaultValue()) { + assertEquals(oldOracleValue, oldSutValue); + } + } + + public void checkEquivalence(String title) { + // 0. Checking integrity + try { + sut.checkIntegrity(); + } catch (IllegalStateException e) { + fail(title + ": " + e.getMessage()); + } + + // 1. Checking: if Reference contains pair, then SUT contains + // pair. 
+ // Tests get functions + for (Entry entry : oracle.entrySet()) { + V sutValue = sut.get(entry.getKey()); + V oracleValue = entry.getValue(); + if (sutValue != oracleValue) { + printComparison(); + fail(title + ": Non-equivalent get(" + entry.getKey() + ") results: SUT=" + sutValue + ", Oracle=" + + oracleValue + "!"); + } + } + + // 2. Checking: if SUT contains pair, then Reference contains + // pair. + // Tests iterators + int elementsInSutEntrySet = 0; + Cursor cursor = sut.getAll(); + while (cursor.move()) { + elementsInSutEntrySet++; + K key = cursor.getKey(); + V sutValue = cursor.getValue(); + // System.out.println(key + " -> " + sutValue); + V oracleValue = oracle.get(key); + if (sutValue != oracleValue) { + printComparison(); + fail(title + ": Non-equivalent entry in iterator: SUT=<" + key + "," + sutValue + ">, Oracle=<" + key + + "," + oracleValue + ">!"); + } + + } + + // 3. Checking sizes + // Counting of non-default value pairs. + int oracleSize = oracle.entrySet().size(); + long sutSize = sut.getSize(); + if (oracleSize != sutSize || oracleSize != elementsInSutEntrySet) { + printComparison(); + fail(title + ": Non-eqivalent size() result: SUT.getSize()=" + sutSize + ", SUT.entryset.size=" + + elementsInSutEntrySet + ", Oracle=" + oracleSize + "!"); + } + } + + public static void checkOrder(String scenario, VersionedMap versionedMap) { + K previous = null; + Cursor cursor = versionedMap.getAll(); + while(cursor.move()) { + System.out.println(cursor.getKey() + " " + ((VersionedMapImpl) versionedMap).getHashProvider().getHash(cursor.getKey(), 0)); + if(previous != null) { + int comparisonResult = ((VersionedMapImpl) versionedMap).getHashProvider().compare(previous, cursor.getKey()); + assertTrue(comparisonResult<0,scenario+" Cursor order is not incremental!"); + } + previous = cursor.getKey(); + } + System.out.println(); + } + + public void printComparison() { + System.out.println("SUT:"); + printEntrySet(sut.getAll()); + System.out.println("Oracle:"); + printEntrySet(oracle.entrySet().iterator()); + } + + private void printEntrySet(Iterator> iterator) { + TreeMap treemap = new TreeMap<>(); + while (iterator.hasNext()) { + Entry entry = iterator.next(); + treemap.put(entry.getKey(), entry.getValue()); + } + for (Entry e : treemap.entrySet()) { + System.out.println("\t" + e.getKey() + " -> " + e.getValue()); + } + } + + private void printEntrySet(Cursor cursor) { + TreeMap treemap = new TreeMap<>(); + while (cursor.move()) { + treemap.put(cursor.getKey(), cursor.getValue()); + } + for (Entry e : treemap.entrySet()) { + System.out.println("\t" + e.getKey() + " -> " + e.getValue()); + } + } +} diff --git a/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java b/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java new file mode 100644 index 00000000..7d070380 --- /dev/null +++ b/store/src/test/java/tools/refinery/store/model/hashTests/HashEfficiencyTest.java @@ -0,0 +1,161 @@ +package tools.refinery.store.model.hashTests; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Random; + +import org.junit.jupiter.api.Test; + +import tools.refinery.store.map.ContinousHashProvider; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.TupleHashProvider; +import tools.refinery.store.model.TupleHashProviderBitMagic; + +class HashEfficiencyTest { + + private static List permutations(int range, int 
arity) { + if(arity == 1) { + List result = new ArrayList<>(range); + for(int i=0; i 1) { + List smallers = permutations(range, arity-1); + List result = new ArrayList<>(range*smallers.size()); + for(Tuple smaller : smallers) { + for(int i=0; i nPermutations(int arity, int n) { + int range = amountToRange(arity, n); + List permutations = permutations(range, arity); + return permutations.subList(0, n); + } + + public static List nRandoms(int arity, int n, int seed) { + int range = amountToRange(arity, n); + List permutations = new ArrayList<>(n); + Random r = new Random(seed); + for(int i = 0; i p = permutations(10, 2); + assertEquals(p.size(),10*10); + } +// private void printTuples(List p) { +// for(Tuple element : p) { +// System.out.println(element); +// } +// } + @Test + void nPermutationTest() { + final int amount = 500; + List p = nPermutations(2, amount); + assertEquals(amount,p.size()); + } + @Test + void nRandomTest() { + final int amount = 500; + List p = nRandoms(2, amount, 1);; + assertEquals(amount,p.size()); + } + private static double calculateHashClashes(List tuples, ContinousHashProvider chp) { + int sumClashes = 0; + + for(int i = 0; i chp, Tuple a, Tuple b) { + if(a.equals(b)) return 0; + final int bits = 5; + final int segments = Integer.SIZE/bits; + final int mask = (1<>(depth*5))&mask; + int bHash = (chp.getHash(b, index)>>(depth*5))&mask; + if(aHash != bHash) { + return i+1; + } + if(i>400) { + throw new IllegalStateException(a+" vs "+b); + } + } + } + private static double caclulateOptimalHashClash(int size) { + return (Math.log(size)/Math.log(32)); + } + public static void main(String[] args) { + List hashNames = new LinkedList<>(); + List> hashes = new LinkedList<>(); + hashNames.add("PrimeGroup"); + hashes.add(new TupleHashProvider()); + hashNames.add("BitMagic"); + hashes.add(new TupleHashProviderBitMagic()); + + int[] arities = new int[] {2,3,4,5}; + int[] sizes = new int[] {32*32,32*32*8}; + + System.out.println("Size,Arity,DataSource,Hash,Chashes,Optimal,Badness"); + for(int size : sizes) { + double optimalClashes = caclulateOptimalHashClash(size); + for(int arity : arities) { + List dataSourceNames = new LinkedList<>(); + List> dataSources = new LinkedList<>(); + +// dataSourceNames.add("Permutation"); +// dataSources.add(nPermutations(arity, size)); + dataSourceNames.add("Random"); + dataSources.add(nRandoms(arity, size, 0)); + + for(int dataSourceIndex = 0; dataSourceIndex person = new Relation<>("Person", 1, false); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, friend)); + Model model = store.createModel(); + + assertTrue(store.getDataRepresentations().contains(person)); + assertTrue(store.getDataRepresentations().contains(friend)); + assertTrue(model.getDataRepresentations().contains(person)); + assertTrue(model.getDataRepresentations().contains(friend)); + + Relation other = new Relation("other", 2, null); + assertFalse(model.getDataRepresentations().contains(other)); + } + + @Test + void modelBuildingTest() { + Relation person = new Relation<>("Person", 1, false); + Relation age = new Relation("age", 1, null); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(age, Tuple.of(0), 3); + model.put(age, Tuple.of(1), 1); + model.put(friend, Tuple.of(0, 1), true); + model.put(friend, 
Tuple.of(1, 0), true); + + assertTrue(model.get(person, Tuple.of(0))); + assertTrue(model.get(person, Tuple.of(1))); + assertFalse(model.get(person, Tuple.of(2))); + + assertEquals(3, model.get(age, Tuple.of(0))); + assertEquals(1, model.get(age, Tuple.of(1))); + assertEquals(null, model.get(age, Tuple.of(2))); + + assertTrue(model.get(friend, Tuple.of(0, 1))); + assertFalse(model.get(friend, Tuple.of(0, 5))); + } + + @Test + void modelBuildingArityFailTest() { + Relation person = new Relation<>("Person", 1, false); + ModelStore store = new ModelStoreImpl(Set.of(person)); + Model model = store.createModel(); + + final Tuple tuple3 = Tuple.of(1, 1, 1); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(person, tuple3, true)); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(person, tuple3)); + } + + @Test + void modelBuildingNullFailTest() { + Relation age = new Relation("age", 1, null); + ModelStore store = new ModelStoreImpl(Set.of(age)); + Model model = store.createModel(); + + model.put(age, Tuple.of(1), null); // valid + Assertions.assertThrows(IllegalArgumentException.class, () -> model.put(age, null, 1)); + Assertions.assertThrows(IllegalArgumentException.class, () -> model.get(age, null)); + + } + + @Test + void modelUpdateTest() { + Relation person = new Relation<>("Person", 1, false); + Relation age = new Relation("age", 1, null); + Relation friend = new Relation<>("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(age, Tuple.of(0), 3); + model.put(age, Tuple.of(1), 1); + model.put(friend, Tuple.of(0, 1), true); + model.put(friend, Tuple.of(1, 0), true); + + assertEquals(3, model.get(age, Tuple.of(0))); + assertTrue(model.get(friend, Tuple.of(0, 1))); + + model.put(age, Tuple.of(0), 4); + model.put(friend, Tuple.of(0, 1), false); + + assertEquals(4, model.get(age, Tuple.of(0))); + assertFalse(model.get(friend, Tuple.of(0, 1))); + } + + @Test + void restoreTest() { + Relation person = new Relation("Person", 1, false); + Relation friend = new Relation("friend", 2, false); + + ModelStore store = new ModelStoreImpl(Set.of(person, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(friend, Tuple.of(0, 1), true); + model.put(friend, Tuple.of(1, 0), true); + long state1 = model.commit(); + + assertFalse(model.get(person, Tuple.of(2))); + assertFalse(model.get(friend, Tuple.of(0, 2))); + + model.put(person, Tuple.of(2), true); + model.put(friend, Tuple.of(0, 2), true); + long state2 = model.commit(); + + assertTrue(model.get(person, Tuple.of(2))); + assertTrue(model.get(friend, Tuple.of(0, 2))); + + model.restore(state1); + + assertFalse(model.get(person, Tuple.of(2))); + assertFalse(model.get(friend, Tuple.of(0, 2))); + + model.restore(state2); + + assertTrue(model.get(person, Tuple.of(2))); + assertTrue(model.get(friend, Tuple.of(0, 2))); + } +} diff --git a/store/src/test/java/tools/refinery/store/query/test/QueryTest.java b/store/src/test/java/tools/refinery/store/query/test/QueryTest.java new file mode 100644 index 00000000..38aa130a --- /dev/null +++ b/store/src/test/java/tools/refinery/store/query/test/QueryTest.java @@ -0,0 +1,90 @@ +package tools.refinery.store.query.test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Collection; +import java.util.Set; + +import org.eclipse.viatra.query.runtime.api.AdvancedViatraQueryEngine; +import org.eclipse.viatra.query.runtime.api.GenericPatternMatch; +import org.eclipse.viatra.query.runtime.api.GenericPatternMatcher; +import org.eclipse.viatra.query.runtime.api.GenericQuerySpecification; +import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine; +import org.junit.jupiter.api.Test; + +import tools.refinery.store.model.Model; +import tools.refinery.store.model.ModelStore; +import tools.refinery.store.model.ModelStoreImpl; +import tools.refinery.store.model.Tuple; +import tools.refinery.store.model.representation.Relation; +import tools.refinery.store.model.representation.TruthValue; +import tools.refinery.store.query.RelationalScope; +import tools.refinery.store.query.internal.PredicateTranslator; +import tools.refinery.store.query.view.FilteredRelationView; +import tools.refinery.store.query.view.FunctionalRelationView; +import tools.refinery.store.query.view.KeyOnlyRelationView; +import tools.refinery.store.query.view.RelationView; + +class QueryTest { + @Test + void minimalTest() { + Relation person = new Relation<>("Person", 1, false); + + RelationView persionView = new KeyOnlyRelationView(person); + GenericQuerySpecification personQuery = (new PredicateTranslator("PersonQuery")) + .addParameter("p", persionView).addConstraint(persionView, "p").build(); + + ModelStore store = new ModelStoreImpl(Set.of(person)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + + RelationalScope scope = new RelationalScope(model, Set.of(persionView)); + + ViatraQueryEngine engine = AdvancedViatraQueryEngine.on(scope); + GenericPatternMatcher personMatcher = engine.getMatcher(personQuery); + + assertEquals(2, personMatcher.countMatches()); + } + + void modelBuildingTest() { + Relation person = new Relation<>("Person", 1, false); + Relation age = new Relation("age", 1, null); + Relation friend = new Relation<>("friend", 2, TruthValue.FALSE); + + ModelStore store = new ModelStoreImpl(Set.of(person, age, friend)); + Model model = store.createModel(); + + model.put(person, Tuple.of(0), true); + model.put(person, Tuple.of(1), true); + model.put(age, Tuple.of(0), 3); + model.put(age, Tuple.of(1), 1); + model.put(friend, Tuple.of(0, 1), TruthValue.TRUE); + model.put(friend, Tuple.of(1, 0), TruthValue.UNKNOWN); + + // Sanity check + assertTrue(model.get(person, Tuple.of(0))); + assertTrue(model.get(person, Tuple.of(1))); + assertFalse(model.get(person, Tuple.of(2))); + + RelationView persionView = new KeyOnlyRelationView(person); + RelationView ageView = new FunctionalRelationView<>(age); + RelationView friendMustView = new FilteredRelationView(friend, (k, v) -> v.must()); + RelationView friendMayView = new FilteredRelationView(friend, (k, v) -> v.may()); + + RelationalScope scope = new RelationalScope(model, Set.of(persionView, ageView, friendMustView, friendMayView)); + + GenericQuerySpecification personQuery = (new PredicateTranslator("PersonQuery")) + .addParameter("p", persionView).addConstraint(persionView, "p").build(); + + ViatraQueryEngine engine = AdvancedViatraQueryEngine.on(scope); + GenericPatternMatcher personMatcher = engine.getMatcher(personQuery); + Collection personMatches = personMatcher.getAllMatches(); + for (GenericPatternMatch personMatch : personMatches) { + 
System.out.println(personMatch);
+		}
+	}
+}
\ No newline at end of file
diff --git a/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java b/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java
new file mode 100644
index 00000000..171be0e5
--- /dev/null
+++ b/store/src/test/java/tools/refinery/store/util/CollectionsUtilTests.java
@@ -0,0 +1,78 @@
+package tools.refinery.store.util;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static tools.refinery.store.util.CollectionsUtil.filter;
+import static tools.refinery.store.util.CollectionsUtil.map;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+class CollectionsUtilTests {
+	List<Integer> list10 = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+	List<String> listTen = List.of("1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
+
+	private static <T> void compare(Iterable<T> a, Iterable<T> b) {
+		List<T> listA = toList(a);
+		List<T> listB = toList(b);
+		assertEquals(listA, listB);
+	}
+
+	private static <T> List<T> toList(Iterable<T> a) {
+		List<T> result = new ArrayList<>();
+		Iterator<T> iterator = a.iterator();
+		while (iterator.hasNext()) {
+			result.add(iterator.next());
+		}
+		return result;
+	}
+
+	@Test
+	void testFilterEven() {
+		compare(List.of(2, 4, 6, 8, 10), filter(list10, (x -> x % 2 == 0)));
+	}
+
+	@Test
+	void testFilterOdd() {
+		compare(List.of(1, 3, 5, 7, 9), filter(list10, (x -> x % 2 == 1)));
+	}
+
+	@Test
+	void testFilterFalse() {
+		compare(List.of(), filter(list10, (x -> false)));
+	}
+
+	@Test
+	void testFilterTrue() {
+		compare(list10, filter(list10, (x -> true)));
+	}
+
+	@Test
+	void testFilterEmpty() {
+		compare(List.of(), filter(List.of(), (x -> true)));
+	}
+
+	@Test
+	void testNoSuchElement() {
+		Iterable<Integer> iterable = filter(list10, (x -> x % 2 == 0));
+		Iterator<Integer> iterator = iterable.iterator();
+		while (iterator.hasNext()) {
+			iterator.next();
+		}
+		Assertions.assertThrows(NoSuchElementException.class, () -> iterator.next());
+	}
+
+	@Test
+	void mapTest() {
+		compare(listTen, map(list10, x -> x.toString()));
+	}
+
+	@Test
+	void mapEmptyTest() {
+		compare(List.of(), map(List.of(), x -> x.toString()));
+	}
+}
-- cgit v1.2.3-70-g09d2
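
The fuzz tests above also document the public surface of the renamed tools.refinery.store.map package: a VersionedMapStoreImpl constructed from a ContinousHashProvider and a default value, createMap(), put, commit, restore, and cursor-based iteration via getAll(). The stand-alone sketch below is not part of the patch; it only illustrates that usage under stated assumptions. The generic signatures (VersionedMapStore<Integer, String>, ContinousHashProvider<Integer>) are inferred from the test code, the default value "DEFAULT" mirrors MapTestEnvironment.prepareValues, and restore(long) is assumed to be callable on the map returned by createMap(), as the tests exercise it through VersionedMapImpl.

// Usage sketch only, not part of the commit. Calls and signatures are inferred
// from the test code above; anything not shown there is an assumption.
import tools.refinery.store.map.ContinousHashProvider;
import tools.refinery.store.map.Cursor;
import tools.refinery.store.map.VersionedMap;
import tools.refinery.store.map.VersionedMapStore;
import tools.refinery.store.map.VersionedMapStoreImpl;

public class StoreMapUsageSketch {
	public static void main(String[] args) {
		// Hash provider analogous to the non-evil branch of
		// MapTestEnvironment.prepareHashProvider: mixes the key with the probe index.
		ContinousHashProvider<Integer> hashProvider = new ContinousHashProvider<Integer>() {
			@Override
			public int getHash(Integer key, int index) {
				int result = 1;
				final int prime = 31;
				result = prime * result + key;
				result = prime * result + index;
				return result;
			}
		};

		// "DEFAULT" plays the role of the missing value, as in the tests.
		VersionedMapStore<Integer, String> store = new VersionedMapStoreImpl<>(hashProvider, "DEFAULT");
		VersionedMap<Integer, String> map = store.createMap();

		map.put(1, "VAL1");
		map.put(2, "VAL2");
		long version = map.commit();

		map.put(1, "VAL3");
		// Assumption: restore(long) is exposed on the created map; the restore fuzz
		// test calls it through VersionedMapImpl to roll back to a committed version.
		map.restore(version);

		// Cursor-based iteration, as used by the content-equality checks above.
		Cursor<Integer, String> cursor = map.getAll();
		while (cursor.move()) {
			System.out.println(cursor.getKey() + " -> " + cursor.getValue());
		}
	}
}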