diff options
Diffstat (limited to 'Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit')
36 files changed, 5366 insertions, 911 deletions
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/Modality.java b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/Modality.java index d2132cea..f3a6ec32 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/Modality.java +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/Modality.java | |||
@@ -2,21 +2,46 @@ package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra; | |||
2 | 2 | ||
3 | public enum Modality { | 3 | public enum Modality { |
4 | MUST, MAY, CURRENT; | 4 | MUST, MAY, CURRENT; |
5 | |||
5 | public boolean isMust() { | 6 | public boolean isMust() { |
6 | return this == MUST; | 7 | return this == MUST; |
7 | } | 8 | } |
9 | |||
8 | public boolean isMay() { | 10 | public boolean isMay() { |
9 | return this == MAY; | 11 | return this == MAY; |
10 | } | 12 | } |
13 | |||
11 | public boolean isCurrent() { | 14 | public boolean isCurrent() { |
12 | return this == CURRENT; | 15 | return this == CURRENT; |
13 | } | 16 | } |
17 | |||
14 | public boolean isMustOrCurrent() { | 18 | public boolean isMustOrCurrent() { |
15 | return isMust() || isCurrent(); | 19 | return isMust() || isCurrent(); |
16 | } | 20 | } |
21 | |||
17 | public Modality getDual() { | 22 | public Modality getDual() { |
18 | if(this.isCurrent()) return CURRENT; | 23 | switch (this) { |
19 | else if(this.isMust())return MAY; | 24 | case CURRENT: |
20 | else return MUST; | 25 | return CURRENT; |
26 | case MUST: | ||
27 | return MAY; | ||
28 | case MAY: | ||
29 | return MUST; | ||
30 | default: | ||
31 | throw new UnsupportedOperationException("Unknown Modality: " + this); | ||
32 | } | ||
33 | } | ||
34 | |||
35 | public Modality toBase() { | ||
36 | if (this.isCurrent()) { | ||
37 | return MUST; | ||
38 | } else { | ||
39 | return this; | ||
40 | } | ||
41 | } | ||
42 | |||
43 | @Override | ||
44 | public String toString() { | ||
45 | return super.toString().toLowerCase(); | ||
21 | } | 46 | } |
22 | } | 47 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ModelGenerationMethodProvider.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ModelGenerationMethodProvider.xtend index f43ab96d..e45ec1c8 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ModelGenerationMethodProvider.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ModelGenerationMethodProvider.xtend | |||
@@ -1,8 +1,20 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra |
2 | 2 | ||
3 | import com.google.common.collect.ImmutableMap | ||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.builder.DocumentationLevel | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.builder.DocumentationLevel |
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | 5 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem |
5 | import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.TransfomedViatraQuery | 6 | import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.TransfomedViatraQuery |
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.CbcPolyhedronSolver | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeConstraintHint | ||
9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.MultiplicityGoalConstraintCalculator | ||
10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.PolyhedronScopePropagator | ||
11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.RelationConstraintCalculator | ||
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagator | ||
13 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagatorStrategy | ||
14 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.TypeHierarchyScopePropagator | ||
15 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.Z3PolyhedronSolver | ||
16 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.GeneratedPatterns | ||
17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.ModalPatternQueries | ||
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.PatternProvider | 18 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.PatternProvider |
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules.GoalConstraintProvider | 19 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules.GoalConstraintProvider |
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules.RefinementRuleProvider | 20 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules.RefinementRuleProvider |
@@ -10,6 +22,7 @@ import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.par | |||
10 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace | 22 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace |
11 | import java.util.Collection | 23 | import java.util.Collection |
12 | import java.util.List | 24 | import java.util.List |
25 | import java.util.Map | ||
13 | import java.util.Set | 26 | import java.util.Set |
14 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | 27 | import org.eclipse.viatra.query.runtime.api.IPatternMatch |
15 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification | 28 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification |
@@ -20,63 +33,112 @@ import org.eclipse.xtend.lib.annotations.Data | |||
20 | 33 | ||
21 | class ModelGenerationStatistics { | 34 | class ModelGenerationStatistics { |
22 | public var long transformationExecutionTime = 0 | 35 | public var long transformationExecutionTime = 0 |
36 | |||
23 | synchronized def addExecutionTime(long amount) { | 37 | synchronized def addExecutionTime(long amount) { |
24 | transformationExecutionTime+=amount | 38 | transformationExecutionTime += amount |
39 | } | ||
40 | |||
41 | public var long scopePropagationTime = 0 | ||
42 | |||
43 | synchronized def addScopePropagationTime(long amount) { | ||
44 | scopePropagationTime += amount | ||
45 | } | ||
46 | |||
47 | public var long preliminaryTypeAnalisisTime = 0 | ||
48 | |||
49 | public var int decisionsTried = 0 | ||
50 | |||
51 | synchronized def incrementDecisionCount() { | ||
52 | decisionsTried++ | ||
53 | } | ||
54 | |||
55 | public var int transformationInvocations | ||
56 | |||
57 | synchronized def incrementTransformationCount() { | ||
58 | transformationInvocations++ | ||
59 | } | ||
60 | |||
61 | public var int scopePropagatorInvocations | ||
62 | |||
63 | synchronized def incrementScopePropagationCount() { | ||
64 | scopePropagatorInvocations++ | ||
65 | } | ||
66 | |||
67 | public var int scopePropagatorSolverInvocations | ||
68 | |||
69 | synchronized def incrementScopePropagationSolverCount() { | ||
70 | scopePropagatorSolverInvocations++ | ||
25 | } | 71 | } |
26 | public var long PreliminaryTypeAnalisisTime = 0 | ||
27 | } | 72 | } |
73 | |||
28 | @Data class ModelGenerationMethod { | 74 | @Data class ModelGenerationMethod { |
29 | ModelGenerationStatistics statistics | 75 | ModelGenerationStatistics statistics |
30 | 76 | ||
31 | Collection<? extends BatchTransformationRule<?,?>> objectRefinementRules | 77 | Collection<? extends BatchTransformationRule<?, ?>> objectRefinementRules |
32 | Collection<? extends BatchTransformationRule<?,?>> relationRefinementRules | 78 | Collection<? extends BatchTransformationRule<?, ?>> relationRefinementRules |
33 | 79 | ||
34 | List<MultiplicityGoalConstraintCalculator> unfinishedMultiplicities | 80 | List<MultiplicityGoalConstraintCalculator> unfinishedMultiplicities |
35 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedWF | 81 | |
36 | 82 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedWF | |
37 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> invalidWF | 83 | |
38 | 84 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> invalidWF | |
39 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allPatterns | 85 | |
86 | Map<String, ModalPatternQueries> modalRelationQueries | ||
87 | |||
88 | Collection<? extends IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allPatterns | ||
40 | } | 89 | } |
90 | |||
41 | enum TypeInferenceMethod { | 91 | enum TypeInferenceMethod { |
42 | Generic, PreliminaryAnalysis | 92 | Generic, |
93 | PreliminaryAnalysis | ||
43 | } | 94 | } |
44 | 95 | ||
45 | class ModelGenerationMethodProvider { | 96 | class ModelGenerationMethodProvider { |
46 | private val PatternProvider patternProvider = new PatternProvider | 97 | val PatternProvider patternProvider = new PatternProvider |
47 | private val RefinementRuleProvider refinementRuleProvider = new RefinementRuleProvider | 98 | val RefinementRuleProvider refinementRuleProvider = new RefinementRuleProvider |
48 | private val GoalConstraintProvider goalConstraintProvider = new GoalConstraintProvider | 99 | val GoalConstraintProvider goalConstraintProvider = new GoalConstraintProvider |
49 | 100 | val relationConstraintCalculator = new RelationConstraintCalculator | |
50 | public def ModelGenerationMethod createModelGenerationMethod( | 101 | |
102 | def ModelGenerationMethod createModelGenerationMethod( | ||
51 | LogicProblem logicProblem, | 103 | LogicProblem logicProblem, |
52 | PartialInterpretation emptySolution, | 104 | PartialInterpretation emptySolution, |
53 | ReasonerWorkspace workspace, | 105 | ReasonerWorkspace workspace, |
54 | boolean nameNewElements, | 106 | boolean nameNewElements, |
55 | TypeInferenceMethod typeInferenceMethod, | 107 | TypeInferenceMethod typeInferenceMethod, |
56 | ScopePropagator scopePropagator, | 108 | ScopePropagatorStrategy scopePropagatorStrategy, |
109 | Collection<LinearTypeConstraintHint> hints, | ||
57 | DocumentationLevel debugLevel | 110 | DocumentationLevel debugLevel |
58 | ) { | 111 | ) { |
59 | val statistics = new ModelGenerationStatistics | 112 | val statistics = new ModelGenerationStatistics |
60 | val writeFiles = (debugLevel === DocumentationLevel.NORMAL || debugLevel === DocumentationLevel.FULL) | 113 | val writeFiles = (debugLevel === DocumentationLevel.NORMAL || debugLevel === DocumentationLevel.FULL) |
61 | 114 | ||
62 | val Set<PQuery> existingQueries = logicProblem | 115 | val Set<PQuery> existingQueries = logicProblem.relations.map[annotations].flatten.filter(TransfomedViatraQuery). |
63 | .relations | 116 | map[it.patternPQuery as PQuery].toSet |
64 | .map[annotations] | 117 | |
65 | .flatten | 118 | val relationConstraints = relationConstraintCalculator.calculateRelationConstraints(logicProblem) |
66 | .filter(TransfomedViatraQuery) | 119 | val queries = patternProvider.generateQueries(logicProblem, emptySolution, statistics, existingQueries, |
67 | .map[it.patternPQuery as PQuery] | 120 | workspace, typeInferenceMethod, scopePropagatorStrategy, relationConstraints, hints, writeFiles) |
68 | .toSet | 121 | val scopePropagator = createScopePropagator(scopePropagatorStrategy, emptySolution, hints, queries, statistics) |
69 | 122 | scopePropagator.propagateAllScopeConstraints | |
70 | val queries = patternProvider.generateQueries(logicProblem,emptySolution,statistics,existingQueries,workspace,typeInferenceMethod,writeFiles) | 123 | val objectRefinementRules = refinementRuleProvider.createObjectRefinementRules(queries, scopePropagator, |
71 | val //LinkedHashMap<Pair<Relation, ? extends Type>, BatchTransformationRule<GenericPatternMatch, ViatraQueryMatcher<GenericPatternMatch>>> | 124 | nameNewElements, statistics) |
72 | objectRefinementRules = refinementRuleProvider.createObjectRefinementRules(queries,scopePropagator,nameNewElements,statistics) | 125 | val relationRefinementRules = refinementRuleProvider.createRelationRefinementRules(queries, scopePropagator, |
73 | val relationRefinementRules = refinementRuleProvider.createRelationRefinementRules(queries,statistics) | 126 | statistics) |
74 | 127 | ||
75 | val unfinishedMultiplicities = goalConstraintProvider.getUnfinishedMultiplicityQueries(queries) | 128 | val unfinishedMultiplicities = goalConstraintProvider.getUnfinishedMultiplicityQueries(queries) |
76 | val unfinishedWF = queries.getUnfinishedWFQueries.values | 129 | val unfinishedWF = queries.getUnfinishedWFQueries.values |
77 | 130 | ||
131 | val modalRelationQueriesBuilder = ImmutableMap.builder | ||
132 | for (entry : queries.modalRelationQueries.entrySet) { | ||
133 | val annotation = entry.key.annotations.filter(TransfomedViatraQuery).head | ||
134 | if (annotation !== null) { | ||
135 | modalRelationQueriesBuilder.put(annotation.patternFullyQualifiedName, entry.value) | ||
136 | } | ||
137 | } | ||
138 | val modalRelationQueries = modalRelationQueriesBuilder.build | ||
139 | |||
78 | val invalidWF = queries.getInvalidWFQueries.values | 140 | val invalidWF = queries.getInvalidWFQueries.values |
79 | 141 | ||
80 | return new ModelGenerationMethod( | 142 | return new ModelGenerationMethod( |
81 | statistics, | 143 | statistics, |
82 | objectRefinementRules.values, | 144 | objectRefinementRules.values, |
@@ -84,7 +146,45 @@ class ModelGenerationMethodProvider { | |||
84 | unfinishedMultiplicities, | 146 | unfinishedMultiplicities, |
85 | unfinishedWF, | 147 | unfinishedWF, |
86 | invalidWF, | 148 | invalidWF, |
149 | modalRelationQueries, | ||
87 | queries.allQueries | 150 | queries.allQueries |
88 | ) | 151 | ) |
89 | } | 152 | } |
153 | |||
154 | private def createScopePropagator(ScopePropagatorStrategy scopePropagatorStrategy, | ||
155 | PartialInterpretation emptySolution, Collection<LinearTypeConstraintHint> hints, GeneratedPatterns queries, | ||
156 | ModelGenerationStatistics statistics) { | ||
157 | if (!hints.empty && !(scopePropagatorStrategy instanceof ScopePropagatorStrategy.Polyhedral)) { | ||
158 | throw new IllegalArgumentException("Only the Polyhedral scope propagator strategy can use hints.") | ||
159 | } | ||
160 | switch (scopePropagatorStrategy) { | ||
161 | case ScopePropagatorStrategy.None, | ||
162 | case ScopePropagatorStrategy.Basic: | ||
163 | new ScopePropagator(emptySolution, statistics) | ||
164 | case ScopePropagatorStrategy.BasicTypeHierarchy: | ||
165 | new TypeHierarchyScopePropagator(emptySolution, statistics) | ||
166 | ScopePropagatorStrategy.Polyhedral: { | ||
167 | val types = queries.refineObjectQueries.keySet.map[newType].toSet | ||
168 | val allPatternsByName = queries.allQueries.toMap[fullyQualifiedName] | ||
169 | val solver = switch (scopePropagatorStrategy.solver) { | ||
170 | case Z3Integer: | ||
171 | new Z3PolyhedronSolver(false, scopePropagatorStrategy.timeoutSeconds) | ||
172 | case Z3Real: | ||
173 | new Z3PolyhedronSolver(true, scopePropagatorStrategy.timeoutSeconds) | ||
174 | case Cbc: | ||
175 | new CbcPolyhedronSolver(false, scopePropagatorStrategy.timeoutSeconds, true) | ||
176 | case Clp: | ||
177 | new CbcPolyhedronSolver(true, scopePropagatorStrategy.timeoutSeconds, true) | ||
178 | default: | ||
179 | throw new IllegalArgumentException("Unknown polyhedron solver: " + | ||
180 | scopePropagatorStrategy.solver) | ||
181 | } | ||
182 | new PolyhedronScopePropagator(emptySolution, statistics, types, queries.multiplicityConstraintQueries, | ||
183 | queries.hasElementInContainmentQuery, allPatternsByName, hints, solver, | ||
184 | scopePropagatorStrategy.requiresUpperBoundIndexing, scopePropagatorStrategy.updateHeuristic) | ||
185 | } | ||
186 | default: | ||
187 | throw new IllegalArgumentException("Unknown scope propagator strategy: " + scopePropagatorStrategy) | ||
188 | } | ||
189 | } | ||
90 | } | 190 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ScopePropagator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ScopePropagator.xtend deleted file mode 100644 index 38633c07..00000000 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/ScopePropagator.xtend +++ /dev/null | |||
@@ -1,156 +0,0 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialTypeInterpratation | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.Scope | ||
6 | import java.util.HashMap | ||
7 | import java.util.Map | ||
8 | import java.util.Set | ||
9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialComplexTypeInterpretation | ||
10 | import java.util.HashSet | ||
11 | |||
12 | class ScopePropagator { | ||
13 | PartialInterpretation partialInterpretation | ||
14 | Map<PartialTypeInterpratation,Scope> type2Scope | ||
15 | |||
16 | val Map<Scope, Set<Scope>> superScopes | ||
17 | val Map<Scope, Set<Scope>> subScopes | ||
18 | |||
19 | public new(PartialInterpretation p) { | ||
20 | partialInterpretation = p | ||
21 | type2Scope = new HashMap | ||
22 | for(scope : p.scopes) { | ||
23 | type2Scope.put(scope.targetTypeInterpretation,scope) | ||
24 | } | ||
25 | |||
26 | superScopes = new HashMap | ||
27 | subScopes = new HashMap | ||
28 | for(scope : p.scopes) { | ||
29 | superScopes.put(scope,new HashSet) | ||
30 | subScopes.put(scope,new HashSet) | ||
31 | } | ||
32 | |||
33 | for(scope : p.scopes) { | ||
34 | val target = scope.targetTypeInterpretation | ||
35 | if(target instanceof PartialComplexTypeInterpretation) { | ||
36 | val supertypeInterpretations = target.supertypeInterpretation | ||
37 | for(supertypeInterpretation : supertypeInterpretations) { | ||
38 | val supertypeScope = type2Scope.get(supertypeInterpretation) | ||
39 | superScopes.get(scope).add(supertypeScope) | ||
40 | subScopes.get(supertypeScope).add(scope) | ||
41 | } | ||
42 | } | ||
43 | } | ||
44 | } | ||
45 | |||
46 | def public propagateAllScopeConstraints() { | ||
47 | var boolean hadChanged | ||
48 | do{ | ||
49 | hadChanged = false | ||
50 | for(superScopeEntry : superScopes.entrySet) { | ||
51 | val sub = superScopeEntry.key | ||
52 | hadChanged = propagateLowerLimitUp(sub,partialInterpretation) || hadChanged | ||
53 | hadChanged = propagateUpperLimitDown(sub,partialInterpretation) || hadChanged | ||
54 | for(sup: superScopeEntry.value) { | ||
55 | hadChanged = propagateLowerLimitUp(sub,sup) || hadChanged | ||
56 | hadChanged = propagateUpperLimitDown(sub,sup) || hadChanged | ||
57 | } | ||
58 | } | ||
59 | } while(hadChanged) | ||
60 | // println('''All constraints are propagated.''') | ||
61 | } | ||
62 | |||
63 | def public propagateAdditionToType(PartialTypeInterpratation t) { | ||
64 | // println('''Adding to «(t as PartialComplexTypeInterpretation).interpretationOf.name»''') | ||
65 | val targetScope = type2Scope.get(t) | ||
66 | targetScope.removeOne | ||
67 | val sups = superScopes.get(targetScope) | ||
68 | sups.forEach[removeOne] | ||
69 | if(this.partialInterpretation.minNewElements > 0) { | ||
70 | this.partialInterpretation.minNewElements = this.partialInterpretation.minNewElements-1 | ||
71 | } | ||
72 | if(this.partialInterpretation.maxNewElements > 0) { | ||
73 | this.partialInterpretation.maxNewElements = this.partialInterpretation.maxNewElements-1 | ||
74 | } else if(this.partialInterpretation.maxNewElements === 0) { | ||
75 | throw new IllegalArgumentException('''Inconsistent object creation: lower node limit is 0!''') | ||
76 | } | ||
77 | |||
78 | // subScopes.get(targetScope).forEach[propagateUpperLimitDown(it,targetScope)] | ||
79 | // for(sup: sups) { | ||
80 | // subScopes.get(sup).forEach[propagateUpperLimitDown(it,sup)] | ||
81 | // } | ||
82 | // for(scope : type2Scope.values) { | ||
83 | // propagateUpperLimitDown(scope,partialInterpretation) | ||
84 | // } | ||
85 | |||
86 | propagateAllScopeConstraints | ||
87 | |||
88 | // println('''Target Scope: «targetScope.minNewElements» - «targetScope.maxNewElements»''') | ||
89 | // println(''' «this.partialInterpretation.minNewElements» - «this.partialInterpretation.maxNewElements»''') | ||
90 | // this.partialInterpretation.scopes.forEach[println(''' «(it.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name»: «it.minNewElements»-«it.maxNewElements»''')] | ||
91 | // println('''All constraints are propagated upon increasing «(t as PartialComplexTypeInterpretation).interpretationOf.name»''') | ||
92 | } | ||
93 | |||
94 | private def propagateLowerLimitUp(Scope subScope, Scope superScope) { | ||
95 | if(subScope.minNewElements>superScope.minNewElements) { | ||
96 | // println(''' | ||
97 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> «(superScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» | ||
98 | // superScope.minNewElements «superScope.minNewElements» = subScope.minNewElements «subScope.minNewElements» | ||
99 | // ''') | ||
100 | superScope.minNewElements = subScope.minNewElements | ||
101 | return true | ||
102 | } else { | ||
103 | return false | ||
104 | } | ||
105 | } | ||
106 | |||
107 | private def propagateUpperLimitDown(Scope subScope, Scope superScope) { | ||
108 | if(superScope.maxNewElements>=0 && (superScope.maxNewElements<subScope.maxNewElements || subScope.maxNewElements<0)) { | ||
109 | // println(''' | ||
110 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> «(superScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» | ||
111 | // subScope.maxNewElements «subScope.maxNewElements» = superScope.maxNewElements «superScope.maxNewElements» | ||
112 | // ''') | ||
113 | subScope.maxNewElements = superScope.maxNewElements | ||
114 | return true | ||
115 | } else { | ||
116 | return false | ||
117 | } | ||
118 | } | ||
119 | |||
120 | private def propagateLowerLimitUp(Scope subScope, PartialInterpretation p) { | ||
121 | if(subScope.minNewElements>p.minNewElements) { | ||
122 | // println(''' | ||
123 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> nodes | ||
124 | // p.minNewElements «p.minNewElements» = subScope.minNewElements «subScope.minNewElements» | ||
125 | // ''') | ||
126 | p.minNewElements = subScope.minNewElements | ||
127 | return true | ||
128 | } else { | ||
129 | return false | ||
130 | } | ||
131 | } | ||
132 | |||
133 | private def propagateUpperLimitDown(Scope subScope, PartialInterpretation p) { | ||
134 | if(p.maxNewElements>=0 && (p.maxNewElements<subScope.maxNewElements || subScope.maxNewElements<0)) { | ||
135 | // println(''' | ||
136 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> nodes | ||
137 | // subScope.maxNewElements «subScope.maxNewElements» = p.maxNewElements «p.maxNewElements» | ||
138 | // ''') | ||
139 | subScope.maxNewElements = p.maxNewElements | ||
140 | return true | ||
141 | } else { | ||
142 | return false | ||
143 | } | ||
144 | } | ||
145 | private def removeOne(Scope scope) { | ||
146 | if(scope.maxNewElements===0) { | ||
147 | throw new IllegalArgumentException('''Inconsistent object creation: «scope.targetTypeInterpretation»''') | ||
148 | } else if(scope.maxNewElements>0) { | ||
149 | scope.maxNewElements= scope.maxNewElements-1 | ||
150 | } | ||
151 | if(scope.minNewElements>0) { | ||
152 | scope.minNewElements= scope.minNewElements-1 | ||
153 | } | ||
154 | } | ||
155 | } | ||
156 | \ No newline at end of file | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/AbstractPolyhedronSaturationOperator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/AbstractPolyhedronSaturationOperator.xtend new file mode 100644 index 00000000..94f97e94 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/AbstractPolyhedronSaturationOperator.xtend | |||
@@ -0,0 +1,53 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import com.google.common.collect.ImmutableList | ||
4 | import org.eclipse.xtend.lib.annotations.Accessors | ||
5 | |||
6 | abstract class AbstractPolyhedronSaturationOperator implements PolyhedronSaturationOperator { | ||
7 | @Accessors val Polyhedron polyhedron | ||
8 | |||
9 | new(Polyhedron polyhedron) { | ||
10 | if (polyhedron.dimensions.empty) { | ||
11 | throw new IllegalArgumentException("Polyhedron must have at least one dimension.") | ||
12 | } | ||
13 | this.polyhedron = polyhedron | ||
14 | } | ||
15 | |||
16 | override saturate() { | ||
17 | if (polyhedron.expressionsToSaturate.empty) { | ||
18 | return PolyhedronSaturationResult.SATURATED | ||
19 | } | ||
20 | for (constraint : polyhedron.constraints) { | ||
21 | if (constraint.zero) { | ||
22 | if (constraint.lowerBound !== null && constraint.lowerBound > 0) { | ||
23 | return PolyhedronSaturationResult.EMPTY | ||
24 | } | ||
25 | if (constraint.upperBound !== null && constraint.upperBound < 0) { | ||
26 | return PolyhedronSaturationResult.EMPTY | ||
27 | } | ||
28 | } else { | ||
29 | if (constraint.lowerBound !== null && constraint.upperBound !== null && | ||
30 | constraint.upperBound < constraint.lowerBound) { | ||
31 | return PolyhedronSaturationResult.EMPTY | ||
32 | } | ||
33 | } | ||
34 | } | ||
35 | doSaturate() | ||
36 | } | ||
37 | |||
38 | protected def PolyhedronSaturationResult doSaturate() | ||
39 | |||
40 | protected def getNonTrivialConstraints() { | ||
41 | ImmutableList.copyOf(polyhedron.constraints.filter [ constraint | | ||
42 | (constraint.lowerBound !== null || constraint.upperBound !== null) && !constraint.zero | ||
43 | ]) | ||
44 | } | ||
45 | |||
46 | private static def isZero(LinearConstraint constraint) { | ||
47 | constraint.coefficients.values.forall[it == 0] | ||
48 | } | ||
49 | |||
50 | override close() throws Exception { | ||
51 | // Nothing to close by default. | ||
52 | } | ||
53 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/CbcPolyhedronSolver.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/CbcPolyhedronSolver.xtend new file mode 100644 index 00000000..75c396b4 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/CbcPolyhedronSolver.xtend | |||
@@ -0,0 +1,237 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import com.google.common.collect.ImmutableList | ||
4 | import com.google.common.collect.ImmutableMap | ||
5 | import hu.bme.mit.inf.dslreasoner.ilp.cbc.CbcResult | ||
6 | import hu.bme.mit.inf.dslreasoner.ilp.cbc.CbcSolver | ||
7 | import java.util.HashSet | ||
8 | import java.util.List | ||
9 | import java.util.Map | ||
10 | import java.util.Set | ||
11 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
12 | |||
13 | @FinalFieldsConstructor | ||
14 | class CbcPolyhedronSolver implements PolyhedronSolver { | ||
15 | val boolean lpRelaxation | ||
16 | val double timeoutSeconds | ||
17 | val boolean silent | ||
18 | |||
19 | new() { | ||
20 | this(false, -1, true) | ||
21 | } | ||
22 | |||
23 | override createSaturationOperator(Polyhedron polyhedron) { | ||
24 | new CbcSaturationOperator(polyhedron, lpRelaxation, timeoutSeconds, silent) | ||
25 | } | ||
26 | } | ||
27 | |||
28 | class CbcSaturationOperator extends AbstractPolyhedronSaturationOperator { | ||
29 | static val EPSILON = 1e-6 | ||
30 | |||
31 | val boolean lpRelaxation | ||
32 | val double timeoutSeconds | ||
33 | val boolean silent | ||
34 | val double[] columnLowerBounds | ||
35 | val double[] columnUpperBounds | ||
36 | val double[] objective | ||
37 | val Map<Dimension, Integer> dimensionsToIndicesMap | ||
38 | |||
39 | new(Polyhedron polyhedron, boolean lpRelaxation, double timeoutSeconds, boolean silent) { | ||
40 | super(polyhedron) | ||
41 | this.lpRelaxation = lpRelaxation | ||
42 | this.timeoutSeconds = timeoutSeconds | ||
43 | this.silent = silent | ||
44 | val numDimensions = polyhedron.dimensions.size | ||
45 | columnLowerBounds = newDoubleArrayOfSize(numDimensions) | ||
46 | columnUpperBounds = newDoubleArrayOfSize(numDimensions) | ||
47 | objective = newDoubleArrayOfSize(numDimensions) | ||
48 | dimensionsToIndicesMap = ImmutableMap.copyOf(polyhedron.dimensions.indexed.toMap([value], [key])) | ||
49 | } | ||
50 | |||
51 | override doSaturate() { | ||
52 | val numDimensions = polyhedron.dimensions.size | ||
53 | for (var int j = 0; j < numDimensions; j++) { | ||
54 | val dimension = polyhedron.dimensions.get(j) | ||
55 | columnLowerBounds.set(j, dimension.lowerBound.toDouble(Double.NEGATIVE_INFINITY)) | ||
56 | columnUpperBounds.set(j, dimension.upperBound.toDouble(Double.POSITIVE_INFINITY)) | ||
57 | } | ||
58 | val constraints = nonTrivialConstraints | ||
59 | val numConstraints = constraints.size | ||
60 | val rowStarts = newIntArrayOfSize(numConstraints + 1) | ||
61 | val rowLowerBounds = newDoubleArrayOfSize(numConstraints) | ||
62 | val rowUpperBounds = newDoubleArrayOfSize(numConstraints) | ||
63 | val numEntries = constraints.map[coefficients.size].reduce[a, b|a + b] ?: 0 | ||
64 | rowStarts.set(numConstraints, numEntries) | ||
65 | val columnIndices = newIntArrayOfSize(numEntries) | ||
66 | val entries = newDoubleArrayOfSize(numEntries) | ||
67 | val unconstrainedDimensions = new HashSet | ||
68 | for (dimension : polyhedron.dimensions) { | ||
69 | if (dimension.lowerBound === null && dimension.upperBound === null) { | ||
70 | unconstrainedDimensions += dimension | ||
71 | } | ||
72 | } | ||
73 | var int index = 0 | ||
74 | for (var int i = 0; i < numConstraints; i++) { | ||
75 | rowStarts.set(i, index) | ||
76 | val constraint = constraints.get(i) | ||
77 | rowLowerBounds.set(i, constraint.lowerBound.toDouble(Double.NEGATIVE_INFINITY)) | ||
78 | rowUpperBounds.set(i, constraint.upperBound.toDouble(Double.POSITIVE_INFINITY)) | ||
79 | if (!dimensionsToIndicesMap.keySet.containsAll(constraint.coefficients.keySet)) { | ||
80 | throw new IllegalArgumentException("Constrains has unknown dimensions") | ||
81 | } | ||
82 | for (var int j = 0; j < numDimensions; j++) { | ||
83 | val dimension = polyhedron.dimensions.get(j) | ||
84 | val coefficient = constraint.coefficients.get(dimension) | ||
85 | if (coefficient !== null && coefficient != 0) { | ||
86 | unconstrainedDimensions -= dimension | ||
87 | columnIndices.set(index, j) | ||
88 | entries.set(index, coefficient) | ||
89 | index++ | ||
90 | } | ||
91 | } | ||
92 | } | ||
93 | if (index != numEntries) { | ||
94 | throw new AssertionError("Last entry does not equal the number of entries in the constraint matrix") | ||
95 | } | ||
96 | for (expressionToSaturate : polyhedron.expressionsToSaturate) { | ||
97 | val result = saturate(expressionToSaturate, rowStarts, columnIndices, entries, rowLowerBounds, | ||
98 | rowUpperBounds, unconstrainedDimensions, constraints) | ||
99 | if (result != PolyhedronSaturationResult.SATURATED) { | ||
100 | return result | ||
101 | } | ||
102 | } | ||
103 | PolyhedronSaturationResult.SATURATED | ||
104 | } | ||
105 | |||
106 | protected def saturate(LinearBoundedExpression expressionToSaturate, int[] rowStarts, int[] columnIndices, | ||
107 | double[] entries, double[] rowLowerBounds, double[] rowUpperBounds, Set<Dimension> unconstrainedDimensions, | ||
108 | ImmutableList<LinearConstraint> constraints) { | ||
109 | val numDimensions = objective.size | ||
110 | for (var int j = 0; j < numDimensions; j++) { | ||
111 | objective.set(j, 0) | ||
112 | } | ||
113 | switch (expressionToSaturate) { | ||
114 | Dimension: { | ||
115 | // CBC will return nonsensical results or call free() with invalid arguments if | ||
116 | // it is passed a fully unconstrained (-Inf lower and +Int upper bound, no inequalities) variable | ||
117 | // in the objective function. | ||
118 | if (unconstrainedDimensions.contains(expressionToSaturate)) { | ||
119 | return PolyhedronSaturationResult.SATURATED | ||
120 | } | ||
121 | val j = getIndex(expressionToSaturate) | ||
122 | objective.set(j, 1) | ||
123 | } | ||
124 | LinearConstraint: { | ||
125 | for (pair : expressionToSaturate.coefficients.entrySet) { | ||
126 | val dimension = pair.key | ||
127 | // We also have to check for unconstrained dimensions here to avoid crashing. | ||
128 | if (unconstrainedDimensions.contains(dimension)) { | ||
129 | expressionToSaturate.lowerBound = null | ||
130 | expressionToSaturate.upperBound = null | ||
131 | return PolyhedronSaturationResult.SATURATED | ||
132 | } | ||
133 | val j = getIndex(dimension) | ||
134 | objective.set(j, pair.value) | ||
135 | } | ||
136 | } | ||
137 | default: | ||
138 | throw new IllegalArgumentException("Unknown expression: " + expressionToSaturate) | ||
139 | } | ||
140 | val minimizationResult = CbcSolver.solve(columnLowerBounds, columnUpperBounds, rowStarts, columnIndices, | ||
141 | entries, rowLowerBounds, rowUpperBounds, objective, lpRelaxation, timeoutSeconds, silent) | ||
142 | switch (minimizationResult) { | ||
143 | CbcResult.SolutionBounded: { | ||
144 | val doubleValue = minimizationResult.value | ||
145 | val roundedValue = Math.ceil(doubleValue - EPSILON) | ||
146 | val intValue = roundedValue as int | ||
147 | val oldBound = expressionToSaturate.lowerBound | ||
148 | if (oldBound === null || intValue >= oldBound) { | ||
149 | expressionToSaturate.lowerBound = intValue | ||
150 | setBound(expressionToSaturate, constraints, roundedValue, columnLowerBounds, rowLowerBounds) | ||
151 | } else { | ||
152 | throw new IllegalStateException("Unexpected decrease of lower bound by " + (oldBound - doubleValue)) | ||
153 | } | ||
154 | } | ||
155 | case CbcResult.SOLUTION_UNBOUNDED: { | ||
156 | if (expressionToSaturate.lowerBound !== null) { | ||
157 | throw new IllegalStateException("Finite lower bound became infinite") | ||
158 | } | ||
159 | setBound(expressionToSaturate, constraints, Double.NEGATIVE_INFINITY, columnLowerBounds, rowLowerBounds) | ||
160 | } | ||
161 | case CbcResult.UNSAT: | ||
162 | return PolyhedronSaturationResult.EMPTY | ||
163 | case CbcResult.ABANDONED, | ||
164 | case CbcResult.TIMEOUT: | ||
165 | return PolyhedronSaturationResult.UNKNOWN | ||
166 | default: | ||
167 | throw new RuntimeException("Unknown CbcResult: " + minimizationResult) | ||
168 | } | ||
169 | for (var int j = 0; j < numDimensions; j++) { | ||
170 | val objectiveCoefficient = objective.get(j) | ||
171 | objective.set(j, -objectiveCoefficient) | ||
172 | } | ||
173 | val maximizationResult = CbcSolver.solve(columnLowerBounds, columnUpperBounds, rowStarts, columnIndices, | ||
174 | entries, rowLowerBounds, rowUpperBounds, objective, lpRelaxation, timeoutSeconds, silent) | ||
175 | switch (maximizationResult) { | ||
176 | CbcResult.SolutionBounded: { | ||
177 | val doubleValue = -maximizationResult.value | ||
178 | val roundedValue = Math.floor(doubleValue + EPSILON) | ||
179 | val intValue = roundedValue as int | ||
180 | val oldBound = expressionToSaturate.upperBound | ||
181 | if (oldBound === null || intValue <= oldBound) { | ||
182 | expressionToSaturate.upperBound = intValue | ||
183 | setBound(expressionToSaturate, constraints, roundedValue, columnUpperBounds, rowUpperBounds) | ||
184 | } else { | ||
185 | throw new IllegalStateException("Unexpected increase of upper bound by " + (doubleValue - oldBound)) | ||
186 | } | ||
187 | } | ||
188 | case CbcResult.SOLUTION_UNBOUNDED: { | ||
189 | if (expressionToSaturate.lowerBound !== null) { | ||
190 | throw new IllegalStateException("Finite upper bound became infinite") | ||
191 | } | ||
192 | expressionToSaturate.upperBound = null | ||
193 | setBound(expressionToSaturate, constraints, Double.POSITIVE_INFINITY, columnUpperBounds, rowUpperBounds) | ||
194 | } | ||
195 | case CbcResult.UNSAT: | ||
196 | throw new RuntimeException("Minimization was SAT, but maximization is UNSAT") | ||
197 | case CbcResult.ABANDONED, | ||
198 | case CbcResult.TIMEOUT: | ||
199 | return PolyhedronSaturationResult.UNKNOWN | ||
200 | default: | ||
201 | throw new RuntimeException("Unknown CbcResult: " + maximizationResult) | ||
202 | } | ||
203 | return PolyhedronSaturationResult.SATURATED | ||
204 | } | ||
205 | |||
206 | private def toDouble(Integer nullableInt, double defaultValue) { | ||
207 | if (nullableInt === null) { | ||
208 | defaultValue | ||
209 | } else { | ||
210 | nullableInt.doubleValue | ||
211 | } | ||
212 | } | ||
213 | |||
214 | private def int getIndex(Dimension dimension) { | ||
215 | val index = dimensionsToIndicesMap.get(dimension) | ||
216 | if (index === null) { | ||
217 | throw new IllegalArgumentException("Unknown dimension: " + dimension) | ||
218 | } | ||
219 | index | ||
220 | } | ||
221 | |||
222 | private def void setBound(LinearBoundedExpression expression, List<LinearConstraint> constraints, double bound, | ||
223 | double[] columnBounds, double[] rowBounds) { | ||
224 | switch (expression) { | ||
225 | Dimension: { | ||
226 | val j = getIndex(expression) | ||
227 | columnBounds.set(j, bound) | ||
228 | } | ||
229 | LinearConstraint: { | ||
230 | val i = constraints.indexOf(expression) | ||
231 | if (i >= 0) { | ||
232 | rowBounds.set(i, bound) | ||
233 | } | ||
234 | } | ||
235 | } | ||
236 | } | ||
237 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/LinearTypeConstraintHint.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/LinearTypeConstraintHint.xtend new file mode 100644 index 00000000..8c21ca1d --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/LinearTypeConstraintHint.xtend | |||
@@ -0,0 +1,30 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.PatternGenerator | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
6 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | ||
7 | import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher | ||
8 | |||
9 | interface LinearTypeExpressionBuilderFactory { | ||
10 | def ViatraQueryMatcher<? extends IPatternMatch> createMatcher(String queryName) | ||
11 | |||
12 | def LinearTypeExpressionBuilder createBuilder() | ||
13 | } | ||
14 | |||
15 | interface LinearTypeExpressionBuilder { | ||
16 | def LinearTypeExpressionBuilder add(int scale, Type type) | ||
17 | |||
18 | def LinearBoundedExpression build() | ||
19 | } | ||
20 | |||
21 | @FunctionalInterface | ||
22 | interface RelationConstraintUpdater { | ||
23 | def void update(PartialInterpretation p) | ||
24 | } | ||
25 | |||
26 | interface LinearTypeConstraintHint { | ||
27 | def CharSequence getAdditionalPatterns(PatternGenerator patternGenerator) | ||
28 | |||
29 | def RelationConstraintUpdater createConstraintUpdater(LinearTypeExpressionBuilderFactory builderFactory) | ||
30 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/MultiplicityGoalConstraintCalculator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/MultiplicityGoalConstraintCalculator.xtend index e05160d0..86a59aa1 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/MultiplicityGoalConstraintCalculator.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/MultiplicityGoalConstraintCalculator.xtend | |||
@@ -1,4 +1,4 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality |
2 | 2 | ||
3 | import org.eclipse.emf.common.notify.Notifier | 3 | import org.eclipse.emf.common.notify.Notifier |
4 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification | 4 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification |
@@ -11,36 +11,36 @@ class MultiplicityGoalConstraintCalculator { | |||
11 | val IQuerySpecification<?> querySpecification; | 11 | val IQuerySpecification<?> querySpecification; |
12 | var ViatraQueryMatcher<?> matcher; | 12 | var ViatraQueryMatcher<?> matcher; |
13 | 13 | ||
14 | public new(String targetRelationName, IQuerySpecification<?> querySpecification) { | 14 | new(String targetRelationName, IQuerySpecification<?> querySpecification) { |
15 | this.targetRelationName = targetRelationName | 15 | this.targetRelationName = targetRelationName |
16 | this.querySpecification = querySpecification | 16 | this.querySpecification = querySpecification |
17 | this.matcher = null | 17 | this.matcher = null |
18 | } | 18 | } |
19 | 19 | ||
20 | public new(MultiplicityGoalConstraintCalculator other) { | 20 | new(MultiplicityGoalConstraintCalculator other) { |
21 | this.targetRelationName = other.targetRelationName | 21 | this.targetRelationName = other.targetRelationName |
22 | this.querySpecification = other.querySpecification | 22 | this.querySpecification = other.querySpecification |
23 | this.matcher = null | 23 | this.matcher = null |
24 | } | 24 | } |
25 | 25 | ||
26 | def public getName() { | 26 | def getName() { |
27 | targetRelationName | 27 | targetRelationName |
28 | } | 28 | } |
29 | 29 | ||
30 | def public init(Notifier notifier) { | 30 | def init(Notifier notifier) { |
31 | val engine = ViatraQueryEngine.on(new EMFScope(notifier)) | 31 | val engine = ViatraQueryEngine.on(new EMFScope(notifier)) |
32 | matcher = querySpecification.getMatcher(engine) | 32 | matcher = querySpecification.getMatcher(engine) |
33 | } | 33 | } |
34 | 34 | ||
35 | def public calculateValue() { | 35 | def calculateValue() { |
36 | var res = 0 | 36 | var res = 0 |
37 | val allMatches = this.matcher.allMatches | 37 | val allMatches = this.matcher.allMatches |
38 | for(match : allMatches) { | 38 | for(match : allMatches) { |
39 | //println(targetRelationName+ " missing multiplicity: "+match.get(3)) | 39 | //println(targetRelationName+ " missing multiplicity: "+match.get(3)) |
40 | val missingMultiplicity = match.get(4) as Integer | 40 | val missingMultiplicity = match.get(2) as Integer |
41 | res += missingMultiplicity | 41 | res += missingMultiplicity |
42 | } | 42 | } |
43 | //println(targetRelationName+ " all missing multiplicities: "+res) | 43 | //println(targetRelationName+ " all missing multiplicities: "+res) |
44 | return res | 44 | return res |
45 | } | 45 | } |
46 | } \ No newline at end of file | 46 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronScopePropagator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronScopePropagator.xtend new file mode 100644 index 00000000..51dba244 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronScopePropagator.xtend | |||
@@ -0,0 +1,578 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import com.google.common.cache.Cache | ||
4 | import com.google.common.cache.CacheBuilder | ||
5 | import com.google.common.collect.ImmutableList | ||
6 | import com.google.common.collect.ImmutableMap | ||
7 | import com.google.common.collect.ImmutableSet | ||
8 | import com.google.common.collect.Maps | ||
9 | import com.google.common.collect.Sets | ||
10 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation | ||
11 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics | ||
13 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.UnifinishedMultiplicityQueries | ||
14 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialComplexTypeInterpretation | ||
15 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
16 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialPrimitiveInterpretation | ||
17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.Scope | ||
18 | import java.util.ArrayDeque | ||
19 | import java.util.ArrayList | ||
20 | import java.util.Collection | ||
21 | import java.util.HashMap | ||
22 | import java.util.HashSet | ||
23 | import java.util.List | ||
24 | import java.util.Map | ||
25 | import java.util.Set | ||
26 | import javax.naming.OperationNotSupportedException | ||
27 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | ||
28 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification | ||
29 | import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine | ||
30 | import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher | ||
31 | import org.eclipse.viatra.query.runtime.emf.EMFScope | ||
32 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
33 | |||
34 | class PolyhedronScopePropagator extends TypeHierarchyScopePropagator { | ||
35 | static val CACHE_SIZE = 10000 | ||
36 | |||
37 | val boolean updateHeuristic | ||
38 | val Map<Scope, LinearBoundedExpression> scopeBounds | ||
39 | val LinearBoundedExpression topLevelBounds | ||
40 | val Polyhedron polyhedron | ||
41 | val PolyhedronSaturationOperator operator | ||
42 | val Set<Relation> relevantRelations | ||
43 | val Cache<PolyhedronSignature, PolyhedronSignature> cache = CacheBuilder.newBuilder.maximumSize(CACHE_SIZE).build | ||
44 | List<RelationConstraintUpdater> updaters = emptyList | ||
45 | |||
46 | new(PartialInterpretation p, ModelGenerationStatistics statistics, Set<? extends Type> possibleNewDynamicTypes, | ||
47 | Map<RelationMultiplicityConstraint, UnifinishedMultiplicityQueries> unfinishedMultiplicityQueries, | ||
48 | IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> hasElementInContainmentQuery, | ||
49 | Map<String, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allPatternsByName, | ||
50 | Collection<LinearTypeConstraintHint> hints, PolyhedronSolver solver, boolean propagateRelations, | ||
51 | boolean updateHeuristic) { | ||
52 | super(p, statistics) | ||
53 | this.updateHeuristic = updateHeuristic | ||
54 | val builder = new PolyhedronBuilder(p) | ||
55 | builder.buildPolyhedron(possibleNewDynamicTypes) | ||
56 | scopeBounds = builder.scopeBounds | ||
57 | topLevelBounds = builder.topLevelBounds | ||
58 | polyhedron = builder.polyhedron | ||
59 | operator = solver.createSaturationOperator(polyhedron) | ||
60 | propagateAllScopeConstraints() | ||
61 | if (propagateRelations) { | ||
62 | val maximumNumberOfNewNodes = topLevelBounds.upperBound | ||
63 | if (maximumNumberOfNewNodes === null) { | ||
64 | throw new IllegalStateException("Could not determine maximum number of new nodes, it may be unbounded") | ||
65 | } | ||
66 | if (maximumNumberOfNewNodes <= 0) { | ||
67 | throw new IllegalStateException("Maximum number of new nodes is not positive") | ||
68 | } | ||
69 | builder.buildMultiplicityConstraints(unfinishedMultiplicityQueries, hasElementInContainmentQuery, | ||
70 | allPatternsByName, hints, maximumNumberOfNewNodes) | ||
71 | relevantRelations = builder.relevantRelations | ||
72 | updaters = builder.updaters | ||
73 | } else { | ||
74 | relevantRelations = emptySet | ||
75 | } | ||
76 | } | ||
77 | |||
78 | override void doPropagateAllScopeConstraints() { | ||
79 | super.doPropagateAllScopeConstraints() | ||
80 | resetBounds() | ||
81 | populatePolyhedronFromScope() | ||
82 | // println(polyhedron) | ||
83 | val signature = polyhedron.createSignature | ||
84 | val cachedSignature = cache.getIfPresent(signature) | ||
85 | switch (cachedSignature) { | ||
86 | case null: { | ||
87 | statistics.incrementScopePropagationSolverCount | ||
88 | val result = operator.saturate() | ||
89 | if (result == PolyhedronSaturationResult.EMPTY) { | ||
90 | cache.put(signature, PolyhedronSignature.EMPTY) | ||
91 | setScopesInvalid() | ||
92 | } else { | ||
93 | val resultSignature = polyhedron.createSignature | ||
94 | cache.put(signature, resultSignature) | ||
95 | populateScopesFromPolyhedron() | ||
96 | } | ||
97 | } | ||
98 | case PolyhedronSignature.EMPTY: | ||
99 | setScopesInvalid() | ||
100 | PolyhedronSignature.Bounds: { | ||
101 | polyhedron.applySignature(signature) | ||
102 | populateScopesFromPolyhedron() | ||
103 | } | ||
104 | default: | ||
105 | throw new IllegalStateException("Unknown polyhedron signature: " + signature) | ||
106 | } | ||
107 | // println(polyhedron) | ||
108 | if (updateHeuristic) { | ||
109 | copyScopeBoundsToHeuristic() | ||
110 | } | ||
111 | } | ||
112 | |||
113 | override propagateAdditionToRelation(Relation r) { | ||
114 | super.propagateAdditionToRelation(r) | ||
115 | if (relevantRelations.contains(r)) { | ||
116 | propagateAllScopeConstraints() | ||
117 | } | ||
118 | } | ||
119 | |||
120 | def resetBounds() { | ||
121 | for (dimension : polyhedron.dimensions) { | ||
122 | dimension.lowerBound = 0 | ||
123 | dimension.upperBound = null | ||
124 | } | ||
125 | for (constraint : polyhedron.constraints) { | ||
126 | constraint.lowerBound = null | ||
127 | constraint.upperBound = null | ||
128 | } | ||
129 | } | ||
130 | |||
131 | private def populatePolyhedronFromScope() { | ||
132 | topLevelBounds.tightenLowerBound(partialInterpretation.minNewElements) | ||
133 | if (partialInterpretation.maxNewElements >= 0) { | ||
134 | topLevelBounds.tightenUpperBound(partialInterpretation.maxNewElements) | ||
135 | } | ||
136 | for (pair : scopeBounds.entrySet) { | ||
137 | val scope = pair.key | ||
138 | val bounds = pair.value | ||
139 | bounds.tightenLowerBound(scope.minNewElements) | ||
140 | if (scope.maxNewElements >= 0) { | ||
141 | bounds.tightenUpperBound(scope.maxNewElements) | ||
142 | } | ||
143 | } | ||
144 | for (updater : updaters) { | ||
145 | updater.update(partialInterpretation) | ||
146 | } | ||
147 | } | ||
148 | |||
149 | private def populateScopesFromPolyhedron() { | ||
150 | checkBounds(topLevelBounds) | ||
151 | if (partialInterpretation.minNewElements > topLevelBounds.lowerBound) { | ||
152 | throw new IllegalArgumentException('''Lower bound of «topLevelBounds» smaller than top-level scope: «partialInterpretation.minNewElements»''') | ||
153 | } else if (partialInterpretation.minNewElements != topLevelBounds.lowerBound) { | ||
154 | partialInterpretation.minNewElements = topLevelBounds.lowerBound | ||
155 | } | ||
156 | val topLevelUpperBound = topLevelBounds.upperBound ?: -1 | ||
157 | if (partialInterpretation.maxNewElements >= 0 && topLevelUpperBound >= 0 && | ||
158 | partialInterpretation.maxNewElements < topLevelUpperBound) { | ||
159 | throw new IllegalArgumentException('''Upper bound of «topLevelBounds» larger than top-level scope: «partialInterpretation.maxNewElements»''') | ||
160 | } else if (partialInterpretation.maxNewElements != topLevelUpperBound) { | ||
161 | partialInterpretation.maxNewElements = topLevelUpperBound | ||
162 | } | ||
163 | for (pair : scopeBounds.entrySet) { | ||
164 | val scope = pair.key | ||
165 | val bounds = pair.value | ||
166 | checkBounds(bounds) | ||
167 | if (scope.minNewElements > bounds.lowerBound) { | ||
168 | throw new IllegalArgumentException('''Lower bound of «bounds» smaller than «scope.targetTypeInterpretation» scope: «scope.minNewElements»''') | ||
169 | } else if (scope.minNewElements != bounds.lowerBound) { | ||
170 | scope.minNewElements = bounds.lowerBound | ||
171 | } | ||
172 | val upperBound = bounds.upperBound ?: -1 | ||
173 | if (scope.maxNewElements >= 0 && upperBound >= 0 && scope.maxNewElements < upperBound) { | ||
174 | throw new IllegalArgumentException('''Upper bound of «bounds» larger than «scope.targetTypeInterpretation» scope: «scope.maxNewElements»''') | ||
175 | } else if (scope.maxNewElements != upperBound) { | ||
176 | scope.maxNewElements = upperBound | ||
177 | } | ||
178 | } | ||
179 | } | ||
180 | |||
181 | private def checkBounds(LinearBoundedExpression bounds) { | ||
182 | if (bounds.lowerBound === null) { | ||
183 | throw new IllegalArgumentException("Infinite lower bound: " + bounds) | ||
184 | } else if (bounds.lowerBound < 0) { | ||
185 | throw new IllegalArgumentException("Negative lower bound: " + bounds) | ||
186 | } | ||
187 | if (bounds.upperBound !== null && bounds.upperBound < 0) { | ||
188 | throw new IllegalArgumentException("Negative upper bound: " + bounds) | ||
189 | } | ||
190 | } | ||
191 | |||
192 | private def setScopesInvalid() { | ||
193 | partialInterpretation.minNewElements = Integer.MAX_VALUE | ||
194 | partialInterpretation.maxNewElements = 0 | ||
195 | for (scope : partialInterpretation.scopes) { | ||
196 | scope.minNewElements = Integer.MAX_VALUE | ||
197 | scope.maxNewElements = 0 | ||
198 | } | ||
199 | } | ||
200 | |||
201 | private static def <T extends IPatternMatch> getCalculatedMultiplicity(ViatraQueryMatcher<T> matcher, | ||
202 | PartialInterpretation p) { | ||
203 | val match = matcher.newEmptyMatch | ||
204 | match.set(0, p.problem) | ||
205 | match.set(1, p) | ||
206 | val iterator = matcher.streamAllMatches(match).iterator | ||
207 | if (!iterator.hasNext) { | ||
208 | return null | ||
209 | } | ||
210 | val value = iterator.next.get(2) as Integer | ||
211 | if (iterator.hasNext) { | ||
212 | throw new IllegalArgumentException("Multiplicity calculation query has more than one match") | ||
213 | } | ||
214 | value | ||
215 | } | ||
216 | |||
217 | @FinalFieldsConstructor | ||
218 | private static class PolyhedronBuilder implements LinearTypeExpressionBuilderFactory { | ||
219 | static val INFINITY_SCALE = 10 | ||
220 | |||
221 | val PartialInterpretation p | ||
222 | |||
223 | Map<Type, Dimension> instanceCounts | ||
224 | Map<Type, Map<Dimension, Integer>> subtypeDimensions | ||
225 | Map<Map<Dimension, Integer>, LinearBoundedExpression> expressionsCache | ||
226 | Map<Type, LinearBoundedExpression> typeBounds | ||
227 | int infinity | ||
228 | ViatraQueryEngine queryEngine | ||
229 | Map<String, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allPatternsByName | ||
230 | ImmutableList.Builder<RelationConstraintUpdater> updatersBuilder | ||
231 | |||
232 | Map<Scope, LinearBoundedExpression> scopeBounds | ||
233 | LinearBoundedExpression topLevelBounds | ||
234 | Polyhedron polyhedron | ||
235 | Set<Relation> relevantRelations | ||
236 | List<RelationConstraintUpdater> updaters | ||
237 | |||
238 | def buildPolyhedron(Set<? extends Type> possibleNewDynamicTypes) { | ||
239 | instanceCounts = possibleNewDynamicTypes.toInvertedMap[new Dimension(name, 0, null)] | ||
240 | val types = p.problem.types | ||
241 | expressionsCache = Maps.newHashMapWithExpectedSize(types.size) | ||
242 | subtypeDimensions = types.toInvertedMap[findSubtypeDimensions.toInvertedMap[1]] | ||
243 | typeBounds = ImmutableMap.copyOf(subtypeDimensions.mapValues[toExpression]) | ||
244 | scopeBounds = buildScopeBounds | ||
245 | topLevelBounds = instanceCounts.values.toInvertedMap[1].toExpression | ||
246 | val dimensions = ImmutableList.copyOf(instanceCounts.values) | ||
247 | val expressionsToSaturate = ImmutableList.copyOf(scopeBounds.values) | ||
248 | polyhedron = new Polyhedron(dimensions, new ArrayList, expressionsToSaturate) | ||
249 | addCachedConstraintsToPolyhedron() | ||
250 | } | ||
251 | |||
252 | def buildMultiplicityConstraints( | ||
253 | Map<RelationMultiplicityConstraint, UnifinishedMultiplicityQueries> constraints, | ||
254 | IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> hasElementInContainmentQuery, | ||
255 | Map<String, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allPatternsByName, | ||
256 | Collection<LinearTypeConstraintHint> hints, int maximumNuberOfNewNodes) { | ||
257 | infinity = maximumNuberOfNewNodes * INFINITY_SCALE | ||
258 | queryEngine = ViatraQueryEngine.on(new EMFScope(p)) | ||
259 | this.allPatternsByName = allPatternsByName | ||
260 | updatersBuilder = ImmutableList.builder | ||
261 | val containmentConstraints = constraints.entrySet.filter[key.containment].groupBy[key.targetType] | ||
262 | for (pair : containmentConstraints.entrySet) { | ||
263 | buildContainmentConstraints(pair.key, pair.value) | ||
264 | } | ||
265 | buildConstainmentRootConstraints(containmentConstraints.keySet, hasElementInContainmentQuery) | ||
266 | for (pair : constraints.entrySet) { | ||
267 | val constraint = pair.key | ||
268 | if (!constraint.containment) { | ||
269 | buildNonContainmentConstraints(constraint, pair.value) | ||
270 | } | ||
271 | } | ||
272 | buildRelevantRelations(constraints.keySet) | ||
273 | for (hint : hints) { | ||
274 | updatersBuilder.add(hint.createConstraintUpdater(this)) | ||
275 | } | ||
276 | updaters = updatersBuilder.build | ||
277 | addCachedConstraintsToPolyhedron() | ||
278 | } | ||
279 | |||
280 | private def buildRelevantRelations(Set<RelationMultiplicityConstraint> constraints) { | ||
281 | val builder = ImmutableSet.builder | ||
282 | for (constraint : constraints) { | ||
283 | builder.add(constraint.relation) | ||
284 | if (constraint.inverseRelation !== null) { | ||
285 | builder.add(constraint.inverseRelation) | ||
286 | } | ||
287 | } | ||
288 | relevantRelations = builder.build | ||
289 | } | ||
290 | |||
291 | private def addCachedConstraintsToPolyhedron() { | ||
292 | val constraints = new HashSet | ||
293 | constraints.addAll(expressionsCache.values.filter(LinearConstraint)) | ||
294 | constraints.removeAll(polyhedron.constraints) | ||
295 | polyhedron.constraints.addAll(constraints) | ||
296 | } | ||
297 | |||
	/**
	 * Builds the orphan-count bound expressions for a contained type and registers
	 * a {@link ContainmentConstraintUpdater} for it.
	 *
	 * The expressions start from the instance count of the contained type, then
	 * subtract per-container capacity (upper bounds) for the orphan lower bound
	 * and per-container demand (lower bounds) for the orphan upper bound.
	 */
	private def buildContainmentConstraints(Type containedType,
		List<Map.Entry<RelationMultiplicityConstraint, UnifinishedMultiplicityQueries>> constraints) {
		val typeCoefficients = subtypeDimensions.get(containedType)
		val orphansLowerBoundCoefficients = new HashMap(typeCoefficients)
		val orphansUpperBoundCoefficients = new HashMap(typeCoefficients)
		val unfinishedMultiplicitiesMatchersBuilder = ImmutableList.builder
		val remainingContentsQueriesBuilder = ImmutableList.builder
		for (pair : constraints) {
			val constraint = pair.key
			val containerCoefficients = subtypeDimensions.get(constraint.sourceType)
			if (constraint.isUpperBoundFinite) {
				orphansLowerBoundCoefficients.addCoefficients(-constraint.upperBound, containerCoefficients)
			} else {
				// Unbounded containment multiplicity is approximated by a large
				// finite value (infinity = maximumNuberOfNewNodes * INFINITY_SCALE).
				orphansLowerBoundCoefficients.addCoefficients(-infinity, containerCoefficients)
			}
			orphansUpperBoundCoefficients.addCoefficients(-constraint.lowerBound, containerCoefficients)
			val queries = pair.value
			if (constraint.constrainsUnfinished) {
				if (queries.unfinishedMultiplicityQuery === null) {
					throw new IllegalArgumentException(
						"Containment constraints need unfinished multiplicity queries")
				}
				unfinishedMultiplicitiesMatchersBuilder.add(
					queries.unfinishedMultiplicityQuery.getMatcher(queryEngine))
			}
			// A remaining-contents query is mandatory for every containment constraint.
			if (queries.remainingContentsQuery === null) {
				throw new IllegalArgumentException("Containment constraints need remaining contents queries")
			}
			remainingContentsQueriesBuilder.add(queries.remainingContentsQuery.getMatcher(queryEngine))
		}
		val orphanLowerBound = orphansLowerBoundCoefficients.toExpression
		val orphanUpperBound = orphansUpperBoundCoefficients.toExpression
		val updater = new ContainmentConstraintUpdater(containedType.name, orphanLowerBound, orphanUpperBound,
			unfinishedMultiplicitiesMatchersBuilder.build, remainingContentsQueriesBuilder.build)
		updatersBuilder.add(updater)
	}
334 | |||
	/**
	 * Registers a {@link ContainmentRootConstraintUpdater} for every instance-count
	 * dimension that does not belong to a contained type, i.e. for potential
	 * containment roots.
	 *
	 * NOTE(review): method name keeps the existing "Constainment" typo because it
	 * is called from the constructor; rename both together if desired.
	 */
	private def buildConstainmentRootConstraints(Set<Type> containedTypes,
		IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> hasElementInContainmentQuery) {
		val matcher = hasElementInContainmentQuery.getMatcher(queryEngine)
		// Root dimensions = all instance-count dimensions minus those of contained types.
		val rootDimensions = Sets.newHashSet(instanceCounts.values)
		for (type : containedTypes) {
			val containedDimensions = subtypeDimensions.get(type).keySet
			rootDimensions.removeAll(containedDimensions)
		}
		for (dimension : rootDimensions) {
			updatersBuilder.add(new ContainmentRootConstraintUpdater(dimension, matcher))
		}
	}
347 | |||
	/**
	 * Registers constraint updaters for a non-containment reference with
	 * multiplicity bounds: an {@link UnfinishedMultiplicityConstraintUpdater}
	 * when remaining inverse multiplicities are constrained, and an
	 * {@link UnrepairableMultiplicityConstraintUpdater} when unrepairable
	 * multiplicities are constrained.
	 */
	private def buildNonContainmentConstraints(RelationMultiplicityConstraint constraint,
		UnifinishedMultiplicityQueries queries) {
		if (constraint.constrainsRemainingInverse) {
			if (queries.unfinishedMultiplicityQuery === null) {
				throw new IllegalArgumentException("Reference constraints need unfinished multiplicity queries")
			}
			val unfinishedMultiplicityMatcher = queries.unfinishedMultiplicityQuery.getMatcher(queryEngine)
			if (queries.remainingInverseMultiplicityQuery === null) {
				throw new IllegalArgumentException(
					"Reference constraints need remaining inverse multiplicity queries")
			}
			val remainingInverseMultiplicityMatcher = queries.remainingInverseMultiplicityQuery.getMatcher(
				queryEngine)
			// availableMultiplicity = (inverseUpperBound - lowerBound) * |targetType|.
			// NOTE(review): both terms are scaled by the targetType dimensions;
			// confirm the lowerBound term should not use sourceType instead.
			val availableMultiplicityCoefficients = new HashMap
			availableMultiplicityCoefficients.addCoefficients(constraint.inverseUpperBound,
				subtypeDimensions.get(constraint.targetType))
			availableMultiplicityCoefficients.addCoefficients(-constraint.lowerBound,
				subtypeDimensions.get(constraint.targetType))
			val availableMultiplicity = availableMultiplicityCoefficients.toExpression
			updatersBuilder.add(
				new UnfinishedMultiplicityConstraintUpdater(constraint.relation.name, availableMultiplicity,
					unfinishedMultiplicityMatcher, remainingInverseMultiplicityMatcher))
		}
		if (constraint.constrainsUnrepairable) {
			if (queries.unrepairableMultiplicityQuery === null) {
				throw new IllegalArgumentException("Reference constraints need unrepairable multiplicity queries")
			}
			val unrepairableMultiplicityMatcher = queries.unrepairableMultiplicityQuery.getMatcher(queryEngine)
			val targetTypeCardinality = typeBounds.get(constraint.targetType)
			updatersBuilder.add(
				new UnrepairableMultiplicityConstraintUpdater(constraint.relation.name, targetTypeCardinality,
					unrepairableMultiplicityMatcher))
		}
	}
382 | |||
383 | private static def addCoefficients(Map<Dimension, Integer> accumulator, int scale, Map<Dimension, Integer> a) { | ||
384 | for (pair : a.entrySet) { | ||
385 | val dimension = pair.key | ||
386 | val currentValue = accumulator.get(pair.key) ?: 0 | ||
387 | val newValue = currentValue + scale * pair.value | ||
388 | if (newValue == 0) { | ||
389 | accumulator.remove(dimension) | ||
390 | } else { | ||
391 | accumulator.put(dimension, newValue) | ||
392 | } | ||
393 | } | ||
394 | } | ||
395 | |||
396 | private def findSubtypeDimensions(Type type) { | ||
397 | val subtypes = new HashSet | ||
398 | val dimensions = new HashSet | ||
399 | val stack = new ArrayDeque | ||
400 | stack.addLast(type) | ||
401 | while (!stack.empty) { | ||
402 | val subtype = stack.removeLast | ||
403 | if (subtypes.add(subtype)) { | ||
404 | val dimension = instanceCounts.get(subtype) | ||
405 | if (dimension !== null) { | ||
406 | dimensions.add(dimension) | ||
407 | } | ||
408 | stack.addAll(subtype.subtypes) | ||
409 | } | ||
410 | } | ||
411 | dimensions | ||
412 | } | ||
413 | |||
	/**
	 * Converts a coefficient map into a {@link LinearBoundedExpression}, caching
	 * results so that structurally equal maps share a single expression object
	 * (and therefore share tightened bounds). A map consisting of a single
	 * dimension with coefficient 1 collapses to the dimension itself.
	 */
	private def toExpression(Map<Dimension, Integer> coefficients) {
		expressionsCache.computeIfAbsent(coefficients) [ c |
			if (c.size == 1 && c.entrySet.head.value == 1) {
				c.entrySet.head.key
			} else {
				new LinearConstraint(c, null, null)
			}
		]
	}
423 | |||
	/**
	 * Maps each scope of the partial interpretation to the cardinality bound
	 * expression of its target type.
	 *
	 * @throws OperationNotSupportedException for primitive type scopes (not implemented)
	 * @throws IllegalArgumentException if a scope demands new elements of a type
	 *         without a bound (i.e. a type that cannot be instantiated), or if
	 *         the target type interpretation kind is unknown
	 */
	private def buildScopeBounds() {
		val scopeBoundsBuilder = ImmutableMap.builder
		for (scope : p.scopes) {
			switch (targetTypeInterpretation : scope.targetTypeInterpretation) {
				PartialPrimitiveInterpretation:
					throw new OperationNotSupportedException("Primitive type scopes are not yet implemented")
				PartialComplexTypeInterpretation: {
					val complexType = targetTypeInterpretation.interpretationOf
					val typeBound = typeBounds.get(complexType)
					if (typeBound === null) {
						// No bound: type cannot be instantiated, so only a scope
						// demanding zero new elements is acceptable.
						if (scope.minNewElements > 0) {
							throw new IllegalArgumentException("Found scope for " + complexType.name +
								", but the type cannot be instantiated")
						}
					} else {
						scopeBoundsBuilder.put(scope, typeBound)
					}
				}
				default:
					throw new IllegalArgumentException("Unknown PartialTypeInterpretation: " +
						targetTypeInterpretation)
			}
		}
		scopeBoundsBuilder.build
	}
449 | |||
450 | override createMatcher(String queryName) { | ||
451 | val querySpecification = allPatternsByName.get(queryName) | ||
452 | if (querySpecification === null) { | ||
453 | throw new IllegalArgumentException("Unknown pattern: " + queryName) | ||
454 | } | ||
455 | querySpecification.getMatcher(queryEngine) | ||
456 | } | ||
457 | |||
458 | override createBuilder() { | ||
459 | new PolyhedronBuilderLinearTypeExpressionBuilder(this) | ||
460 | } | ||
461 | } | ||
462 | |||
	/**
	 * Accumulates type coefficients into a single linear expression using the
	 * owning {@link PolyhedronBuilder}'s subtype dimensions and expression cache.
	 */
	@FinalFieldsConstructor
	private static class PolyhedronBuilderLinearTypeExpressionBuilder implements LinearTypeExpressionBuilder {
		val PolyhedronBuilder polyhedronBuilder
		// Running coefficient map; populated by add() and consumed by build().
		val Map<Dimension, Integer> coefficients = new HashMap

		override add(int scale, Type type) {
			val typeCoefficients = polyhedronBuilder.subtypeDimensions.get(type)
			if (typeCoefficients === null) {
				throw new IllegalArgumentException("Unknown type: " + type)
			}
			PolyhedronBuilder.addCoefficients(coefficients, scale, typeCoefficients)
			// Returns this builder to allow call chaining.
			this
		}

		override build() {
			polyhedronBuilder.toExpression(coefficients)
		}
	}
481 | |||
	/**
	 * Tightens the bounds of the orphan-count expressions of a contained type
	 * from match counts in the current partial interpretation.
	 */
	@FinalFieldsConstructor
	private static class ContainmentConstraintUpdater implements RelationConstraintUpdater {
		val String name
		val LinearBoundedExpression orphansLowerBound
		val LinearBoundedExpression orphansUpperBound
		val List<ViatraQueryMatcher<? extends IPatternMatch>> unfinishedMultiplicitiesMatchers
		val List<ViatraQueryMatcher<? extends IPatternMatch>> remainingContentsQueries

		override update(PartialInterpretation p) {
			tightenLowerBound(p)
			tightenUpperBound(p)
		}

		// Sums the remaining contents capacity; this total is an upper bound
		// on the orphan lower-bound expression. A value of -1 means infinite
		// capacity, so no tightening is possible.
		private def tightenLowerBound(PartialInterpretation p) {
			var int sum = 0
			for (matcher : remainingContentsQueries) {
				val value = matcher.getCalculatedMultiplicity(p)
				if (value === null) {
					throw new IllegalArgumentException("Remaining contents count is missing for " + name)
				}
				if (value == -1) {
					// Infinite upper bound, no need to tighten.
					return
				}
				sum += value
			}
			orphansLowerBound.tightenUpperBound(sum)
		}

		// Sums the unfinished multiplicities; this total is a lower bound on
		// the orphan upper-bound expression.
		private def tightenUpperBound(PartialInterpretation p) {
			var int sum = 0
			for (matcher : unfinishedMultiplicitiesMatchers) {
				val value = matcher.getCalculatedMultiplicity(p)
				if (value === null) {
					throw new IllegalArgumentException("Unfinished multiplicity is missing for " + name)
				}
				sum += value
			}
			orphansUpperBound.tightenLowerBound(sum)
		}
	}
523 | |||
	/**
	 * Limits the number of containment roots: once any element exists inside the
	 * containment hierarchy, the tracked dimension is capped at 0, otherwise at 1.
	 */
	@FinalFieldsConstructor
	private static class ContainmentRootConstraintUpdater implements RelationConstraintUpdater {
		val LinearBoundedExpression typeCardinality
		val ViatraQueryMatcher<? extends IPatternMatch> hasElementInContainmentMatcher

		override update(PartialInterpretation p) {
			if (hasElementInContainmentMatcher.hasMatch(p)) {
				typeCardinality.tightenUpperBound(0)
			} else {
				typeCardinality.tightenUpperBound(1)
			}
		}

		// True when the query has at least one match in the partial interpretation.
		private static def <T extends IPatternMatch> hasMatch(ViatraQueryMatcher<T> matcher, PartialInterpretation p) {
			val match = matcher.newMatch(p.problem, p)
			matcher.countMatches(match) != 0
		}
	}
542 | |||
	/**
	 * Requires the available-multiplicity expression to cover the multiplicity
	 * still unfinished minus what remaining inverse references can supply.
	 */
	@FinalFieldsConstructor
	private static class UnfinishedMultiplicityConstraintUpdater implements RelationConstraintUpdater {
		val String name
		val LinearBoundedExpression availableMultiplicityExpression
		val ViatraQueryMatcher<? extends IPatternMatch> unfinishedMultiplicityMatcher
		val ViatraQueryMatcher<? extends IPatternMatch> remainingInverseMultiplicityMatcher

		override update(PartialInterpretation p) {
			val unfinishedMultiplicity = unfinishedMultiplicityMatcher.getCalculatedMultiplicity(p)
			if (unfinishedMultiplicity === null) {
				throw new IllegalArgumentException("Unfinished multiplicity is missing for " + name)
			}
			val remainingInverseMultiplicity = remainingInverseMultiplicityMatcher.getCalculatedMultiplicity(p)
			if (remainingInverseMultiplicity === null) {
				throw new IllegalArgumentException("Remaining inverse multiplicity is missing for " + name)
			}
			// The shortfall must be covered by new elements of the target type.
			val int requiredMultiplicity = unfinishedMultiplicity - remainingInverseMultiplicity
			availableMultiplicityExpression.tightenLowerBound(requiredMultiplicity)
		}
	}
563 | |||
	/**
	 * Requires the target type cardinality expression to be at least the
	 * unrepairable multiplicity observed in the partial interpretation.
	 */
	@FinalFieldsConstructor
	private static class UnrepairableMultiplicityConstraintUpdater implements RelationConstraintUpdater {
		val String name
		val LinearBoundedExpression targetCardinalityExpression
		val ViatraQueryMatcher<? extends IPatternMatch> unrepairableMultiplicityMatcher

		override update(PartialInterpretation p) {
			val value = unrepairableMultiplicityMatcher.getCalculatedMultiplicity(p)
			if (value === null) {
				throw new IllegalArgumentException("Unrepairable multiplicity is missing for " + name)
			}
			targetCardinalityExpression.tightenLowerBound(value)
		}
	}
578 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronSolver.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronSolver.xtend new file mode 100644 index 00000000..4e046190 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/PolyhedronSolver.xtend | |||
@@ -0,0 +1,179 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import java.util.List | ||
4 | import java.util.Map | ||
5 | import org.eclipse.xtend.lib.annotations.Accessors | ||
6 | import org.eclipse.xtend.lib.annotations.Data | ||
7 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
8 | |||
/** Factory for saturation operators over a given polyhedron. */
interface PolyhedronSolver {
	def PolyhedronSaturationOperator createSaturationOperator(Polyhedron polyhedron)
}
12 | |||
/** Outcome of a polyhedron saturation run. */
enum PolyhedronSaturationResult {
	/** Bounds were tightened to their saturated values. */
	SATURATED,
	/** The polyhedron was found to be empty (no feasible point). */
	EMPTY,
	/** The solver could not determine a result. */
	UNKNOWN
}
18 | |||
/**
 * Saturates (tightens) the bounds of a polyhedron.
 * Extends {@link AutoCloseable}: callers must close the operator after use.
 */
interface PolyhedronSaturationOperator extends AutoCloseable {
	def Polyhedron getPolyhedron()

	def PolyhedronSaturationResult saturate()
}
24 | |||
@FinalFieldsConstructor
@Accessors
class Polyhedron {
	/**
	 * The list of dimensions (variables) for this polyhedron.
	 *
	 * This list must not be modified after the polyhedron was created.
	 * However, lower and upper bounds of the dimensions may be changed.
	 *
	 * Names of dimensions in this list are assumed to be unique.
	 */
	val List<Dimension> dimensions

	/**
	 * The list of constraints defining this polyhedron.
	 *
	 * The list and its elements may be freely modified.
	 */
	val List<LinearConstraint> constraints

	/**
	 * The list of constraints that should be saturated (tightened)
	 * when a {@link PolyhedronSaturationOperator} is invoked.
	 *
	 * This list may be freely modified.
	 *
	 * Place all dimensions and constraints here to saturate all the bounds.
	 */
	val List<LinearBoundedExpression> expressionsToSaturate

	override toString() '''
		Dimensions:
			«FOR dimension : dimensions»
				«dimension»
			«ENDFOR»
		Constraints:
			«FOR constraint : constraints»
				«constraint»
			«ENDFOR»
	'''

	/**
	 * Snapshots the current lower and upper bounds of all dimensions and
	 * constraints into a {@link PolyhedronSignature.Bounds} instance.
	 * Order: dimensions first, then constraints (must match applySignature).
	 */
	def createSignature() {
		val size = dimensions.size + constraints.size
		val lowerBounds = newArrayOfSize(size)
		val upperBounds = newArrayOfSize(size)
		var int i = 0
		for (dimension : dimensions) {
			lowerBounds.set(i, dimension.lowerBound)
			upperBounds.set(i, dimension.upperBound)
			i++
		}
		for (constraint : constraints) {
			lowerBounds.set(i, constraint.lowerBound)
			upperBounds.set(i, constraint.upperBound)
			i++
		}
		new PolyhedronSignature.Bounds(lowerBounds, upperBounds)
	}

	/**
	 * Restores bounds previously captured by {@link #createSignature}.
	 * The dimension and constraint lists must be unchanged since the snapshot,
	 * as positions are matched by iteration order.
	 */
	def applySignature(PolyhedronSignature.Bounds signature) {
		val lowerBounds = signature.lowerBounds
		val upperBounds = signature.upperBounds
		var int i = 0
		for (dimension : dimensions) {
			dimension.lowerBound = lowerBounds.get(i)
			dimension.upperBound = upperBounds.get(i)
			i++
		}
		for (constraint : constraints) {
			constraint.lowerBound = lowerBounds.get(i)
			constraint.upperBound = upperBounds.get(i)
			i++
		}
	}
}
100 | |||
/**
 * A snapshot of a polyhedron's state.
 * The only instances are {@link #EMPTY} and {@link Bounds} objects.
 */
abstract class PolyhedronSignature {
	/** Distinguished signature; presumably denotes an empty (infeasible) polyhedron — TODO confirm at use sites. */
	public static val EMPTY = new PolyhedronSignature {
		override toString() {
			"PolyhedronSignature.EMPTY"
		}
	}

	// Private constructor: prevents subclassing outside this class body.
	private new() {
	}

	/** Per-position lower/upper bound arrays captured by {@link Polyhedron#createSignature}. */
	@Data
	static class Bounds extends PolyhedronSignature {
		val Integer[] lowerBounds
		val Integer[] upperBounds
	}
}
117 | |||
/**
 * An expression with optional integer lower and upper bounds;
 * a {@code null} bound means the expression is unbounded in that direction.
 */
@Accessors
abstract class LinearBoundedExpression {
	var Integer lowerBound
	var Integer upperBound

	/** Raises the lower bound when {@code tighterBound} is strictly larger (or the bound was unset). */
	def void tightenLowerBound(Integer tighterBound) {
		if (lowerBound === null || (tighterBound !== null && lowerBound < tighterBound)) {
			lowerBound = tighterBound
		}
	}

	/** Lowers the upper bound when {@code tighterBound} is strictly smaller (or the bound was unset). */
	def void tightenUpperBound(Integer tighterBound) {
		if (upperBound === null || (tighterBound !== null && upperBound > tighterBound)) {
			upperBound = tighterBound
		}
	}

	/** Constrains the expression to equal {@code bound} by tightening both bounds toward it. */
	def void assertEqualsTo(int bound) {
		tightenLowerBound(bound)
		tightenUpperBound(bound)
	}
}
140 | |||
/** A named variable (dimension) of a polyhedron, with optional bounds. */
@Accessors
class Dimension extends LinearBoundedExpression {
	val String name

	@FinalFieldsConstructor
	new() {
	}

	// Convenience constructor setting initial bounds (null = unbounded).
	new(String name, Integer lowerBound, Integer upperBound) {
		this(name)
		this.lowerBound = lowerBound
		this.upperBound = upperBound
	}

	override toString() {
		'''«IF lowerBound !== null»«lowerBound» <= «ENDIF»«name»«IF upperBound !== null» <= «upperBound»«ENDIF»'''
	}

}
160 | |||
/**
 * A linear combination of dimensions with integer coefficients,
 * optionally bounded below and/or above.
 */
@Accessors
class LinearConstraint extends LinearBoundedExpression {
	// Sparse coefficient map: dimensions absent from the map have coefficient 0.
	val Map<Dimension, Integer> coefficients

	@FinalFieldsConstructor
	new() {
	}

	// Convenience constructor setting initial bounds (null = unbounded).
	new(Map<Dimension, Integer> coefficients, Integer lowerBound, Integer upperBound) {
		this(coefficients)
		this.lowerBound = lowerBound
		this.upperBound = upperBound
	}

	override toString() {
		'''«IF lowerBound !== null»«lowerBound» <= «ENDIF»«FOR pair : coefficients.entrySet SEPARATOR " + "»«IF pair.value != 1»«pair.value» * «ENDIF»«pair.key.name»«ENDFOR»«IF upperBound !== null» <= «upperBound»«ENDIF»'''
	}

}
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/RelationConstraintCalculator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/RelationConstraintCalculator.xtend new file mode 100644 index 00000000..c92260ea --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/RelationConstraintCalculator.xtend | |||
@@ -0,0 +1,137 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import com.google.common.collect.ImmutableList | ||
4 | import com.google.common.collect.ImmutableSet | ||
5 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.InverseRelationAssertion | ||
6 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.LowerMultiplicityAssertion | ||
7 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.UpperMultiplicityAssertion | ||
8 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.ComplexTypeReference | ||
9 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation | ||
10 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | ||
11 | import java.util.HashMap | ||
12 | import java.util.List | ||
13 | import org.eclipse.xtend.lib.annotations.Data | ||
14 | |||
/** Container for the multiplicity constraints derived from a logic problem. */
@Data
class RelationConstraints {
	val List<RelationMultiplicityConstraint> multiplicityConstraints
}
19 | |||
/**
 * Multiplicity information about a binary relation, together with derived
 * predicates deciding which cardinality constraints must be generated.
 * An upper bound (or inverse upper bound) of -1 denotes "unbounded".
 */
@Data
class RelationMultiplicityConstraint {
	Relation relation
	/** Inverse relation, or null if the relation has no declared inverse. */
	Relation inverseRelation
	/** Whether this relation is a containment edge. */
	boolean containment
	/** Whether the inverse of this relation is a containment edge. */
	boolean container
	int lowerBound
	int upperBound
	int inverseUpperBound

	def isUpperBoundFinite() {
		upperBound >= 0
	}

	private def isInverseUpperBoundFinite() {
		inverseUpperBound >= 0
	}

	private def canHaveMultipleSourcesPerTarget() {
		inverseUpperBound != 1
	}

	def constrainsUnfinished() {
		lowerBound >= 1 && (!container || lowerBound >= 2)
	}

	def constrainsUnrepairable() {
		// NOTE(review): the trailing '&& false' permanently disables unrepairable
		// multiplicity checking; kept as-is to preserve behavior — confirm intent.
		constrainsUnfinished && canHaveMultipleSourcesPerTarget && false
	}

	def constrainsRemainingInverse() {
		lowerBound >= 1 && !containment && inverseUpperBoundFinite
	}

	def constrainsRemainingContents() {
		containment
	}

	/** True when at least one kind of constraint must be generated for the relation. */
	def isActive() {
		constrainsUnfinished || constrainsUnrepairable || constrainsRemainingInverse || constrainsRemainingContents
	}

	def getSourceType() {
		getParamType(0)
	}

	def getTargetType() {
		getParamType(1)
	}

	/**
	 * Returns the complex type of the i-th relation parameter.
	 *
	 * Fix: the error message previously always claimed an unknown *source* type,
	 * even when the target parameter (index 1) was the problem.
	 *
	 * @throws IllegalArgumentException if the parameter is missing or is not a
	 *         {@link ComplexTypeReference}
	 */
	private def getParamType(int i) {
		val parameters = relation.parameters
		if (i < parameters.size) {
			val param = parameters.get(i)
			if (param instanceof ComplexTypeReference) {
				return param.referred
			}
		}
		throw new IllegalArgumentException("Constraint with unknown parameter type at index " + i)
	}
}
81 | |||
/**
 * Derives {@link RelationMultiplicityConstraint}s from the containment
 * hierarchy and the multiplicity/inverse annotations of a {@link LogicProblem}.
 */
class RelationConstraintCalculator {
	def calculateRelationConstraints(LogicProblem problem) {
		// At most one containment hierarchy is supported.
		val containmentRelations = switch (problem.containmentHierarchies.size) {
			case 0:
				<Relation>emptySet
			case 1:
				ImmutableSet.copyOf(problem.containmentHierarchies.head.containmentRelations)
			default:
				throw new IllegalArgumentException("Only a single containment hierarchy is supported")
		}
		val inverseRelations = new HashMap<Relation, Relation>
		val lowerMultiplicities = new HashMap<Relation, Integer>
		val upperMultiplicities = new HashMap<Relation, Integer>
		// Defaults: lower bound 0, unbounded (-1) upper bound for every relation.
		for (relation : problem.relations) {
			lowerMultiplicities.put(relation, 0)
			upperMultiplicities.put(relation, -1)
		}
		// Collect inverse-relation and multiplicity assertions from annotations.
		for (annotation : problem.annotations) {
			switch (annotation) {
				InverseRelationAssertion: {
					inverseRelations.put(annotation.inverseA, annotation.inverseB)
					inverseRelations.put(annotation.inverseB, annotation.inverseA)
				}
				LowerMultiplicityAssertion:
					lowerMultiplicities.put(annotation.relation, annotation.lower)
				UpperMultiplicityAssertion:
					upperMultiplicities.put(annotation.relation, annotation.upper)
			}
		}
		val multiplicityConstraintsBuilder = ImmutableList.builder()
		for (relation : problem.relations) {
			val containment = containmentRelations.contains(relation)
			val lowerMultiplicity = lowerMultiplicities.get(relation)
			val upperMultiplicity = upperMultiplicities.get(relation)
			var container = false
			var inverseUpperMultiplicity = -1
			val inverseRelation = inverseRelations.get(relation)
			if (inverseRelation !== null) {
				inverseUpperMultiplicity = upperMultiplicities.get(inverseRelation)
				container = containmentRelations.contains(inverseRelation)
			}
			// A containment edge has an effective inverse upper bound of 1
			// (each object has at most one container).
			if (containment) {
				inverseUpperMultiplicity = 1
			}
			val constraint = new RelationMultiplicityConstraint(relation, inverseRelation, containment, container,
				lowerMultiplicity, upperMultiplicity, inverseUpperMultiplicity)
			// Only binary relations may carry multiplicity/containment constraints.
			if (constraint.isActive) {
				if (relation.parameters.size != 2) {
					throw new IllegalArgumentException('''Relation «relation.name» has multiplicity or containment constraints, but it is not binary''')
				}
				multiplicityConstraintsBuilder.add(constraint)
			}
		}
		new RelationConstraints(multiplicityConstraintsBuilder.build)
	}
}
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagator.xtend new file mode 100644 index 00000000..2376fb38 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagator.xtend | |||
@@ -0,0 +1,126 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialComplexTypeInterpretation | ||
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialTypeInterpratation | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.Scope | ||
9 | import java.util.HashMap | ||
10 | import java.util.HashSet | ||
11 | import java.util.Map | ||
12 | import java.util.Set | ||
13 | import org.eclipse.xtend.lib.annotations.Accessors | ||
14 | |||
/**
 * Tracks type scopes (allowed numbers of new elements per type) of a partial
 * interpretation and propagates the effects of object creation along the
 * type hierarchy. Subclasses may implement stronger constraint propagation
 * via {@link #doPropagateAllScopeConstraints}.
 */
class ScopePropagator {
	@Accessors(PROTECTED_GETTER) val PartialInterpretation partialInterpretation
	@Accessors(PROTECTED_GETTER) val ModelGenerationStatistics statistics
	// Maps each type interpretation to its scope, if any.
	val Map<PartialTypeInterpratation, Scope> type2Scope
	// Transitive super/sub scope relations derived from the type hierarchy.
	@Accessors(PROTECTED_GETTER) val Map<Scope, Set<Scope>> superScopes
	@Accessors(PROTECTED_GETTER) val Map<Scope, Set<Scope>> subScopes

	new(PartialInterpretation p, ModelGenerationStatistics statistics) {
		partialInterpretation = p
		this.statistics = statistics
		type2Scope = new HashMap
		for (scope : p.scopes) {
			type2Scope.put(scope.targetTypeInterpretation, scope)
		}

		superScopes = new HashMap
		subScopes = new HashMap
		for (scope : p.scopes) {
			superScopes.put(scope, new HashSet)
			subScopes.put(scope, new HashSet)
		}

		// Direct sub-/super-scope edges from the supertype interpretations.
		for (scope : p.scopes) {
			val target = scope.targetTypeInterpretation
			if (target instanceof PartialComplexTypeInterpretation) {
				val supertypeInterpretations = target.supertypeInterpretation
				for (supertypeInterpretation : supertypeInterpretations) {
					val supertypeScope = type2Scope.get(supertypeInterpretation)
					superScopes.get(scope).add(supertypeScope)
					subScopes.get(supertypeScope).add(scope)
				}
			}
		}
		// Transitive closure of the scope hierarchy.
		// Fix: the original wrote 'changed = changed || set.addAll(...)', whose
		// short-circuit skipped the remaining addAll calls of an iteration once a
		// change had been seen, forcing redundant extra fixpoint iterations.
		// Evaluate addAll unconditionally; the resulting fixpoint is identical.
		var boolean changed
		do {
			changed = false
			for (scope : p.scopes) {
				val subScopeSet = subScopes.get(scope)
				val superScopeSet = superScopes.get(scope)
				for (subScope : subScopeSet) {
					if (superScopes.get(subScope).addAll(superScopeSet)) {
						changed = true
					}
				}
				for (superScope : superScopeSet) {
					if (subScopes.get(superScope).addAll(subScopeSet)) {
						changed = true
					}
				}
			}
		} while (changed)

		copyScopeBoundsToHeuristic()
	}

	def propagateAllScopeConstraints() {
		statistics.incrementScopePropagationCount()
		doPropagateAllScopeConstraints()
	}

	// Initializes the heuristic bounds from the hard scope bounds.
	protected def copyScopeBoundsToHeuristic() {
		partialInterpretation.minNewElementsHeuristic = partialInterpretation.minNewElements
		for (scope : partialInterpretation.scopes) {
			scope.minNewElementsHeuristic = scope.minNewElements
		}
	}

	protected def void doPropagateAllScopeConstraints() {
		// Nothing to propagate in the base implementation.
	}

	/**
	 * Records the creation of an element of type {@code t}: decrements the
	 * type's scope and all its super scopes as well as the global element
	 * counters, then re-runs scope propagation.
	 *
	 * @throws IllegalArgumentException if no more new elements are allowed
	 */
	def propagateAdditionToType(PartialTypeInterpratation t) {
		val targetScope = type2Scope.get(t)
		if (targetScope !== null) {
			targetScope.removeOne
			superScopes.get(targetScope).forEach[removeOne]
		}
		if (this.partialInterpretation.minNewElements > 0) {
			this.partialInterpretation.minNewElements = this.partialInterpretation.minNewElements - 1
		}
		if (this.partialInterpretation.minNewElementsHeuristic > 0) {
			this.partialInterpretation.minNewElementsHeuristic = this.partialInterpretation.minNewElementsHeuristic - 1
		}
		// A negative maxNewElements means "unbounded": only decrement positive values.
		if (this.partialInterpretation.maxNewElements > 0) {
			this.partialInterpretation.maxNewElements = this.partialInterpretation.maxNewElements - 1
		} else if (this.partialInterpretation.maxNewElements === 0) {
			// Fix: message previously said "lower node limit is 0" although the
			// exhausted limit is the maximum element count.
			throw new IllegalArgumentException('''Inconsistent object creation: no new elements are allowed (maxNewElements is 0)!''')
		}
		propagateAllScopeConstraints
	}

	def void propagateAdditionToRelation(Relation r) {
		// Nothing to propagate in the base implementation.
	}

	// Decrements the element budget of a single scope (negative max = unbounded).
	private def removeOne(Scope scope) {
		if (scope.maxNewElements === 0) {
			throw new IllegalArgumentException('''Inconsistent object creation: «scope.targetTypeInterpretation»''')
		} else if (scope.maxNewElements > 0) {
			scope.maxNewElements = scope.maxNewElements - 1
		}
		if (scope.minNewElements > 0) {
			scope.minNewElements = scope.minNewElements - 1
		}
		if (scope.minNewElementsHeuristic > 0) {
			scope.minNewElementsHeuristic = scope.minNewElementsHeuristic - 1
		}
	}
}
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagatorStrategy.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagatorStrategy.xtend new file mode 100644 index 00000000..3165917a --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/ScopePropagatorStrategy.xtend | |||
@@ -0,0 +1,71 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import org.eclipse.xtend.lib.annotations.Data | ||
4 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
5 | |||
6 | enum PolyhedralScopePropagatorConstraints { | ||
7 | TypeHierarchy, | ||
8 | Relational | ||
9 | } | ||
10 | |||
11 | enum PolyhedralScopePropagatorSolver { | ||
12 | Z3Real, | ||
13 | Z3Integer, | ||
14 | Cbc, | ||
15 | Clp | ||
16 | } | ||
17 | |||
18 | abstract class ScopePropagatorStrategy { | ||
19 | public static val None = new Simple("None") | ||
20 | |||
21 | public static val Basic = new Simple("Basic") | ||
22 | |||
23 | public static val BasicTypeHierarchy = new Simple("BasicTypeHierarchy") | ||
24 | |||
25 | private new() { | ||
26 | } | ||
27 | |||
28 | def boolean requiresUpperBoundIndexing() | ||
29 | |||
30 | static class Simple extends ScopePropagatorStrategy { | ||
31 | val String name | ||
32 | |||
33 | @FinalFieldsConstructor | ||
34 | private new() { | ||
35 | } | ||
36 | |||
37 | override requiresUpperBoundIndexing() { | ||
38 | false | ||
39 | } | ||
40 | |||
41 | override toString() { | ||
42 | name | ||
43 | } | ||
44 | } | ||
45 | |||
46 | @Data | ||
47 | static class Polyhedral extends ScopePropagatorStrategy { | ||
48 | public static val UNLIMITED_TIME = -1 | ||
49 | |||
50 | val PolyhedralScopePropagatorConstraints constraints | ||
51 | val PolyhedralScopePropagatorSolver solver | ||
52 | val boolean updateHeuristic | ||
53 | val double timeoutSeconds | ||
54 | |||
55 | @FinalFieldsConstructor | ||
56 | new() { | ||
57 | } | ||
58 | |||
59 | new(PolyhedralScopePropagatorConstraints constraints, PolyhedralScopePropagatorSolver solver, boolean updateHeuristic) { | ||
60 | this(constraints, solver, updateHeuristic, UNLIMITED_TIME) | ||
61 | } | ||
62 | |||
63 | new(PolyhedralScopePropagatorConstraints constraints, PolyhedralScopePropagatorSolver solver) { | ||
64 | this(constraints, solver, true) | ||
65 | } | ||
66 | |||
67 | override requiresUpperBoundIndexing() { | ||
68 | constraints == PolyhedralScopePropagatorConstraints.Relational | ||
69 | } | ||
70 | } | ||
71 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/TypeHierarchyScopePropagator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/TypeHierarchyScopePropagator.xtend new file mode 100644 index 00000000..d1704b39 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/TypeHierarchyScopePropagator.xtend | |||
@@ -0,0 +1,85 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.Scope | ||
6 | |||
7 | class TypeHierarchyScopePropagator extends ScopePropagator { | ||
8 | |||
9 | new(PartialInterpretation p, ModelGenerationStatistics statistics) { | ||
10 | super(p, statistics) | ||
11 | } | ||
12 | |||
13 | protected override doPropagateAllScopeConstraints() { | ||
14 | var boolean hadChanged | ||
15 | do { | ||
16 | hadChanged = false | ||
17 | for (superScopeEntry : superScopes.entrySet) { | ||
18 | val sub = superScopeEntry.key | ||
19 | hadChanged = propagateLowerLimitUp(sub, partialInterpretation) || hadChanged | ||
20 | hadChanged = propagateUpperLimitDown(sub, partialInterpretation) || hadChanged | ||
21 | for (sup : superScopeEntry.value) { | ||
22 | hadChanged = propagateLowerLimitUp(sub, sup) || hadChanged | ||
23 | hadChanged = propagateUpperLimitDown(sub, sup) || hadChanged | ||
24 | } | ||
25 | } | ||
26 | } while (hadChanged) | ||
27 | } | ||
28 | |||
29 | private def propagateLowerLimitUp(Scope subScope, Scope superScope) { | ||
30 | var changed = false | ||
31 | if (subScope.minNewElements > superScope.minNewElements) { | ||
32 | superScope.minNewElements = subScope.minNewElements | ||
33 | changed = true | ||
34 | } | ||
35 | if (subScope.minNewElementsHeuristic > superScope.minNewElementsHeuristic) { | ||
36 | superScope.minNewElementsHeuristic = subScope.minNewElementsHeuristic | ||
37 | changed = true | ||
38 | } | ||
39 | changed | ||
40 | } | ||
41 | |||
42 | private def propagateUpperLimitDown(Scope subScope, Scope superScope) { | ||
43 | if (superScope.maxNewElements >= 0 && | ||
44 | (superScope.maxNewElements < subScope.maxNewElements || subScope.maxNewElements < 0)) { | ||
45 | // println(''' | ||
46 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> «(superScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» | ||
47 | // subScope.maxNewElements «subScope.maxNewElements» = superScope.maxNewElements «superScope.maxNewElements» | ||
48 | // ''') | ||
49 | subScope.maxNewElements = superScope.maxNewElements | ||
50 | return true | ||
51 | } else { | ||
52 | return false | ||
53 | } | ||
54 | } | ||
55 | |||
56 | private def propagateLowerLimitUp(Scope subScope, PartialInterpretation p) { | ||
57 | var changed = false | ||
58 | if (subScope.minNewElements > p.minNewElements) { | ||
59 | // println(''' | ||
60 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> nodes | ||
61 | // p.minNewElements «p.minNewElements» = subScope.minNewElements «subScope.minNewElements» | ||
62 | // ''') | ||
63 | p.minNewElements = subScope.minNewElements | ||
64 | changed = true | ||
65 | } | ||
66 | if (subScope.minNewElementsHeuristic > p.minNewElementsHeuristic) { | ||
67 | p.minNewElementsHeuristic = subScope.minNewElementsHeuristic | ||
68 | changed = true | ||
69 | } | ||
70 | changed | ||
71 | } | ||
72 | |||
73 | private def propagateUpperLimitDown(Scope subScope, PartialInterpretation p) { | ||
74 | if (p.maxNewElements >= 0 && (p.maxNewElements < subScope.maxNewElements || subScope.maxNewElements < 0)) { | ||
75 | // println(''' | ||
76 | // «(subScope.targetTypeInterpretation as PartialComplexTypeInterpretation).interpretationOf.name» -> nodes | ||
77 | // subScope.maxNewElements «subScope.maxNewElements» = p.maxNewElements «p.maxNewElements» | ||
78 | // ''') | ||
79 | subScope.maxNewElements = p.maxNewElements | ||
80 | return true | ||
81 | } else { | ||
82 | return false | ||
83 | } | ||
84 | } | ||
85 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/Z3PolyhedronSolver.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/Z3PolyhedronSolver.xtend new file mode 100644 index 00000000..3b831433 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/cardinality/Z3PolyhedronSolver.xtend | |||
@@ -0,0 +1,272 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality | ||
2 | |||
3 | import com.microsoft.z3.AlgebraicNum | ||
4 | import com.microsoft.z3.ArithExpr | ||
5 | import com.microsoft.z3.Context | ||
6 | import com.microsoft.z3.Expr | ||
7 | import com.microsoft.z3.IntNum | ||
8 | import com.microsoft.z3.Optimize | ||
9 | import com.microsoft.z3.RatNum | ||
10 | import com.microsoft.z3.Status | ||
11 | import com.microsoft.z3.Symbol | ||
12 | import java.math.BigDecimal | ||
13 | import java.math.MathContext | ||
14 | import java.math.RoundingMode | ||
15 | import java.util.Map | ||
16 | import org.eclipse.xtend.lib.annotations.Accessors | ||
17 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
18 | |||
19 | class Z3PolyhedronSolver implements PolyhedronSolver { | ||
20 | val boolean lpRelaxation | ||
21 | val double timeoutSeconds | ||
22 | |||
23 | @FinalFieldsConstructor | ||
24 | new() { | ||
25 | } | ||
26 | |||
27 | new() { | ||
28 | this(false, -1) | ||
29 | } | ||
30 | |||
31 | override createSaturationOperator(Polyhedron polyhedron) { | ||
32 | new DisposingZ3SaturationOperator(this, polyhedron) | ||
33 | } | ||
34 | |||
35 | def createPersistentSaturationOperator(Polyhedron polyhedron) { | ||
36 | new Z3SaturationOperator(polyhedron, lpRelaxation, timeoutSeconds) | ||
37 | } | ||
38 | } | ||
39 | |||
40 | @FinalFieldsConstructor | ||
41 | class DisposingZ3SaturationOperator implements PolyhedronSaturationOperator { | ||
42 | val Z3PolyhedronSolver solver | ||
43 | @Accessors val Polyhedron polyhedron | ||
44 | |||
45 | override saturate() { | ||
46 | val persistentOperator = solver.createPersistentSaturationOperator(polyhedron) | ||
47 | try { | ||
48 | persistentOperator.saturate | ||
49 | } finally { | ||
50 | persistentOperator.close | ||
51 | } | ||
52 | } | ||
53 | |||
54 | override close() throws Exception { | ||
55 | // Nothing to close. | ||
56 | } | ||
57 | } | ||
58 | |||
59 | class Z3SaturationOperator extends AbstractPolyhedronSaturationOperator { | ||
60 | static val INFINITY_SYMBOL_NAME = "oo" | ||
61 | static val MULT_SYMBOL_NAME = "*" | ||
62 | static val TIMEOUT_SYMBOL_NAME = "timeout" | ||
63 | static val INTEGER_PRECISION = new BigDecimal(Integer.MAX_VALUE).precision | ||
64 | static val ROUND_DOWN = new MathContext(INTEGER_PRECISION, RoundingMode.FLOOR) | ||
65 | static val ROUND_UP = new MathContext(INTEGER_PRECISION, RoundingMode.CEILING) | ||
66 | // The interval isolating the number is smaller than 1/10^precision. | ||
67 | static val ALGEBRAIC_NUMBER_ROUNDING = 0 | ||
68 | |||
69 | extension val Context context | ||
70 | val Symbol infinitySymbol | ||
71 | val Symbol multSymbol | ||
72 | val Map<Dimension, ArithExpr> variables | ||
73 | val int timeoutMilliseconds | ||
74 | |||
75 | new(Polyhedron polyhedron, boolean lpRelaxation, double timeoutSeconds) { | ||
76 | super(polyhedron) | ||
77 | context = new Context | ||
78 | infinitySymbol = context.mkSymbol(INFINITY_SYMBOL_NAME) | ||
79 | multSymbol = context.mkSymbol(MULT_SYMBOL_NAME) | ||
80 | variables = polyhedron.dimensions.toInvertedMap [ dimension | | ||
81 | val name = dimension.name | ||
82 | if (lpRelaxation) { | ||
83 | mkRealConst(name) | ||
84 | } else { | ||
85 | mkIntConst(name) | ||
86 | } | ||
87 | ] | ||
88 | timeoutMilliseconds = Math.ceil(timeoutSeconds * 1000) as int | ||
89 | } | ||
90 | |||
91 | override doSaturate() { | ||
92 | val status = executeSolver() | ||
93 | convertStatusToSaturationResult(status) | ||
94 | } | ||
95 | |||
96 | private def convertStatusToSaturationResult(Status status) { | ||
97 | switch (status) { | ||
98 | case SATISFIABLE: | ||
99 | PolyhedronSaturationResult.SATURATED | ||
100 | case UNSATISFIABLE: | ||
101 | PolyhedronSaturationResult.EMPTY | ||
102 | case UNKNOWN: | ||
103 | PolyhedronSaturationResult.UNKNOWN | ||
104 | default: | ||
105 | throw new IllegalArgumentException("Unknown Status: " + status) | ||
106 | } | ||
107 | } | ||
108 | |||
109 | private def executeSolver() { | ||
110 | for (expressionToSaturate : polyhedron.expressionsToSaturate) { | ||
111 | val expr = expressionToSaturate.toExpr | ||
112 | val lowerResult = saturateLowerBound(expr, expressionToSaturate) | ||
113 | if (lowerResult != Status.SATISFIABLE) { | ||
114 | return lowerResult | ||
115 | } | ||
116 | val upperResult = saturateUpperBound(expr, expressionToSaturate) | ||
117 | if (upperResult != Status.SATISFIABLE) { | ||
118 | return upperResult | ||
119 | } | ||
120 | } | ||
121 | Status.SATISFIABLE | ||
122 | } | ||
123 | |||
124 | private def saturateLowerBound(ArithExpr expr, LinearBoundedExpression expressionToSaturate) { | ||
125 | val optimize = prepareOptimize | ||
126 | val handle = optimize.MkMinimize(expr) | ||
127 | val status = optimize.Check() | ||
128 | if (status == Status.SATISFIABLE) { | ||
129 | val value = switch (resultExpr : handle.lower) { | ||
130 | IntNum: | ||
131 | resultExpr.getInt() | ||
132 | RatNum: | ||
133 | ceil(resultExpr) | ||
134 | AlgebraicNum: | ||
135 | ceil(resultExpr.toUpper(ALGEBRAIC_NUMBER_ROUNDING)) | ||
136 | default: | ||
137 | if (isNegativeInfinity(resultExpr)) { | ||
138 | null | ||
139 | } else { | ||
140 | throw new IllegalArgumentException("Integer result expected, got: " + resultExpr) | ||
141 | } | ||
142 | } | ||
143 | expressionToSaturate.lowerBound = value | ||
144 | } | ||
145 | status | ||
146 | } | ||
147 | |||
148 | private def floor(RatNum ratNum) { | ||
149 | val numerator = new BigDecimal(ratNum.numerator.bigInteger) | ||
150 | val denominator = new BigDecimal(ratNum.denominator.bigInteger) | ||
151 | numerator.divide(denominator, ROUND_DOWN).setScale(0, RoundingMode.FLOOR).intValue | ||
152 | } | ||
153 | |||
154 | private def saturateUpperBound(ArithExpr expr, LinearBoundedExpression expressionToSaturate) { | ||
155 | val optimize = prepareOptimize | ||
156 | val handle = optimize.MkMaximize(expr) | ||
157 | val status = optimize.Check() | ||
158 | if (status == Status.SATISFIABLE) { | ||
159 | val value = switch (resultExpr : handle.upper) { | ||
160 | IntNum: | ||
161 | resultExpr.getInt() | ||
162 | RatNum: | ||
163 | floor(resultExpr) | ||
164 | AlgebraicNum: | ||
165 | floor(resultExpr.toLower(ALGEBRAIC_NUMBER_ROUNDING)) | ||
166 | default: | ||
167 | if (isPositiveInfinity(resultExpr)) { | ||
168 | null | ||
169 | } else { | ||
170 | throw new IllegalArgumentException("Integer result expected, got: " + resultExpr) | ||
171 | } | ||
172 | } | ||
173 | expressionToSaturate.upperBound = value | ||
174 | } | ||
175 | status | ||
176 | } | ||
177 | |||
178 | private def ceil(RatNum ratNum) { | ||
179 | val numerator = new BigDecimal(ratNum.numerator.bigInteger) | ||
180 | val denominator = new BigDecimal(ratNum.denominator.bigInteger) | ||
181 | numerator.divide(denominator, ROUND_UP).setScale(0, RoundingMode.CEILING).intValue | ||
182 | } | ||
183 | |||
184 | private def isPositiveInfinity(Expr expr) { | ||
185 | expr.app && expr.getFuncDecl.name == infinitySymbol | ||
186 | } | ||
187 | |||
188 | private def isNegativeInfinity(Expr expr) { | ||
189 | // Negative infinity is represented as (* (- 1) oo) | ||
190 | if (!expr.app || expr.getFuncDecl.name != multSymbol || expr.numArgs != 2) { | ||
191 | return false | ||
192 | } | ||
193 | isPositiveInfinity(expr.args.get(1)) | ||
194 | } | ||
195 | |||
196 | private def prepareOptimize() { | ||
197 | val optimize = mkOptimize() | ||
198 | if (timeoutMilliseconds >= 0) { | ||
199 | val params = mkParams() | ||
200 | // We cannot turn TIMEOUT_SYMBOL_NAME into a Symbol in the constructor, | ||
201 | // because there is no add(Symbol, int) overload. | ||
202 | params.add(TIMEOUT_SYMBOL_NAME, timeoutMilliseconds) | ||
203 | optimize.parameters = params | ||
204 | } | ||
205 | assertConstraints(optimize) | ||
206 | optimize | ||
207 | } | ||
208 | |||
209 | private def assertConstraints(Optimize it) { | ||
210 | for (pair : variables.entrySet) { | ||
211 | assertBounds(pair.value, pair.key) | ||
212 | } | ||
213 | for (constraint : nonTrivialConstraints) { | ||
214 | val expr = createLinearCombination(constraint.coefficients) | ||
215 | assertBounds(expr, constraint) | ||
216 | } | ||
217 | } | ||
218 | |||
219 | private def assertBounds(Optimize it, ArithExpr expression, LinearBoundedExpression bounds) { | ||
220 | val lowerBound = bounds.lowerBound | ||
221 | val upperBound = bounds.upperBound | ||
222 | if (lowerBound == upperBound) { | ||
223 | if (lowerBound === null) { | ||
224 | return | ||
225 | } | ||
226 | Assert(mkEq(expression, mkInt(lowerBound))) | ||
227 | } else { | ||
228 | if (lowerBound !== null) { | ||
229 | Assert(mkGe(expression, mkInt(lowerBound))) | ||
230 | } | ||
231 | if (upperBound !== null) { | ||
232 | Assert(mkLe(expression, mkInt(upperBound))) | ||
233 | } | ||
234 | } | ||
235 | } | ||
236 | |||
237 | private def toExpr(LinearBoundedExpression linearBoundedExpression) { | ||
238 | switch (linearBoundedExpression) { | ||
239 | Dimension: variables.get(linearBoundedExpression) | ||
240 | LinearConstraint: createLinearCombination(linearBoundedExpression.coefficients) | ||
241 | default: throw new IllegalArgumentException("Unknown linear bounded expression:" + linearBoundedExpression) | ||
242 | } | ||
243 | } | ||
244 | |||
245 | private def createLinearCombination(Map<Dimension, Integer> coefficients) { | ||
246 | val size = coefficients.size | ||
247 | if (size == 0) { | ||
248 | return mkInt(0) | ||
249 | } | ||
250 | val array = newArrayOfSize(size) | ||
251 | var int i = 0 | ||
252 | for (pair : coefficients.entrySet) { | ||
253 | val variable = variables.get(pair.key) | ||
254 | if (variable === null) { | ||
255 | throw new IllegalArgumentException("Unknown dimension: " + pair.key.name) | ||
256 | } | ||
257 | val coefficient = pair.value | ||
258 | val term = if (coefficient == 1) { | ||
259 | variable | ||
260 | } else { | ||
261 | mkMul(mkInt(coefficient), variable) | ||
262 | } | ||
263 | array.set(i, term) | ||
264 | i++ | ||
265 | } | ||
266 | mkAdd(array) | ||
267 | } | ||
268 | |||
269 | override close() throws Exception { | ||
270 | context.close() | ||
271 | } | ||
272 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Interval.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Interval.xtend new file mode 100644 index 00000000..691c8783 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Interval.xtend | |||
@@ -0,0 +1,584 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval | ||
2 | |||
3 | import java.math.BigDecimal | ||
4 | import java.math.MathContext | ||
5 | import java.math.RoundingMode | ||
6 | import org.eclipse.xtend.lib.annotations.Data | ||
7 | |||
8 | abstract class Interval implements Comparable<Interval> { | ||
9 | static val PRECISION = 32 | ||
10 | package static val ROUND_DOWN = new MathContext(PRECISION, RoundingMode.FLOOR) | ||
11 | package static val ROUND_UP = new MathContext(PRECISION, RoundingMode.CEILING) | ||
12 | |||
13 | private new() { | ||
14 | } | ||
15 | |||
16 | abstract def boolean mustEqual(Interval other) | ||
17 | |||
18 | abstract def boolean mayEqual(Interval other) | ||
19 | |||
20 | def mustNotEqual(Interval other) { | ||
21 | !mayEqual(other) | ||
22 | } | ||
23 | |||
24 | def mayNotEqual(Interval other) { | ||
25 | !mustEqual(other) | ||
26 | } | ||
27 | |||
28 | abstract def boolean mustBeLessThan(Interval other) | ||
29 | |||
30 | abstract def boolean mayBeLessThan(Interval other) | ||
31 | |||
32 | def mustBeLessThanOrEqual(Interval other) { | ||
33 | !mayBeGreaterThan(other) | ||
34 | } | ||
35 | |||
36 | def mayBeLessThanOrEqual(Interval other) { | ||
37 | !mustBeGreaterThan(other) | ||
38 | } | ||
39 | |||
40 | def mustBeGreaterThan(Interval other) { | ||
41 | other.mustBeLessThan(this) | ||
42 | } | ||
43 | |||
44 | def mayBeGreaterThan(Interval other) { | ||
45 | other.mayBeLessThan(this) | ||
46 | } | ||
47 | |||
48 | def mustBeGreaterThanOrEqual(Interval other) { | ||
49 | other.mustBeLessThanOrEqual(this) | ||
50 | } | ||
51 | |||
52 | def mayBeGreaterThanOrEqual(Interval other) { | ||
53 | other.mayBeLessThanOrEqual(this) | ||
54 | } | ||
55 | |||
56 | abstract def Interval min(Interval other) | ||
57 | |||
58 | abstract def Interval max(Interval other) | ||
59 | |||
60 | abstract def Interval join(Interval other) | ||
61 | |||
62 | def +() { | ||
63 | this | ||
64 | } | ||
65 | |||
66 | abstract def Interval -() | ||
67 | |||
68 | abstract def Interval +(Interval other) | ||
69 | |||
70 | abstract def Interval -(Interval other) | ||
71 | |||
72 | abstract def Interval *(int count) | ||
73 | |||
74 | abstract def Interval *(Interval other) | ||
75 | |||
76 | abstract def Interval /(Interval other) | ||
77 | |||
78 | abstract def Interval **(Interval other) | ||
79 | |||
80 | public static val EMPTY = new Interval { | ||
81 | override mustEqual(Interval other) { | ||
82 | true | ||
83 | } | ||
84 | |||
85 | override mayEqual(Interval other) { | ||
86 | false | ||
87 | } | ||
88 | |||
89 | override mustBeLessThan(Interval other) { | ||
90 | true | ||
91 | } | ||
92 | |||
93 | override mayBeLessThan(Interval other) { | ||
94 | false | ||
95 | } | ||
96 | |||
97 | override min(Interval other) { | ||
98 | EMPTY | ||
99 | } | ||
100 | |||
101 | override max(Interval other) { | ||
102 | EMPTY | ||
103 | } | ||
104 | |||
105 | override join(Interval other) { | ||
106 | other | ||
107 | } | ||
108 | |||
109 | override -() { | ||
110 | EMPTY | ||
111 | } | ||
112 | |||
113 | override +(Interval other) { | ||
114 | EMPTY | ||
115 | } | ||
116 | |||
117 | override -(Interval other) { | ||
118 | EMPTY | ||
119 | } | ||
120 | |||
121 | override *(int count) { | ||
122 | EMPTY | ||
123 | } | ||
124 | |||
125 | override *(Interval other) { | ||
126 | EMPTY | ||
127 | } | ||
128 | |||
129 | override /(Interval other) { | ||
130 | EMPTY | ||
131 | } | ||
132 | |||
133 | override **(Interval other) { | ||
134 | EMPTY | ||
135 | } | ||
136 | |||
137 | override toString() { | ||
138 | "∅" | ||
139 | } | ||
140 | |||
141 | override compareTo(Interval o) { | ||
142 | if (o == EMPTY) { | ||
143 | 0 | ||
144 | } else { | ||
145 | -1 | ||
146 | } | ||
147 | } | ||
148 | |||
149 | } | ||
150 | |||
151 | public static val Interval ZERO = new NonEmpty(BigDecimal.ZERO, BigDecimal.ZERO) | ||
152 | |||
153 | public static val Interval UNBOUNDED = new NonEmpty(null, null) | ||
154 | |||
155 | static def Interval of(BigDecimal lower, BigDecimal upper) { | ||
156 | new NonEmpty(lower, upper) | ||
157 | } | ||
158 | |||
159 | static def between(double lower, double upper) { | ||
160 | of(new BigDecimal(lower, ROUND_DOWN), new BigDecimal(upper, ROUND_UP)) | ||
161 | } | ||
162 | |||
163 | static def upTo(double upper) { | ||
164 | of(null, new BigDecimal(upper, ROUND_UP)) | ||
165 | } | ||
166 | |||
167 | static def above(double lower) { | ||
168 | of(new BigDecimal(lower, ROUND_DOWN), null) | ||
169 | } | ||
170 | |||
171 | @Data | ||
172 | private static class NonEmpty extends Interval { | ||
173 | val BigDecimal lower | ||
174 | val BigDecimal upper | ||
175 | |||
176 | /** | ||
177 | * Construct a new non-empty interval. | ||
178 | * | ||
179 | * @param lower The lower bound of the interval. Use <code>null</code> for negative infinity. | ||
180 | * @param upper The upper bound of the interval. Use <code>null</code> for positive infinity. | ||
181 | */ | ||
182 | new(BigDecimal lower, BigDecimal upper) { | ||
183 | if (lower !== null && upper !== null && lower > upper) { | ||
184 | throw new IllegalArgumentException("Lower bound of interval must not be larger than upper bound") | ||
185 | } | ||
186 | this.lower = lower | ||
187 | this.upper = upper | ||
188 | } | ||
189 | |||
190 | override mustEqual(Interval other) { | ||
191 | switch (other) { | ||
192 | case EMPTY: true | ||
193 | NonEmpty: lower == upper && lower == other.lower && lower == other.upper | ||
194 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
195 | } | ||
196 | } | ||
197 | |||
198 | override mayEqual(Interval other) { | ||
199 | if (other instanceof NonEmpty) { | ||
200 | (lower === null || other.upper === null || lower <= other.upper) && | ||
201 | (other.lower === null || upper === null || other.lower <= upper) | ||
202 | } else { | ||
203 | false | ||
204 | } | ||
205 | } | ||
206 | |||
207 | override mustBeLessThan(Interval other) { | ||
208 | switch (other) { | ||
209 | case EMPTY: true | ||
210 | NonEmpty: upper !== null && other.lower !== null && upper < other.lower | ||
211 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
212 | } | ||
213 | } | ||
214 | |||
215 | override mayBeLessThan(Interval other) { | ||
216 | if (other instanceof NonEmpty) { | ||
217 | lower === null || other.upper === null || lower < other.upper | ||
218 | } else { | ||
219 | false | ||
220 | } | ||
221 | } | ||
222 | |||
223 | override min(Interval other) { | ||
224 | switch (other) { | ||
225 | case EMPTY: this | ||
226 | NonEmpty: min(other) | ||
227 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
228 | } | ||
229 | } | ||
230 | |||
231 | def min(NonEmpty other) { | ||
232 | new NonEmpty( | ||
233 | lower.tryMin(other.lower), | ||
234 | if(other.upper === null) upper else if(upper === null) other.upper else upper.min(other.upper) | ||
235 | ) | ||
236 | } | ||
237 | |||
238 | override max(Interval other) { | ||
239 | switch (other) { | ||
240 | case EMPTY: this | ||
241 | NonEmpty: max(other) | ||
242 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
243 | } | ||
244 | } | ||
245 | |||
246 | def max(NonEmpty other) { | ||
247 | new NonEmpty( | ||
248 | if(other.lower === null) lower else if(lower === null) other.lower else lower.max(other.lower), | ||
249 | upper.tryMax(other.upper) | ||
250 | ) | ||
251 | } | ||
252 | |||
253 | override join(Interval other) { | ||
254 | switch (other) { | ||
255 | case EMPTY: this | ||
256 | NonEmpty: new NonEmpty(lower.tryMin(other.lower), upper.tryMax(other.upper)) | ||
257 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
258 | } | ||
259 | } | ||
260 | |||
261 | override -() { | ||
262 | new NonEmpty(upper?.negate(ROUND_DOWN), lower?.negate(ROUND_UP)) | ||
263 | } | ||
264 | |||
265 | override +(Interval other) { | ||
266 | switch (other) { | ||
267 | case EMPTY: EMPTY | ||
268 | NonEmpty: this + other | ||
269 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
270 | } | ||
271 | } | ||
272 | |||
273 | def +(NonEmpty other) { | ||
274 | new NonEmpty( | ||
275 | lower.tryAdd(other.lower, ROUND_DOWN), | ||
276 | upper.tryAdd(other.upper, ROUND_UP) | ||
277 | ) | ||
278 | } | ||
279 | |||
280 | private static def tryAdd(BigDecimal a, BigDecimal b, MathContext mc) { | ||
281 | if (b === null) { | ||
282 | null | ||
283 | } else { | ||
284 | a?.add(b, mc) | ||
285 | } | ||
286 | } | ||
287 | |||
288 | override -(Interval other) { | ||
289 | switch (other) { | ||
290 | case EMPTY: EMPTY | ||
291 | NonEmpty: this - other | ||
292 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
293 | } | ||
294 | } | ||
295 | |||
296 | def -(NonEmpty other) { | ||
297 | new NonEmpty( | ||
298 | lower.trySubtract(other.upper, ROUND_DOWN), | ||
299 | upper.trySubtract(other.lower, ROUND_UP) | ||
300 | ) | ||
301 | } | ||
302 | |||
303 | private static def trySubtract(BigDecimal a, BigDecimal b, MathContext mc) { | ||
304 | if (b === null) { | ||
305 | null | ||
306 | } else { | ||
307 | a?.subtract(b, mc) | ||
308 | } | ||
309 | } | ||
310 | |||
311 | override *(int count) { | ||
312 | val bigCount = new BigDecimal(count) | ||
313 | new NonEmpty( | ||
314 | lower.tryMultiply(bigCount, ROUND_DOWN), | ||
315 | upper.tryMultiply(bigCount, ROUND_UP) | ||
316 | ) | ||
317 | } | ||
318 | |||
319 | override *(Interval other) { | ||
320 | switch (other) { | ||
321 | case EMPTY: EMPTY | ||
322 | NonEmpty: this * other | ||
323 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
324 | } | ||
325 | } | ||
326 | |||
327 | def *(NonEmpty other) { | ||
328 | if (this == ZERO || other == ZERO) { | ||
329 | ZERO | ||
330 | } else if (nonpositive) { | ||
331 | if (other.nonpositive) { | ||
332 | new NonEmpty( | ||
333 | upper.multiply(other.upper, ROUND_DOWN), | ||
334 | lower.tryMultiply(other.lower, ROUND_UP) | ||
335 | ) | ||
336 | } else if (other.nonnegative) { | ||
337 | new NonEmpty( | ||
338 | lower.tryMultiply(other.upper, ROUND_DOWN), | ||
339 | upper.multiply(other.lower, ROUND_UP) | ||
340 | ) | ||
341 | } else { | ||
342 | new NonEmpty( | ||
343 | lower.tryMultiply(other.upper, ROUND_DOWN), | ||
344 | lower.tryMultiply(other.lower, ROUND_UP) | ||
345 | ) | ||
346 | } | ||
347 | } else if (nonnegative) { | ||
348 | if (other.nonpositive) { | ||
349 | new NonEmpty( | ||
350 | upper.tryMultiply(other.lower, ROUND_DOWN), | ||
351 | lower.multiply(other.upper, ROUND_UP) | ||
352 | ) | ||
353 | } else if (other.nonnegative) { | ||
354 | new NonEmpty( | ||
355 | lower.multiply(other.lower, ROUND_DOWN), | ||
356 | upper.tryMultiply(other.upper, ROUND_UP) | ||
357 | ) | ||
358 | } else { | ||
359 | new NonEmpty( | ||
360 | upper.tryMultiply(other.lower, ROUND_DOWN), | ||
361 | upper.tryMultiply(other.upper, ROUND_UP) | ||
362 | ) | ||
363 | } | ||
364 | } else { | ||
365 | if (other.nonpositive) { | ||
366 | new NonEmpty( | ||
367 | upper.tryMultiply(other.lower, ROUND_DOWN), | ||
368 | lower.tryMultiply(other.lower, ROUND_UP) | ||
369 | ) | ||
370 | } else if (other.nonnegative) { | ||
371 | new NonEmpty( | ||
372 | lower.tryMultiply(other.upper, ROUND_DOWN), | ||
373 | upper.tryMultiply(other.upper, ROUND_UP) | ||
374 | ) | ||
375 | } else { | ||
376 | new NonEmpty( | ||
377 | lower.tryMultiply(other.upper, ROUND_DOWN).tryMin(upper.tryMultiply(other.lower, ROUND_DOWN)), | ||
378 | lower.tryMultiply(other.lower, ROUND_UP).tryMax(upper.tryMultiply(other.upper, ROUND_UP)) | ||
379 | ) | ||
380 | } | ||
381 | } | ||
382 | } | ||
383 | |||
384 | private def isNonpositive() { | ||
385 | upper !== null && upper <= BigDecimal.ZERO | ||
386 | } | ||
387 | |||
388 | private def isNonnegative() { | ||
389 | lower !== null && lower >= BigDecimal.ZERO | ||
390 | } | ||
391 | |||
392 | private static def tryMultiply(BigDecimal a, BigDecimal b, MathContext mc) { | ||
393 | if (b === null) { | ||
394 | null | ||
395 | } else { | ||
396 | a?.multiply(b, mc) | ||
397 | } | ||
398 | } | ||
399 | |||
400 | private static def tryMin(BigDecimal a, BigDecimal b) { | ||
401 | if (b === null) { | ||
402 | null | ||
403 | } else { | ||
404 | a?.min(b) | ||
405 | } | ||
406 | } | ||
407 | |||
408 | private static def tryMax(BigDecimal a, BigDecimal b) { | ||
409 | if (b === null) { | ||
410 | null | ||
411 | } else { | ||
412 | a?.max(b) | ||
413 | } | ||
414 | } | ||
415 | |||
416 | override /(Interval other) { | ||
417 | switch (other) { | ||
418 | case EMPTY: EMPTY | ||
419 | NonEmpty: this / other | ||
420 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
421 | } | ||
422 | } | ||
423 | |||
424 | def /(NonEmpty other) { | ||
425 | if (other == ZERO) { | ||
426 | EMPTY | ||
427 | } else if (this == ZERO) { | ||
428 | ZERO | ||
429 | } else if (other.strictlyNegative) { | ||
430 | if (nonpositive) { | ||
431 | new NonEmpty( | ||
432 | upper.tryDivide(other.lower, ROUND_DOWN), | ||
433 | lower.tryDivide(other.upper, ROUND_UP) | ||
434 | ) | ||
435 | } else if (nonnegative) { | ||
436 | new NonEmpty( | ||
437 | upper.tryDivide(other.upper, ROUND_DOWN), | ||
438 | lower.tryDivide(other.lower, ROUND_UP) | ||
439 | ) | ||
440 | } else { // lower < 0 < upper | ||
441 | new NonEmpty( | ||
442 | upper.tryDivide(other.upper, ROUND_DOWN), | ||
443 | lower.tryDivide(other.upper, ROUND_UP) | ||
444 | ) | ||
445 | } | ||
446 | } else if (other.strictlyPositive) { | ||
447 | if (nonpositive) { | ||
448 | new NonEmpty( | ||
449 | lower.tryDivide(other.lower, ROUND_DOWN), | ||
450 | upper.tryDivide(other.upper, ROUND_UP) | ||
451 | ) | ||
452 | } else if (nonnegative) { | ||
453 | new NonEmpty( | ||
454 | lower.tryDivide(other.upper, ROUND_DOWN), | ||
455 | upper.tryDivide(other.lower, ROUND_UP) | ||
456 | ) | ||
457 | } else { // lower < 0 < upper | ||
458 | new NonEmpty( | ||
459 | lower.tryDivide(other.lower, ROUND_DOWN), | ||
460 | upper.tryDivide(other.lower, ROUND_UP) | ||
461 | ) | ||
462 | } | ||
463 | } else { // other contains 0 | ||
464 | if (other.lower == BigDecimal.ZERO) { // 0 == other.lower < other.upper, because [0, 0] was exluded earlier | ||
465 | if (nonpositive) { | ||
466 | new NonEmpty(null, upper.tryDivide(other.upper, ROUND_UP)) | ||
467 | } else if (nonnegative) { | ||
468 | new NonEmpty(lower.tryDivide(other.upper, ROUND_DOWN), null) | ||
469 | } else { // lower < 0 < upper | ||
470 | UNBOUNDED | ||
471 | } | ||
472 | } else if (other.upper == BigDecimal.ZERO) { // other.lower < other.upper == 0 | ||
473 | if (nonpositive) { | ||
474 | new NonEmpty(upper.tryDivide(other.lower, ROUND_DOWN), null) | ||
475 | } else if (nonnegative) { | ||
476 | new NonEmpty(null, lower.tryDivide(other.lower, ROUND_UP)) | ||
477 | } else { // lower < 0 < upper | ||
478 | UNBOUNDED | ||
479 | } | ||
480 | } else { // other.lower < 0 < other.upper | ||
481 | UNBOUNDED | ||
482 | } | ||
483 | } | ||
484 | } | ||
485 | |||
486 | private def isStrictlyNegative() { | ||
487 | upper !== null && upper < BigDecimal.ZERO | ||
488 | } | ||
489 | |||
490 | private def isStrictlyPositive() { | ||
491 | lower !== null && lower > BigDecimal.ZERO | ||
492 | } | ||
493 | |||
494 | private static def tryDivide(BigDecimal a, BigDecimal b, MathContext mc) { | ||
495 | if (b === null) { | ||
496 | BigDecimal.ZERO | ||
497 | } else { | ||
498 | a?.divide(b, mc) | ||
499 | } | ||
500 | } | ||
501 | |||
502 | override **(Interval other) { | ||
503 | switch (other) { | ||
504 | case EMPTY: EMPTY | ||
505 | NonEmpty: this ** other | ||
506 | default: throw new IllegalArgumentException("Unknown interval: " + other) | ||
507 | } | ||
508 | } | ||
509 | |||
510 | def **(NonEmpty other) { | ||
511 | // XXX This should use proper rounding for log and exp instead of | ||
512 | // converting to double. | ||
513 | // XXX We should not ignore (integer) powers of negative numbers. | ||
514 | val lowerLog = if (lower === null || lower <= BigDecimal.ZERO) { | ||
515 | null | ||
516 | } else { | ||
517 | new BigDecimal(Math.log(lower.doubleValue), ROUND_DOWN) | ||
518 | } | ||
519 | val upperLog = if (upper === null) { | ||
520 | null | ||
521 | } else if (upper == BigDecimal.ZERO) { | ||
522 | return ZERO | ||
523 | } else if (upper < BigDecimal.ZERO) { | ||
524 | return EMPTY | ||
525 | } else { | ||
526 | new BigDecimal(Math.log(upper.doubleValue), ROUND_UP) | ||
527 | } | ||
528 | val log = new NonEmpty(lowerLog, upperLog) | ||
529 | val product = log * other | ||
530 | if (product instanceof NonEmpty) { | ||
531 | val lowerResult = if (product.lower === null) { | ||
532 | BigDecimal.ZERO | ||
533 | } else { | ||
534 | new BigDecimal(Math.exp(product.lower.doubleValue), ROUND_DOWN) | ||
535 | } | ||
536 | val upperResult = if (product.upper === null) { | ||
537 | null | ||
538 | } else { | ||
539 | new BigDecimal(Math.exp(product.upper.doubleValue), ROUND_UP) | ||
540 | } | ||
541 | new NonEmpty(lowerResult, upperResult) | ||
542 | } else { | ||
543 | throw new IllegalArgumentException("Unknown interval: " + product) | ||
544 | } | ||
545 | } | ||
546 | |||
547 | override toString() { | ||
548 | '''«IF lower === null»(-∞«ELSE»[«lower»«ENDIF», «IF upper === null»∞)«ELSE»«upper»]«ENDIF»''' | ||
549 | } | ||
550 | |||
551 | override compareTo(Interval o) { | ||
552 | switch (o) { | ||
553 | case EMPTY: 1 | ||
554 | NonEmpty: compareTo(o) | ||
555 | default: throw new IllegalArgumentException("Unknown interval: " + o) | ||
556 | } | ||
557 | } | ||
558 | |||
559 | def compareTo(NonEmpty o) { | ||
560 | if (lower === null) { | ||
561 | if (o.lower !== null) { | ||
562 | return -1 | ||
563 | } | ||
564 | } else if (o.lower === null) { // lower !== null | ||
565 | return 1 | ||
566 | } else { // both lower and o.lower are finite | ||
567 | val lowerDifference = lower.compareTo(o.lower) | ||
568 | if (lowerDifference != 0) { | ||
569 | return lowerDifference | ||
570 | } | ||
571 | } | ||
572 | if (upper === null) { | ||
573 | if (o.upper === null) { | ||
574 | return 0 | ||
575 | } else { | ||
576 | return 1 | ||
577 | } | ||
578 | } else if (o.upper === null) { // upper !== null | ||
579 | return -1 | ||
580 | } | ||
581 | upper.compareTo(o.upper) | ||
582 | } | ||
583 | } | ||
584 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationMode.java b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationMode.java new file mode 100644 index 00000000..f106e305 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationMode.java | |||
@@ -0,0 +1,99 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval; | ||
2 | |||
3 | import java.util.function.BinaryOperator; | ||
4 | |||
5 | public enum IntervalAggregationMode implements BinaryOperator<Interval> { | ||
6 | SUM("intervalSum", "Sum a set of intervals") { | ||
7 | @Override | ||
8 | public IntervalRedBlackNode createNode(Interval interval) { | ||
9 | return new IntervalRedBlackNode(interval) { | ||
10 | public boolean isMultiplicitySensitive() { | ||
11 | return true; | ||
12 | } | ||
13 | |||
14 | public Interval multiply(Interval interval, int count) { | ||
15 | return interval.operator_multiply(count); | ||
16 | }; | ||
17 | |||
18 | @Override | ||
19 | public Interval op(Interval left, Interval right) { | ||
20 | return left.operator_plus(right); | ||
21 | } | ||
22 | }; | ||
23 | } | ||
24 | |||
25 | @Override | ||
26 | public Interval getNeutral() { | ||
27 | return Interval.ZERO; | ||
28 | } | ||
29 | }, | ||
30 | |||
31 | MIN("intervalMin", "Find the minimum a set of intervals") { | ||
32 | @Override | ||
33 | public IntervalRedBlackNode createNode(Interval interval) { | ||
34 | return new IntervalRedBlackNode(interval) { | ||
35 | @Override | ||
36 | public Interval op(Interval left, Interval right) { | ||
37 | return left.min(right); | ||
38 | } | ||
39 | }; | ||
40 | } | ||
41 | }, | ||
42 | |||
43 | MAX("intervalMax", "Find the maximum a set of intervals") { | ||
44 | @Override | ||
45 | public IntervalRedBlackNode createNode(Interval interval) { | ||
46 | return new IntervalRedBlackNode(interval) { | ||
47 | @Override | ||
48 | public Interval op(Interval left, Interval right) { | ||
49 | return left.max(right); | ||
50 | } | ||
51 | }; | ||
52 | } | ||
53 | }, | ||
54 | |||
55 | JOIN("intervalJoin", "Calculate the smallest interval containing all the intervals in a set") { | ||
56 | @Override | ||
57 | public IntervalRedBlackNode createNode(Interval interval) { | ||
58 | return new IntervalRedBlackNode(interval) { | ||
59 | @Override | ||
60 | public Interval op(Interval left, Interval right) { | ||
61 | return left.join(right); | ||
62 | } | ||
63 | }; | ||
64 | } | ||
65 | }; | ||
66 | |||
67 | private final String modeName; | ||
68 | private final String description; | ||
69 | private final IntervalRedBlackNode empty; | ||
70 | |||
71 | IntervalAggregationMode(String modeName, String description) { | ||
72 | this.modeName = modeName; | ||
73 | this.description = description; | ||
74 | empty = createNode(null); | ||
75 | } | ||
76 | |||
77 | public String getModeName() { | ||
78 | return modeName; | ||
79 | } | ||
80 | |||
81 | public String getDescription() { | ||
82 | return description; | ||
83 | } | ||
84 | |||
85 | public IntervalRedBlackNode getEmpty() { | ||
86 | return empty; | ||
87 | } | ||
88 | |||
89 | @Override | ||
90 | public Interval apply(Interval left, Interval right) { | ||
91 | return empty.op(left, right); | ||
92 | } | ||
93 | |||
94 | public abstract IntervalRedBlackNode createNode(Interval interval); | ||
95 | |||
96 | public Interval getNeutral() { | ||
97 | return Interval.EMPTY; | ||
98 | } | ||
99 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationOperator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationOperator.xtend new file mode 100644 index 00000000..21d3d73b --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalAggregationOperator.xtend | |||
@@ -0,0 +1,48 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval | ||
2 | |||
3 | import java.util.stream.Stream | ||
4 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.IMultisetAggregationOperator | ||
5 | import org.eclipse.xtend.lib.annotations.Accessors | ||
6 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
7 | |||
8 | @FinalFieldsConstructor | ||
9 | class IntervalAggregationOperator implements IMultisetAggregationOperator<Interval, IntervalRedBlackNode, Interval> { | ||
10 | @Accessors val IntervalAggregationMode mode | ||
11 | |||
12 | override getName() { | ||
13 | mode.modeName | ||
14 | } | ||
15 | |||
16 | override getShortDescription() { | ||
17 | mode.description | ||
18 | } | ||
19 | |||
20 | override createNeutral() { | ||
21 | mode.empty | ||
22 | } | ||
23 | |||
24 | override isNeutral(IntervalRedBlackNode result) { | ||
25 | result.leaf | ||
26 | } | ||
27 | |||
28 | override update(IntervalRedBlackNode oldResult, Interval updateValue, boolean isInsertion) { | ||
29 | if (isInsertion) { | ||
30 | val newNode = mode.createNode(updateValue) | ||
31 | oldResult.add(newNode) | ||
32 | } else { | ||
33 | oldResult.remove(updateValue) | ||
34 | } | ||
35 | } | ||
36 | |||
37 | override getAggregate(IntervalRedBlackNode result) { | ||
38 | if (result.leaf) { | ||
39 | mode.neutral | ||
40 | } else { | ||
41 | result.result | ||
42 | } | ||
43 | } | ||
44 | |||
45 | override aggregateStream(Stream<Interval> stream) { | ||
46 | stream.reduce(mode).orElse(mode.neutral) | ||
47 | } | ||
48 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalHullAggregatorOperator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalHullAggregatorOperator.xtend new file mode 100644 index 00000000..ce48eca1 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalHullAggregatorOperator.xtend | |||
@@ -0,0 +1,87 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval | ||
2 | |||
3 | import java.math.BigDecimal | ||
4 | import java.math.MathContext | ||
5 | import java.util.SortedMap | ||
6 | import java.util.TreeMap | ||
7 | import java.util.stream.Stream | ||
8 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.IMultisetAggregationOperator | ||
9 | |||
10 | abstract class IntervalHullAggregatorOperator<T extends Comparable<T>> implements IMultisetAggregationOperator<T, SortedMap<T, Integer>, Interval> { | ||
11 | protected new() { | ||
12 | } | ||
13 | |||
14 | override getName() { | ||
15 | "intervalHull" | ||
16 | } | ||
17 | |||
18 | override getShortDescription() { | ||
19 | "Calculates the interval hull of a set of numbers" | ||
20 | } | ||
21 | |||
22 | override createNeutral() { | ||
23 | new TreeMap | ||
24 | } | ||
25 | |||
26 | override getAggregate(SortedMap<T, Integer> result) { | ||
27 | if (result.neutral) { | ||
28 | Interval.EMPTY | ||
29 | } else { | ||
30 | toInterval(result.firstKey, result.lastKey) | ||
31 | } | ||
32 | } | ||
33 | |||
34 | protected abstract def BigDecimal toBigDecimal(T value, MathContext mc) | ||
35 | |||
36 | private def toInterval(T min, T max) { | ||
37 | Interval.of(min.toBigDecimal(Interval.ROUND_DOWN), max.toBigDecimal(Interval.ROUND_UP)) | ||
38 | } | ||
39 | |||
40 | override isNeutral(SortedMap<T, Integer> result) { | ||
41 | result.empty | ||
42 | } | ||
43 | |||
44 | override update(SortedMap<T, Integer> oldResult, T updateValue, boolean isInsertion) { | ||
45 | if (isInsertion) { | ||
46 | oldResult.compute(updateValue) [ key, value | | ||
47 | if (value === null) { | ||
48 | 1 | ||
49 | } else if (value > 0) { | ||
50 | value + 1 | ||
51 | } else { | ||
52 | throw new IllegalStateException("Invalid count: " + value) | ||
53 | } | ||
54 | ] | ||
55 | } else { | ||
56 | oldResult.compute(updateValue) [ key, value | | ||
57 | if (value === 1) { | ||
58 | null | ||
59 | } else if (value > 1) { | ||
60 | value - 1 | ||
61 | } else { | ||
62 | throw new IllegalStateException("Invalid count: " + value) | ||
63 | } | ||
64 | ] | ||
65 | } | ||
66 | oldResult | ||
67 | } | ||
68 | |||
69 | override aggregateStream(Stream<T> stream) { | ||
70 | val iterator = stream.iterator | ||
71 | if (!iterator.hasNext) { | ||
72 | return Interval.EMPTY | ||
73 | } | ||
74 | var min = iterator.next | ||
75 | var max = min | ||
76 | while (iterator.hasNext) { | ||
77 | val element = iterator.next | ||
78 | if (element.compareTo(min) < 0) { | ||
79 | min = element | ||
80 | } | ||
81 | if (element.compareTo(max) > 0) { | ||
82 | max = element | ||
83 | } | ||
84 | } | ||
85 | toInterval(min, max) | ||
86 | } | ||
87 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalRedBlackNode.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalRedBlackNode.xtend new file mode 100644 index 00000000..3aa575bc --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/IntervalRedBlackNode.xtend | |||
@@ -0,0 +1,177 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval | ||
2 | |||
3 | abstract class IntervalRedBlackNode extends RedBlackNode<IntervalRedBlackNode> { | ||
4 | public val Interval interval | ||
5 | public var int count = 1 | ||
6 | public var Interval result | ||
7 | |||
8 | new(Interval interval) { | ||
9 | this.interval = interval | ||
10 | } | ||
11 | |||
12 | def boolean isMultiplicitySensitive() { | ||
13 | false | ||
14 | } | ||
15 | |||
16 | def Interval multiply(Interval interval, int count) { | ||
17 | interval | ||
18 | } | ||
19 | |||
20 | abstract def Interval op(Interval left, Interval right) | ||
21 | |||
22 | override augment() { | ||
23 | val value = calcualteAugmentation() | ||
24 | if (result == value) { | ||
25 | false | ||
26 | } else { | ||
27 | result = value | ||
28 | true | ||
29 | } | ||
30 | } | ||
31 | |||
32 | private def calcualteAugmentation() { | ||
33 | var value = multiply(interval, count) | ||
34 | if (!left.leaf) { | ||
35 | value = op(value, left.result) | ||
36 | } | ||
37 | if (!right.leaf) { | ||
38 | value = op(value, right.result) | ||
39 | } | ||
40 | value | ||
41 | } | ||
42 | |||
43 | override assertNodeIsValid() { | ||
44 | super.assertNodeIsValid() | ||
45 | if (leaf) { | ||
46 | return | ||
47 | } | ||
48 | if (count <= 0) { | ||
49 | throw new IllegalStateException("Node with nonpositive count") | ||
50 | } | ||
51 | val value = calcualteAugmentation() | ||
52 | if (result != value) { | ||
53 | throw new IllegalStateException("Node with invalid augmentation: " + result + " != " + value) | ||
54 | } | ||
55 | } | ||
56 | |||
57 | override assertSubtreeIsValid() { | ||
58 | super.assertSubtreeIsValid() | ||
59 | assertNodeIsValid() | ||
60 | } | ||
61 | |||
62 | override compareTo(IntervalRedBlackNode other) { | ||
63 | if (leaf || other.leaf) { | ||
64 | throw new IllegalArgumentException("One of the nodes is a leaf node") | ||
65 | } | ||
66 | interval.compareTo(other.interval) | ||
67 | } | ||
68 | |||
69 | def add(IntervalRedBlackNode newNode) { | ||
70 | if (parent !== null) { | ||
71 | throw new IllegalArgumentException("This is not the root of a tree") | ||
72 | } | ||
73 | if (leaf) { | ||
74 | newNode.isRed = false | ||
75 | newNode.left = this | ||
76 | newNode.right = this | ||
77 | newNode.parent = null | ||
78 | newNode.augment | ||
79 | return newNode | ||
80 | } | ||
81 | val modifiedNode = addWithoutFixup(newNode) | ||
82 | if (modifiedNode === newNode) { | ||
83 | // Must augment here, because fixInsertion() might call augment() | ||
84 | // on a node repeatedly, which might lose the change notification the | ||
85 | // second time it is called, and the augmentation will fail to | ||
86 | // reach the root. | ||
87 | modifiedNode.augmentRecursively | ||
88 | modifiedNode.isRed = true | ||
89 | return modifiedNode.fixInsertion | ||
90 | } | ||
91 | if (multiplicitySensitive) { | ||
92 | modifiedNode.augmentRecursively | ||
93 | } | ||
94 | this | ||
95 | } | ||
96 | |||
97 | private def addWithoutFixup(IntervalRedBlackNode newNode) { | ||
98 | var node = this | ||
99 | while (!node.leaf) { | ||
100 | val comparison = node.interval.compareTo(newNode.interval) | ||
101 | if (comparison < 0) { | ||
102 | if (node.left.leaf) { | ||
103 | newNode.left = node.left | ||
104 | newNode.right = node.left | ||
105 | node.left = newNode | ||
106 | newNode.parent = node | ||
107 | return newNode | ||
108 | } else { | ||
109 | node = node.left | ||
110 | } | ||
111 | } else if (comparison > 0) { | ||
112 | if (node.right.leaf) { | ||
113 | newNode.left = node.right | ||
114 | newNode.right = node.right | ||
115 | node.right = newNode | ||
116 | newNode.parent = node | ||
117 | return newNode | ||
118 | } else { | ||
119 | node = node.right | ||
120 | } | ||
121 | } else { // comparison == 0 | ||
122 | newNode.parent = null | ||
123 | node.count++ | ||
124 | return node | ||
125 | } | ||
126 | } | ||
127 | throw new IllegalStateException("Reached leaf node while searching for insertion point") | ||
128 | } | ||
129 | |||
130 | private def augmentRecursively() { | ||
131 | for (var node = this; node !== null; node = node.parent) { | ||
132 | if (!node.augment) { | ||
133 | return | ||
134 | } | ||
135 | } | ||
136 | } | ||
137 | |||
138 | def remove(Interval interval) { | ||
139 | val node = find(interval) | ||
140 | node.count-- | ||
141 | if (node.count == 0) { | ||
142 | return node.remove | ||
143 | } | ||
144 | if (multiplicitySensitive) { | ||
145 | node.augmentRecursively | ||
146 | } | ||
147 | this | ||
148 | } | ||
149 | |||
150 | private def find(Interval interval) { | ||
151 | var node = this | ||
152 | while (!node.leaf) { | ||
153 | val comparison = node.interval.compareTo(interval) | ||
154 | if (comparison < 0) { | ||
155 | node = node.left | ||
156 | } else if (comparison > 0) { | ||
157 | node = node.right | ||
158 | } else { // comparison == 0 | ||
159 | return node | ||
160 | } | ||
161 | } | ||
162 | throw new IllegalStateException("Reached leaf node while searching for interval to remove") | ||
163 | } | ||
164 | |||
165 | override toString() { | ||
166 | if (leaf) { | ||
167 | "L" | ||
168 | } else { | ||
169 | ''' | ||
170 | «IF isRed»R«ELSE»B«ENDIF» «count»«interval» : «result» | ||
171 | «left» | ||
172 | «right» | ||
173 | ''' | ||
174 | } | ||
175 | } | ||
176 | |||
177 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/RedBlackNode.java b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/RedBlackNode.java new file mode 100644 index 00000000..8c40816b --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/RedBlackNode.java | |||
@@ -0,0 +1,1392 @@ | |||
1 | /* | ||
2 | * The MIT License (MIT) | ||
3 | * | ||
4 | * Copyright (c) 2016 btrekkie | ||
5 | * | ||
6 | * Permission is hereby granted, free of charge, to any person obtaining a copy | ||
7 | * of this software and associated documentation files (the "Software"), to deal | ||
8 | * in the Software without restriction, including without limitation the rights | ||
9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
10 | * copies of the Software, and to permit persons to whom the Software is | ||
11 | * furnished to do so, subject to the following conditions: | ||
12 | * | ||
13 | * The above copyright notice and this permission notice shall be included in all | ||
14 | * copies or substantial portions of the Software. | ||
15 | * | ||
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
22 | * SOFTWARE. | ||
23 | */ | ||
24 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval; | ||
25 | |||
26 | import java.lang.reflect.Array; | ||
27 | import java.util.Collection; | ||
28 | import java.util.Comparator; | ||
29 | import java.util.HashSet; | ||
30 | import java.util.Iterator; | ||
31 | import java.util.Set; | ||
32 | |||
33 | /** | ||
34 | * A node in a red-black tree ( https://en.wikipedia.org/wiki/Red%E2%80%93black_tree ). Compared to a class like Java's | ||
35 | * TreeMap, RedBlackNode is a low-level data structure. The internals of a node are exposed as public fields, allowing | ||
36 | * clients to directly observe and manipulate the structure of the tree. This gives clients flexibility, although it | ||
37 | * also enables them to violate the red-black or BST properties. The RedBlackNode class provides methods for performing | ||
38 | * various standard operations, such as insertion and removal. | ||
39 | * | ||
40 | * Unlike most implementations of binary search trees, RedBlackNode supports arbitrary augmentation. By subclassing | ||
41 | * RedBlackNode, clients can add arbitrary data and augmentation information to each node. For example, if we were to | ||
42 | * use a RedBlackNode subclass to implement a sorted set, the subclass would have a field storing an element in the set. | ||
43 | * If we wanted to keep track of the number of non-leaf nodes in each subtree, we would store this as a "size" field and | ||
44 | * override augment() to update this field. All RedBlackNode methods (such as "insert" and remove()) call augment() as | ||
45 | * necessary to correctly maintain the augmentation information, unless otherwise indicated. | ||
46 | * | ||
47 | * The values of the tree are stored in the non-leaf nodes. RedBlackNode does not support use cases where values must be | ||
48 | * stored in the leaf nodes. It is recommended that all of the leaf nodes in a given tree be the same (black) | ||
49 | * RedBlackNode instance, to save space. The root of an empty tree is a leaf node, as opposed to null. | ||
50 | * | ||
51 | * For reference, a red-black tree is a binary search tree satisfying the following properties: | ||
52 | * | ||
53 | * - Every node is colored red or black. | ||
54 | * - The leaf nodes, which are dummy nodes that do not store any values, are colored black. | ||
55 | * - The root is black. | ||
56 | * - Both children of each red node are black. | ||
57 | * - Every path from the root to a leaf contains the same number of black nodes. | ||
58 | * | ||
59 | * @param <N> The type of node in the tree. For example, we might have | ||
60 | * "class FooNode<T> extends RedBlackNode<FooNode<T>>". | ||
61 | * @author Bill Jacobs | ||
62 | */ | ||
63 | public abstract class RedBlackNode<N extends RedBlackNode<N>> implements Comparable<N> { | ||
64 | /** A Comparator that compares Comparable elements using their natural order. */ | ||
65 | private static final Comparator<Comparable<Object>> NATURAL_ORDER = new Comparator<Comparable<Object>>() { | ||
66 | @Override | ||
67 | public int compare(Comparable<Object> value1, Comparable<Object> value2) { | ||
68 | return value1.compareTo(value2); | ||
69 | } | ||
70 | }; | ||
71 | |||
72 | /** The parent of this node, if any. "parent" is null if this is a leaf node. */ | ||
73 | public N parent; | ||
74 | |||
75 | /** The left child of this node. "left" is null if this is a leaf node. */ | ||
76 | public N left; | ||
77 | |||
78 | /** The right child of this node. "right" is null if this is a leaf node. */ | ||
79 | public N right; | ||
80 | |||
81 | /** Whether the node is colored red, as opposed to black. */ | ||
82 | public boolean isRed; | ||
83 | |||
84 | /** | ||
85 | * Sets any augmentation information about the subtree rooted at this node that is stored in this node. For | ||
86 | * example, if we augment each node by subtree size (the number of non-leaf nodes in the subtree), this method would | ||
87 | * set the size field of this node to be equal to the size field of the left child plus the size field of the right | ||
88 | * child plus one. | ||
89 | * | ||
90 | * "Augmentation information" is information that we can compute about a subtree rooted at some node, preferably | ||
91 | * based only on the augmentation information in the node's two children and the information in the node. Examples | ||
92 | * of augmentation information are the sum of the values in a subtree and the number of non-leaf nodes in a subtree. | ||
93 | * Augmentation information may not depend on the colors of the nodes. | ||
94 | * | ||
95 | * This method returns whether the augmentation information in any of the ancestors of this node might have been | ||
96 | * affected by changes in this subtree since the last call to augment(). In the usual case, where the augmentation | ||
97 | * information depends only on the information in this node and the augmentation information in its immediate | ||
98 | * children, this is equivalent to whether the augmentation information changed as a result of this call to | ||
99 | * augment(). For example, in the case of subtree size, this returns whether the value of the size field prior to | ||
100 | * calling augment() differed from the size field of the left child plus the size field of the right child plus one. | ||
101 | * False positives are permitted. The return value is unspecified if we have not called augment() on this node | ||
102 | * before. | ||
103 | * | ||
104 | * This method may assume that this is not a leaf node. It may not assume that the augmentation information stored | ||
105 | * in any of the tree's nodes is correct. However, if the augmentation information stored in all of the node's | ||
106 | * descendants is correct, then the augmentation information stored in this node must be correct after calling | ||
107 | * augment(). | ||
108 | */ | ||
109 | public boolean augment() { | ||
110 | return false; | ||
111 | } | ||
112 | |||
113 | /** | ||
114 | * Throws a RuntimeException if we detect that this node locally violates any invariants specific to this subclass | ||
115 | * of RedBlackNode. For example, if this stores the size of the subtree rooted at this node, this should throw a | ||
116 | * RuntimeException if the size field of this is not equal to the size field of the left child plus the size field | ||
117 | * of the right child plus one. Note that we may call this on a leaf node. | ||
118 | * | ||
119 | * assertSubtreeIsValid() calls assertNodeIsValid() on each node, or at least starts to do so until it detects a | ||
120 | * problem. assertNodeIsValid() should assume the node is in a tree that satisfies all properties common to all | ||
121 | * red-black trees, as assertSubtreeIsValid() is responsible for such checks. assertNodeIsValid() should be | ||
122 | * "downward-looking", i.e. it should ignore any information in "parent", and it should be "local", i.e. it should | ||
123 | * only check a constant number of descendants. To include "global" checks, such as verifying the BST property | ||
124 | * concerning ordering, override assertSubtreeIsValid(). assertOrderIsValid is useful for checking the BST | ||
125 | * property. | ||
126 | */ | ||
127 | public void assertNodeIsValid() { | ||
128 | |||
129 | } | ||
130 | |||
131 | /** Returns whether this is a leaf node. */ | ||
132 | public boolean isLeaf() { | ||
133 | return left == null; | ||
134 | } | ||
135 | |||
136 | /** Returns the root of the tree that contains this node. */ | ||
137 | public N root() { | ||
138 | @SuppressWarnings("unchecked") | ||
139 | N node = (N)this; | ||
140 | while (node.parent != null) { | ||
141 | node = node.parent; | ||
142 | } | ||
143 | return node; | ||
144 | } | ||
145 | |||
146 | /** Returns the first node in the subtree rooted at this node, if any. */ | ||
147 | public N min() { | ||
148 | if (isLeaf()) { | ||
149 | return null; | ||
150 | } | ||
151 | @SuppressWarnings("unchecked") | ||
152 | N node = (N)this; | ||
153 | while (!node.left.isLeaf()) { | ||
154 | node = node.left; | ||
155 | } | ||
156 | return node; | ||
157 | } | ||
158 | |||
159 | /** Returns the last node in the subtree rooted at this node, if any. */ | ||
160 | public N max() { | ||
161 | if (isLeaf()) { | ||
162 | return null; | ||
163 | } | ||
164 | @SuppressWarnings("unchecked") | ||
165 | N node = (N)this; | ||
166 | while (!node.right.isLeaf()) { | ||
167 | node = node.right; | ||
168 | } | ||
169 | return node; | ||
170 | } | ||
171 | |||
172 | /** Returns the node immediately before this in the tree that contains this node, if any. */ | ||
173 | public N predecessor() { | ||
174 | if (!left.isLeaf()) { | ||
175 | N node; | ||
176 | for (node = left; !node.right.isLeaf(); node = node.right); | ||
177 | return node; | ||
178 | } else if (parent == null) { | ||
179 | return null; | ||
180 | } else { | ||
181 | @SuppressWarnings("unchecked") | ||
182 | N node = (N)this; | ||
183 | while (node.parent != null && node.parent.left == node) { | ||
184 | node = node.parent; | ||
185 | } | ||
186 | return node.parent; | ||
187 | } | ||
188 | } | ||
189 | |||
190 | /** Returns the node immediately after this in the tree that contains this node, if any. */ | ||
191 | public N successor() { | ||
192 | if (!right.isLeaf()) { | ||
193 | N node; | ||
194 | for (node = right; !node.left.isLeaf(); node = node.left); | ||
195 | return node; | ||
196 | } else if (parent == null) { | ||
197 | return null; | ||
198 | } else { | ||
199 | @SuppressWarnings("unchecked") | ||
200 | N node = (N)this; | ||
201 | while (node.parent != null && node.parent.right == node) { | ||
202 | node = node.parent; | ||
203 | } | ||
204 | return node.parent; | ||
205 | } | ||
206 | } | ||
207 | |||
/**
 * Performs a left rotation about this node. This method assumes that !isLeaf() && !right.isLeaf(). It calls
 * augment() on this node and on its resulting parent. However, it does not call augment() on any of the resulting
 * parent's ancestors, because that is normally the responsibility of the caller.
 * @return The return value from calling augment() on the resulting parent.
 */
public boolean rotateLeft() {
    if (isLeaf() || right.isLeaf()) {
        throw new IllegalArgumentException("The node or its right child is a leaf");
    }
    // The right child moves up to take this node's place; this node adopts the right child's former
    // left subtree as its new right subtree.
    N newParent = right;
    right = newParent.left;
    @SuppressWarnings("unchecked")
    N nThis = (N)this;
    if (!right.isLeaf()) {
        right.parent = nThis;
    }
    newParent.parent = parent;
    parent = newParent;
    newParent.left = nThis;
    // Re-point the former parent (if any) at the new root of this subtree.
    if (newParent.parent != null) {
        if (newParent.parent.left == this) {
            newParent.parent.left = newParent;
        } else {
            newParent.parent.right = newParent;
        }
    }
    // Refresh augmentation info bottom-up: this node first, then the node that replaced it.
    augment();
    return newParent.augment();
}
238 | |||
/**
 * Performs a right rotation about this node. This method assumes that !isLeaf() && !left.isLeaf(). It calls
 * augment() on this node and on its resulting parent. However, it does not call augment() on any of the resulting
 * parent's ancestors, because that is normally the responsibility of the caller.
 * @return The return value from calling augment() on the resulting parent.
 */
public boolean rotateRight() {
    if (isLeaf() || left.isLeaf()) {
        throw new IllegalArgumentException("The node or its left child is a leaf");
    }
    // Mirror image of rotateLeft(): the left child moves up to take this node's place; this node
    // adopts the left child's former right subtree as its new left subtree.
    N newParent = left;
    left = newParent.right;
    @SuppressWarnings("unchecked")
    N nThis = (N)this;
    if (!left.isLeaf()) {
        left.parent = nThis;
    }
    newParent.parent = parent;
    parent = newParent;
    newParent.right = nThis;
    // Re-point the former parent (if any) at the new root of this subtree.
    if (newParent.parent != null) {
        if (newParent.parent.left == this) {
            newParent.parent.left = newParent;
        } else {
            newParent.parent.right = newParent;
        }
    }
    // Refresh augmentation info bottom-up: this node first, then the node that replaced it.
    augment();
    return newParent.augment();
}
269 | |||
/**
 * Performs red-black insertion fixup. To be more precise, this fixes a tree that satisfies all of the requirements
 * of red-black trees, except that this may be a red child of a red node, and if this is the root, the root may be
 * red. node.isRed must initially be true. This method assumes that this is not a leaf node. The method performs
 * any rotations by calling rotateLeft() and rotateRight(). This method is more efficient than fixInsertion if
 * "augment" is false or augment() might return false.
 * @param augment Whether to set the augmentation information for "node" and its ancestors, by calling augment().
 */
public void fixInsertionWithoutGettingRoot(boolean augment) {
    if (!isRed) {
        throw new IllegalArgumentException("The node must be red");
    }
    // "changed" tracks whether the last augment() call reported a change; once it is false we can
    // stop propagating augmentation updates toward the root.
    boolean changed = augment;
    if (augment) {
        augment();
    }

    RedBlackNode<N> node = this;
    // Loop invariant: "node" is red, and the only possible violation is node being a red child of a
    // red parent (plus possibly a red root, fixed after the loop).
    while (node.parent != null && node.parent.isRed) {
        N parent = node.parent;
        N grandparent = parent.parent;
        if (grandparent.left.isRed && grandparent.right.isRed) {
            // Case: red uncle. Recolor parent and uncle black, grandparent red, and continue the
            // fixup from the grandparent.
            grandparent.left.isRed = false;
            grandparent.right.isRed = false;
            grandparent.isRed = true;

            if (changed) {
                changed = parent.augment();
                if (changed) {
                    changed = grandparent.augment();
                }
            }
            node = grandparent;
        } else {
            // Case: black uncle. At most two rotations finish the fixup, so we break afterward.
            // First, if "node" is an inner child, rotate it to the outside.
            if (parent.left == node) {
                if (grandparent.right == parent) {
                    parent.rotateRight();
                    node = parent;
                    parent = node.parent;
                }
            } else if (grandparent.left == parent) {
                parent.rotateLeft();
                node = parent;
                parent = node.parent;
            }

            // Then rotate the grandparent away from the outer red child.
            if (parent.left == node) {
                boolean grandparentChanged = grandparent.rotateRight();
                if (augment) {
                    changed = grandparentChanged;
                }
            } else {
                boolean grandparentChanged = grandparent.rotateLeft();
                if (augment) {
                    changed = grandparentChanged;
                }
            }

            parent.isRed = false;
            grandparent.isRed = true;
            node = parent;
            break;
        }
    }

    // A red root is always safe to blacken.
    if (node.parent == null) {
        node.isRed = false;
    }
    // Propagate augmentation info upward until it stops changing.
    if (changed) {
        for (node = node.parent; node != null; node = node.parent) {
            if (!node.augment()) {
                break;
            }
        }
    }
}
346 | |||
347 | /** | ||
348 | * Performs red-black insertion fixup. To be more precise, this fixes a tree that satisfies all of the requirements | ||
349 | * of red-black trees, except that this may be a red child of a red node, and if this is the root, the root may be | ||
350 | * red. node.isRed must initially be true. This method assumes that this is not a leaf node. The method performs | ||
351 | * any rotations by calling rotateLeft() and rotateRight(). This method is more efficient than fixInsertion() if | ||
352 | * augment() might return false. | ||
353 | */ | ||
354 | public void fixInsertionWithoutGettingRoot() { | ||
355 | fixInsertionWithoutGettingRoot(true); | ||
356 | } | ||
357 | |||
358 | /** | ||
359 | * Performs red-black insertion fixup. To be more precise, this fixes a tree that satisfies all of the requirements | ||
360 | * of red-black trees, except that this may be a red child of a red node, and if this is the root, the root may be | ||
361 | * red. node.isRed must initially be true. This method assumes that this is not a leaf node. The method performs | ||
362 | * any rotations by calling rotateLeft() and rotateRight(). | ||
363 | * @param augment Whether to set the augmentation information for "node" and its ancestors, by calling augment(). | ||
364 | * @return The root of the resulting tree. | ||
365 | */ | ||
366 | public N fixInsertion(boolean augment) { | ||
367 | fixInsertionWithoutGettingRoot(augment); | ||
368 | return root(); | ||
369 | } | ||
370 | |||
371 | /** | ||
372 | * Performs red-black insertion fixup. To be more precise, this fixes a tree that satisfies all of the requirements | ||
373 | * of red-black trees, except that this may be a red child of a red node, and if this is the root, the root may be | ||
374 | * red. node.isRed must initially be true. This method assumes that this is not a leaf node. The method performs | ||
375 | * any rotations by calling rotateLeft() and rotateRight(). | ||
376 | * @return The root of the resulting tree. | ||
377 | */ | ||
378 | public N fixInsertion() { | ||
379 | fixInsertionWithoutGettingRoot(true); | ||
380 | return root(); | ||
381 | } | ||
382 | |||
383 | /** Returns a Comparator that compares instances of N using their natural order, as in N.compareTo. */ | ||
384 | @SuppressWarnings({"rawtypes", "unchecked"}) | ||
385 | private Comparator<N> naturalOrder() { | ||
386 | Comparator comparator = (Comparator)NATURAL_ORDER; | ||
387 | return (Comparator<N>)comparator; | ||
388 | } | ||
389 | |||
/**
 * Inserts the specified node into the tree rooted at this node. Assumes this is the root. We treat newNode as a
 * solitary node that does not belong to any tree, and we ignore its initial "parent", "left", "right", and isRed
 * fields.
 *
 * If it is not efficient or convenient to find the location for a node using a Comparator, then you should manually
 * add the node to the appropriate location, color it red, and call fixInsertion().
 *
 * @param newNode The node to insert.
 * @param allowDuplicates Whether to insert newNode if there is an equal node in the tree. To check whether we
 *     inserted newNode, check whether newNode.parent is null and the return value differs from newNode.
 * @param comparator A comparator indicating where to put the node. If this is null, we use the nodes' natural
 *     order, as in N.compareTo. If you are passing null, then you must override the compareTo method, because the
 *     default implementation requires the nodes to already be in the same tree.
 * @return The root of the resulting tree.
 */
public N insert(N newNode, boolean allowDuplicates, Comparator<? super N> comparator) {
    if (parent != null) {
        throw new IllegalArgumentException("This is not the root of a tree");
    }
    @SuppressWarnings("unchecked")
    N nThis = (N)this;
    if (isLeaf()) {
        // Empty tree: newNode becomes the black root, with the leaf sentinel (this) as both children.
        newNode.isRed = false;
        newNode.left = nThis;
        newNode.right = nThis;
        newNode.parent = null;
        newNode.augment();
        return newNode;
    }
    if (comparator == null) {
        comparator = naturalOrder();
    }

    // Standard binary-search-tree descent to find the attachment point.
    N node = nThis;
    int comparison;
    while (true) {
        comparison = comparator.compare(newNode, node);
        if (comparison < 0) {
            if (!node.left.isLeaf()) {
                node = node.left;
            } else {
                // Attach newNode as the left child; node.left is the leaf sentinel, which becomes
                // both of newNode's children.
                newNode.left = node.left;
                newNode.right = node.left;
                node.left = newNode;
                newNode.parent = node;
                break;
            }
        } else if (comparison > 0 || allowDuplicates) {
            if (!node.right.isLeaf()) {
                node = node.right;
            } else {
                // Attach newNode as the right child, likewise wiring the leaf sentinel as children.
                newNode.left = node.right;
                newNode.right = node.right;
                node.right = newNode;
                newNode.parent = node;
                break;
            }
        } else {
            // An equal node exists and duplicates are disallowed: leave the tree unchanged.
            newNode.parent = null;
            return nThis;
        }
    }
    // Color the new node red and restore the red-black invariants.
    newNode.isRed = true;
    return newNode.fixInsertion();
}
456 | |||
/**
 * Moves this node to its successor's former position in the tree and vice versa, i.e. sets the "left", "right",
 * "parent", and isRed fields of each. This method assumes that this is not a leaf node.
 * @return The node with which we swapped.
 */
private N swapWithSuccessor() {
    N replacement = successor();
    // Save the successor's original links and color before overwriting them.
    boolean oldReplacementIsRed = replacement.isRed;
    N oldReplacementLeft = replacement.left;
    N oldReplacementRight = replacement.right;
    N oldReplacementParent = replacement.parent;

    // Give the successor this node's position, color, and children.
    replacement.isRed = isRed;
    replacement.left = left;
    replacement.right = right;
    replacement.parent = parent;
    if (parent != null) {
        if (parent.left == this) {
            parent.left = replacement;
        } else {
            parent.right = replacement;
        }
    }

    // Give this node the successor's former position and color.
    @SuppressWarnings("unchecked")
    N nThis = (N)this;
    isRed = oldReplacementIsRed;
    left = oldReplacementLeft;
    right = oldReplacementRight;
    if (oldReplacementParent == this) {
        // Special case: the successor was this node's right child, so the two are now directly linked.
        parent = replacement;
        parent.right = nThis;
    } else {
        // Otherwise the successor was the leftmost node of the right subtree, i.e. a left child.
        parent = oldReplacementParent;
        parent.left = nThis;
    }

    // Fix the parent pointers of the children that changed hands (leaf sentinels keep no parent link).
    replacement.right.parent = replacement;
    if (!replacement.left.isLeaf()) {
        replacement.left.parent = replacement;
    }
    if (!right.isLeaf()) {
        right.parent = nThis;
    }
    return replacement;
}
503 | |||
/**
 * Performs red-black deletion fixup. To be more precise, this fixes a tree that satisfies all of the requirements
 * of red-black trees, except that all paths from the root to a leaf that pass through the sibling of this node have
 * one fewer black node than all other root-to-leaf paths. This method assumes that this is not a leaf node.
 */
private void fixSiblingDeletion() {
    RedBlackNode<N> sibling = this;
    // "changed" tracks whether augmentation info still needs propagating; the haveAugmented* flags
    // record which ancestors rotations have already augmented, to avoid redundant augment() calls.
    boolean changed = true;
    boolean haveAugmentedParent = false;
    boolean haveAugmentedGrandparent = false;
    while (true) {
        N parent = sibling.parent;
        if (sibling.isRed) {
            // Case: red sibling. Rotate so the deficient side gains a black sibling, then re-examine.
            parent.isRed = true;
            sibling.isRed = false;
            if (parent.left == sibling) {
                changed = parent.rotateRight();
                sibling = parent.left;
            } else {
                changed = parent.rotateLeft();
                sibling = parent.right;
            }
            haveAugmentedParent = true;
            haveAugmentedGrandparent = true;
        } else if (!sibling.left.isRed && !sibling.right.isRed) {
            // Case: black sibling with two black children. Recolor the sibling red, which balances
            // the subtree rooted at "parent" but leaves it one black node short overall.
            sibling.isRed = true;
            if (parent.isRed) {
                // A red parent can absorb the deficit by turning black: done.
                parent.isRed = false;
                break;
            } else {
                // Move the deficit up one level and continue from the parent's sibling.
                if (changed && !haveAugmentedParent) {
                    changed = parent.augment();
                }
                N grandparent = parent.parent;
                if (grandparent == null) {
                    break;
                } else if (grandparent.left == parent) {
                    sibling = grandparent.right;
                } else {
                    sibling = grandparent.left;
                }
                haveAugmentedParent = haveAugmentedGrandparent;
                haveAugmentedGrandparent = false;
            }
        } else {
            // Case: black sibling with at least one red child. At most two rotations finish the fixup.
            // First, if the sibling's red child is on the inner side, rotate it to the outer side.
            if (sibling == parent.left) {
                if (!sibling.left.isRed) {
                    sibling.rotateLeft();
                    sibling = sibling.parent;
                }
            } else if (!sibling.right.isRed) {
                sibling.rotateRight();
                sibling = sibling.parent;
            }
            // Then rotate the parent toward the deficient side, transferring colors appropriately.
            sibling.isRed = parent.isRed;
            parent.isRed = false;
            if (sibling == parent.left) {
                sibling.left.isRed = false;
                changed = parent.rotateRight();
            } else {
                sibling.right.isRed = false;
                changed = parent.rotateLeft();
            }
            haveAugmentedParent = haveAugmentedGrandparent;
            haveAugmentedGrandparent = false;
            break;
        }
    }

    // Update augmentation info
    N parent = sibling.parent;
    if (changed && parent != null) {
        if (!haveAugmentedParent) {
            changed = parent.augment();
        }
        if (changed && parent.parent != null) {
            parent = parent.parent;
            if (!haveAugmentedGrandparent) {
                changed = parent.augment();
            }
            // Continue upward until augment() reports no further change.
            if (changed) {
                for (parent = parent.parent; parent != null; parent = parent.parent) {
                    if (!parent.augment()) {
                        break;
                    }
                }
            }
        }
    }
}
594 | |||
/**
 * Removes this node from the tree that contains it. The effect of this method on the fields of this node is
 * unspecified. This method assumes that this is not a leaf node. This method is more efficient than remove() if
 * augment() might return false.
 *
 * If the node has two children, we begin by moving the node's successor to its former position, by changing the
 * successor's "left", "right", "parent", and isRed fields.
 */
public void removeWithoutGettingRoot() {
    if (isLeaf()) {
        throw new IllegalArgumentException("Attempted to remove a leaf node");
    }
    // If this node has two children, swap positions with the successor so that the node to unlink
    // has at most one non-leaf child.
    N replacement;
    if (left.isLeaf() || right.isLeaf()) {
        replacement = null;
    } else {
        replacement = swapWithSuccessor();
    }

    // Identify the single non-leaf child, if any.
    N child;
    if (!left.isLeaf()) {
        child = left;
    } else if (!right.isLeaf()) {
        child = right;
    } else {
        child = null;
    }

    if (child != null) {
        // Replace this node with its child
        child.parent = parent;
        if (parent != null) {
            if (parent.left == this) {
                parent.left = child;
            } else {
                parent.right = child;
            }
        }
        // The lone child of a removed node must be red (red-black invariant), so blackening it
        // restores the black height; no structural fixup is needed.
        child.isRed = false;

        // Propagate augmentation changes toward the root.
        if (child.parent != null) {
            N parent;
            for (parent = child.parent; parent != null; parent = parent.parent) {
                if (!parent.augment()) {
                    break;
                }
            }
        }
    } else if (parent != null) {
        // Replace this node with a leaf node
        N leaf = left;
        N parent = this.parent;
        N sibling;
        if (parent.left == this) {
            parent.left = leaf;
            sibling = parent.right;
        } else {
            parent.right = leaf;
            sibling = parent.left;
        }

        if (!isRed) {
            // Removing a black node shortens the paths through its position: run deletion fixup
            // starting from the sibling.
            RedBlackNode<N> siblingNode = sibling;
            siblingNode.fixSiblingDeletion();
        } else {
            // Removing a red node preserves all invariants; only augmentation info needs updating.
            while (parent != null) {
                if (!parent.augment()) {
                    break;
                }
                parent = parent.parent;
            }
        }
    }

    // If we swapped with the successor, its subtree contents changed; refresh its augmentation info
    // and that of its ancestors.
    if (replacement != null) {
        replacement.augment();
        for (N parent = replacement.parent; parent != null; parent = parent.parent) {
            if (!parent.augment()) {
                break;
            }
        }
    }

    // Clear any previously existing links, so that we're more likely to encounter an exception if we attempt to
    // access the removed node
    parent = null;
    left = null;
    right = null;
    isRed = true;
}
685 | |||
686 | /** | ||
687 | * Removes this node from the tree that contains it. The effect of this method on the fields of this node is | ||
688 | * unspecified. This method assumes that this is not a leaf node. | ||
689 | * | ||
690 | * If the node has two children, we begin by moving the node's successor to its former position, by changing the | ||
691 | * successor's "left", "right", "parent", and isRed fields. | ||
692 | * | ||
693 | * @return The root of the resulting tree. | ||
694 | */ | ||
695 | public N remove() { | ||
696 | if (isLeaf()) { | ||
697 | throw new IllegalArgumentException("Attempted to remove a leaf node"); | ||
698 | } | ||
699 | |||
700 | // Find an arbitrary non-leaf node in the tree other than this node | ||
701 | N node; | ||
702 | if (parent != null) { | ||
703 | node = parent; | ||
704 | } else if (!left.isLeaf()) { | ||
705 | node = left; | ||
706 | } else if (!right.isLeaf()) { | ||
707 | node = right; | ||
708 | } else { | ||
709 | return left; | ||
710 | } | ||
711 | |||
712 | removeWithoutGettingRoot(); | ||
713 | return node.root(); | ||
714 | } | ||
715 | |||
716 | /** | ||
717 | * Returns the root of a perfectly height-balanced subtree containing the next "size" (non-leaf) nodes from | ||
718 | * "iterator", in iteration order. This method is responsible for setting the "left", "right", "parent", and isRed | ||
719 | * fields of the nodes, and calling augment() as appropriate. It ignores the initial values of the "left", "right", | ||
720 | * "parent", and isRed fields. | ||
721 | * @param iterator The nodes. | ||
722 | * @param size The number of nodes. | ||
723 | * @param height The "height" of the subtree's root node above the deepest leaf in the tree that contains it. Since | ||
724 | * insertion fixup is slow if there are too many red nodes and deleteion fixup is slow if there are too few red | ||
725 | * nodes, we compromise and have red nodes at every fourth level. We color a node red iff its "height" is equal | ||
726 | * to 1 mod 4. | ||
727 | * @param leaf The leaf node. | ||
728 | * @return The root of the subtree. | ||
729 | */ | ||
730 | private static <N extends RedBlackNode<N>> N createTree( | ||
731 | Iterator<? extends N> iterator, int size, int height, N leaf) { | ||
732 | if (size == 0) { | ||
733 | return leaf; | ||
734 | } else { | ||
735 | N left = createTree(iterator, (size - 1) / 2, height - 1, leaf); | ||
736 | N node = iterator.next(); | ||
737 | N right = createTree(iterator, size / 2, height - 1, leaf); | ||
738 | |||
739 | node.isRed = height % 4 == 1; | ||
740 | node.left = left; | ||
741 | node.right = right; | ||
742 | if (!left.isLeaf()) { | ||
743 | left.parent = node; | ||
744 | } | ||
745 | if (!right.isLeaf()) { | ||
746 | right.parent = node; | ||
747 | } | ||
748 | |||
749 | node.augment(); | ||
750 | return node; | ||
751 | } | ||
752 | } | ||
753 | |||
754 | /** | ||
755 | * Returns the root of a perfectly height-balanced tree containing the specified nodes, in iteration order. This | ||
756 | * method is responsible for setting the "left", "right", "parent", and isRed fields of the nodes (excluding | ||
757 | * "leaf"), and calling augment() as appropriate. It ignores the initial values of the "left", "right", "parent", | ||
758 | * and isRed fields. | ||
759 | * @param nodes The nodes. | ||
760 | * @param leaf The leaf node. | ||
761 | * @return The root of the tree. | ||
762 | */ | ||
763 | public static <N extends RedBlackNode<N>> N createTree(Collection<? extends N> nodes, N leaf) { | ||
764 | int size = nodes.size(); | ||
765 | if (size == 0) { | ||
766 | return leaf; | ||
767 | } | ||
768 | |||
769 | int height = 0; | ||
770 | for (int subtreeSize = size; subtreeSize > 0; subtreeSize /= 2) { | ||
771 | height++; | ||
772 | } | ||
773 | |||
774 | N node = createTree(nodes.iterator(), size, height, leaf); | ||
775 | node.parent = null; | ||
776 | node.isRed = false; | ||
777 | return node; | ||
778 | } | ||
779 | |||
/**
 * Concatenates to the end of the tree rooted at this node. To be precise, given that all of the nodes in this
 * precede the node "pivot", which precedes all of the nodes in "last", this returns the root of a tree containing
 * all of these nodes. This method destroys the trees rooted at "this" and "last". We treat "pivot" as a solitary
 * node that does not belong to any tree, and we ignore its initial "parent", "left", "right", and isRed fields.
 * This method assumes that this node and "last" are the roots of their respective trees.
 *
 * This method takes O(log N) time. It is more efficient than inserting "pivot" and then calling concatenate(last).
 * It is considerably more efficient than inserting "pivot" and all of the nodes in "last".
 */
public N concatenate(N last, N pivot) {
    // If the black height of "first", where first = this, is less than or equal to that of "last", starting at the
    // root of "last", we keep going left until we reach a black node whose black height is equal to that of
    // "first". Then, we make "pivot" the parent of that node and of "first", coloring it red, and perform
    // insertion fixup on the pivot. If the black height of "first" is greater than that of "last", we do the
    // mirror image of the above.

    if (parent != null) {
        throw new IllegalArgumentException("This is not the root of a tree");
    }
    if (last.parent != null) {
        throw new IllegalArgumentException("\"last\" is not the root of a tree");
    }

    // Compute the black height of the trees
    // (walk down the rightmost spine, counting black nodes until we fall off the leaf).
    int firstBlackHeight = 0;
    @SuppressWarnings("unchecked")
    N first = (N)this;
    for (N node = first; node != null; node = node.right) {
        if (!node.isRed) {
            firstBlackHeight++;
        }
    }
    int lastBlackHeight = 0;
    for (N node = last; node != null; node = node.right) {
        if (!node.isRed) {
            lastBlackHeight++;
        }
    }

    // Identify the children and parent of pivot
    N firstChild = first;
    N lastChild = last;
    N parent;
    if (firstBlackHeight <= lastBlackHeight) {
        // Descend the left spine of "last" to a black node whose black height matches "first".
        parent = null;
        int blackHeight = lastBlackHeight;
        while (blackHeight > firstBlackHeight) {
            if (!lastChild.isRed) {
                blackHeight--;
            }
            parent = lastChild;
            lastChild = lastChild.left;
        }
        // The attachment point must be black; step past a red node if necessary.
        if (lastChild.isRed) {
            parent = lastChild;
            lastChild = lastChild.left;
        }
    } else {
        // Mirror image: descend the right spine of "first".
        parent = null;
        int blackHeight = firstBlackHeight;
        while (blackHeight > lastBlackHeight) {
            if (!firstChild.isRed) {
                blackHeight--;
            }
            parent = firstChild;
            firstChild = firstChild.right;
        }
        if (firstChild.isRed) {
            parent = firstChild;
            firstChild = firstChild.right;
        }
    }

    // Add "pivot" to the tree
    pivot.isRed = true;
    pivot.parent = parent;
    if (parent != null) {
        if (firstBlackHeight < lastBlackHeight) {
            parent.left = pivot;
        } else {
            parent.right = pivot;
        }
    }
    pivot.left = firstChild;
    if (!firstChild.isLeaf()) {
        firstChild.parent = pivot;
    }
    pivot.right = lastChild;
    if (!lastChild.isLeaf()) {
        lastChild.parent = pivot;
    }

    // Perform insertion fixup
    return pivot.fixInsertion();
}
876 | |||
877 | /** | ||
878 | * Concatenates the tree rooted at "last" to the end of the tree rooted at this node. To be precise, given that all | ||
879 | * of the nodes in this precede all of the nodes in "last", this returns the root of a tree containing all of these | ||
880 | * nodes. This method destroys the trees rooted at "this" and "last". It assumes that this node and "last" are the | ||
881 | * roots of their respective trees. This method takes O(log N) time. It is considerably more efficient than | ||
882 | * inserting all of the nodes in "last". | ||
883 | */ | ||
884 | public N concatenate(N last) { | ||
885 | if (parent != null || last.parent != null) { | ||
886 | throw new IllegalArgumentException("The node is not the root of a tree"); | ||
887 | } | ||
888 | if (isLeaf()) { | ||
889 | return last; | ||
890 | } else if (last.isLeaf()) { | ||
891 | @SuppressWarnings("unchecked") | ||
892 | N nThis = (N)this; | ||
893 | return nThis; | ||
894 | } else { | ||
895 | N node = last.min(); | ||
896 | last = node.remove(); | ||
897 | return concatenate(last, node); | ||
898 | } | ||
899 | } | ||
900 | |||
901 | /** | ||
902 | * Splits the tree rooted at this node into two trees, so that the first element of the return value is the root of | ||
903 | * a tree consisting of the nodes that were before the specified node, and the second element of the return value is | ||
904 | * the root of a tree consisting of the nodes that were equal to or after the specified node. This method is | ||
905 | * destructive, meaning it does not preserve the original tree. It assumes that this node is the root and is in the | ||
906 | * same tree as splitNode. It takes O(log N) time. It is considerably more efficient than removing all of the | ||
907 | * nodes at or after splitNode and then creating a new tree from those nodes. | ||
908 | * @param The node at which to split the tree. | ||
909 | * @return An array consisting of the resulting trees. | ||
910 | */ | ||
911 | public N[] split(N splitNode) { | ||
912 | // To split the tree, we accumulate a pre-split tree and a post-split tree. We walk down the tree toward the | ||
913 | // position where we are splitting. Whenever we go left, we concatenate the right subtree with the post-split | ||
914 | // tree, and whenever we go right, we concatenate the pre-split tree with the left subtree. We use the | ||
915 | // concatenation algorithm described in concatenate(Object, Object). For the pivot, we use the last node where | ||
916 | // we went left in the case of a left move, and the last node where we went right in the case of a right move. | ||
917 | // | ||
918 | // The method uses the following variables: | ||
919 | // | ||
920 | // node: The current node in our walk down the tree. | ||
921 | // first: A node on the right spine of the pre-split tree. At the beginning of each iteration, it is the black | ||
922 | // node with the same black height as "node". If the pre-split tree is empty, this is null instead. | ||
923 | // firstParent: The parent of "first". If the pre-split tree is empty, this is null. Otherwise, this is the | ||
924 | // same as first.parent, unless first.isLeaf(). | ||
925 | // firstPivot: The node where we last went right, i.e. the next node to use as a pivot when concatenating with | ||
926 | // the pre-split tree. | ||
927 | // advanceFirst: Whether to set "first" to be its next black descendant at the end of the loop. | ||
928 | // last, lastParent, lastPivot, advanceLast: Analogous to "first", firstParent, firstPivot, and advanceFirst, | ||
929 | // but for the post-split tree. | ||
930 | if (parent != null) { | ||
931 | throw new IllegalArgumentException("This is not the root of a tree"); | ||
932 | } | ||
933 | if (isLeaf() || splitNode.isLeaf()) { | ||
934 | throw new IllegalArgumentException("The root or the split node is a leaf"); | ||
935 | } | ||
936 | |||
937 | // Create an array containing the path from the root to splitNode | ||
938 | int depth = 1; | ||
939 | N parent; | ||
940 | for (parent = splitNode; parent.parent != null; parent = parent.parent) { | ||
941 | depth++; | ||
942 | } | ||
943 | if (parent != this) { | ||
944 | throw new IllegalArgumentException("The split node does not belong to this tree"); | ||
945 | } | ||
946 | RedBlackNode<?>[] path = new RedBlackNode<?>[depth]; | ||
947 | for (parent = splitNode; parent != null; parent = parent.parent) { | ||
948 | depth--; | ||
949 | path[depth] = parent; | ||
950 | } | ||
951 | |||
952 | @SuppressWarnings("unchecked") | ||
953 | N node = (N)this; | ||
954 | N first = null; | ||
955 | N firstParent = null; | ||
956 | N last = null; | ||
957 | N lastParent = null; | ||
958 | N firstPivot = null; | ||
959 | N lastPivot = null; | ||
960 | while (!node.isLeaf()) { | ||
961 | boolean advanceFirst = !node.isRed && firstPivot != null; | ||
962 | boolean advanceLast = !node.isRed && lastPivot != null; | ||
963 | if ((depth + 1 < path.length && path[depth + 1] == node.left) || depth + 1 == path.length) { | ||
964 | // Left move | ||
965 | if (lastPivot == null) { | ||
966 | // The post-split tree is empty | ||
967 | last = node.right; | ||
968 | last.parent = null; | ||
969 | if (last.isRed) { | ||
970 | last.isRed = false; | ||
971 | lastParent = last; | ||
972 | last = last.left; | ||
973 | } | ||
974 | } else { | ||
975 | // Concatenate node.right and the post-split tree | ||
976 | if (node.right.isRed) { | ||
977 | node.right.isRed = false; | ||
978 | } else if (!node.isRed) { | ||
979 | lastParent = last; | ||
980 | last = last.left; | ||
981 | if (last.isRed) { | ||
982 | lastParent = last; | ||
983 | last = last.left; | ||
984 | } | ||
985 | advanceLast = false; | ||
986 | } | ||
987 | lastPivot.isRed = true; | ||
988 | lastPivot.parent = lastParent; | ||
989 | if (lastParent != null) { | ||
990 | lastParent.left = lastPivot; | ||
991 | } | ||
992 | lastPivot.left = node.right; | ||
993 | if (!lastPivot.left.isLeaf()) { | ||
994 | lastPivot.left.parent = lastPivot; | ||
995 | } | ||
996 | lastPivot.right = last; | ||
997 | if (!last.isLeaf()) { | ||
998 | last.parent = lastPivot; | ||
999 | } | ||
1000 | last = lastPivot.left; | ||
1001 | lastParent = lastPivot; | ||
1002 | lastPivot.fixInsertionWithoutGettingRoot(false); | ||
1003 | } | ||
1004 | lastPivot = node; | ||
1005 | node = node.left; | ||
1006 | } else { | ||
1007 | // Right move | ||
1008 | if (firstPivot == null) { | ||
1009 | // The pre-split tree is empty | ||
1010 | first = node.left; | ||
1011 | first.parent = null; | ||
1012 | if (first.isRed) { | ||
1013 | first.isRed = false; | ||
1014 | firstParent = first; | ||
1015 | first = first.right; | ||
1016 | } | ||
1017 | } else { | ||
1018 | // Concatenate the post-split tree and node.left | ||
1019 | if (node.left.isRed) { | ||
1020 | node.left.isRed = false; | ||
1021 | } else if (!node.isRed) { | ||
1022 | firstParent = first; | ||
1023 | first = first.right; | ||
1024 | if (first.isRed) { | ||
1025 | firstParent = first; | ||
1026 | first = first.right; | ||
1027 | } | ||
1028 | advanceFirst = false; | ||
1029 | } | ||
1030 | firstPivot.isRed = true; | ||
1031 | firstPivot.parent = firstParent; | ||
1032 | if (firstParent != null) { | ||
1033 | firstParent.right = firstPivot; | ||
1034 | } | ||
1035 | firstPivot.right = node.left; | ||
1036 | if (!firstPivot.right.isLeaf()) { | ||
1037 | firstPivot.right.parent = firstPivot; | ||
1038 | } | ||
1039 | firstPivot.left = first; | ||
1040 | if (!first.isLeaf()) { | ||
1041 | first.parent = firstPivot; | ||
1042 | } | ||
1043 | first = firstPivot.right; | ||
1044 | firstParent = firstPivot; | ||
1045 | firstPivot.fixInsertionWithoutGettingRoot(false); | ||
1046 | } | ||
1047 | firstPivot = node; | ||
1048 | node = node.right; | ||
1049 | } | ||
1050 | |||
1051 | depth++; | ||
1052 | |||
1053 | // Update "first" and "last" to be the nodes at the proper black height | ||
1054 | if (advanceFirst) { | ||
1055 | firstParent = first; | ||
1056 | first = first.right; | ||
1057 | if (first.isRed) { | ||
1058 | firstParent = first; | ||
1059 | first = first.right; | ||
1060 | } | ||
1061 | } | ||
1062 | if (advanceLast) { | ||
1063 | lastParent = last; | ||
1064 | last = last.left; | ||
1065 | if (last.isRed) { | ||
1066 | lastParent = last; | ||
1067 | last = last.left; | ||
1068 | } | ||
1069 | } | ||
1070 | } | ||
1071 | |||
1072 | // Add firstPivot to the pre-split tree | ||
1073 | N leaf = node; | ||
1074 | if (first == null) { | ||
1075 | first = leaf; | ||
1076 | } else { | ||
1077 | firstPivot.isRed = true; | ||
1078 | firstPivot.parent = firstParent; | ||
1079 | if (firstParent != null) { | ||
1080 | firstParent.right = firstPivot; | ||
1081 | } | ||
1082 | firstPivot.left = leaf; | ||
1083 | firstPivot.right = leaf; | ||
1084 | firstPivot.fixInsertionWithoutGettingRoot(false); | ||
1085 | for (first = firstPivot; first.parent != null; first = first.parent) { | ||
1086 | first.augment(); | ||
1087 | } | ||
1088 | first.augment(); | ||
1089 | } | ||
1090 | |||
1091 | // Add lastPivot to the post-split tree | ||
1092 | lastPivot.isRed = true; | ||
1093 | lastPivot.parent = lastParent; | ||
1094 | if (lastParent != null) { | ||
1095 | lastParent.left = lastPivot; | ||
1096 | } | ||
1097 | lastPivot.left = leaf; | ||
1098 | lastPivot.right = leaf; | ||
1099 | lastPivot.fixInsertionWithoutGettingRoot(false); | ||
1100 | for (last = lastPivot; last.parent != null; last = last.parent) { | ||
1101 | last.augment(); | ||
1102 | } | ||
1103 | last.augment(); | ||
1104 | |||
1105 | @SuppressWarnings("unchecked") | ||
1106 | N[] result = (N[])Array.newInstance(getClass(), 2); | ||
1107 | result[0] = first; | ||
1108 | result[1] = last; | ||
1109 | return result; | ||
1110 | } | ||
1111 | |||
1112 | /** | ||
1113 | * Returns the lowest common ancestor of this node and "other" - the node that is an ancestor of both and is not the | ||
1114 | * parent of a node that is an ancestor of both. Assumes that this is in the same tree as "other". Assumes that | ||
1115 | * neither "this" nor "other" is a leaf node. This method may return "this" or "other". | ||
1116 | * | ||
1117 | * Note that while it is possible to compute the lowest common ancestor in O(P) time, where P is the length of the | ||
1118 | * path from this node to "other", the "lca" method is not guaranteed to take O(P) time. If your application | ||
1119 | * requires this, then you should write your own lowest common ancestor method. | ||
1120 | */ | ||
1121 | public N lca(N other) { | ||
1122 | if (isLeaf() || other.isLeaf()) { | ||
1123 | throw new IllegalArgumentException("One of the nodes is a leaf node"); | ||
1124 | } | ||
1125 | |||
1126 | // Compute the depth of each node | ||
1127 | int depth = 0; | ||
1128 | for (N parent = this.parent; parent != null; parent = parent.parent) { | ||
1129 | depth++; | ||
1130 | } | ||
1131 | int otherDepth = 0; | ||
1132 | for (N parent = other.parent; parent != null; parent = parent.parent) { | ||
1133 | otherDepth++; | ||
1134 | } | ||
1135 | |||
1136 | // Go up to nodes of the same depth | ||
1137 | @SuppressWarnings("unchecked") | ||
1138 | N parent = (N)this; | ||
1139 | N otherParent = other; | ||
1140 | if (depth <= otherDepth) { | ||
1141 | for (int i = otherDepth; i > depth; i--) { | ||
1142 | otherParent = otherParent.parent; | ||
1143 | } | ||
1144 | } else { | ||
1145 | for (int i = depth; i > otherDepth; i--) { | ||
1146 | parent = parent.parent; | ||
1147 | } | ||
1148 | } | ||
1149 | |||
1150 | // Find the LCA | ||
1151 | while (parent != otherParent) { | ||
1152 | parent = parent.parent; | ||
1153 | otherParent = otherParent.parent; | ||
1154 | } | ||
1155 | if (parent != null) { | ||
1156 | return parent; | ||
1157 | } else { | ||
1158 | throw new IllegalArgumentException("The nodes do not belong to the same tree"); | ||
1159 | } | ||
1160 | } | ||
1161 | |||
/**
 * Returns an integer comparing the position of this node in the tree that contains it with that of "other". Returns
 * a negative number if this is earlier, a positive number if this is later, and 0 if this is at the same position.
 * Assumes that this is in the same tree as "other". Assumes that neither "this" nor "other" is a leaf node.
 *
 * The base class's implementation takes O(log N) time. If a RedBlackNode subclass stores a value used to order the
 * nodes, then it could override compareTo to compare the nodes' values, which would take O(1) time.
 *
 * Note that while it is possible to compare the positions of two nodes in O(P) time, where P is the length of the
 * path from this node to "other", the default implementation of compareTo is not guaranteed to take O(P) time. If
 * your application requires this, then you should write your own comparison method.
 */
@Override
public int compareTo(N other) {
    if (isLeaf() || other.isLeaf()) {
        throw new IllegalArgumentException("One of the nodes is a leaf node");
    }

    // The algorithm operates as follows: compare the depth of this node to that of "other". If the depth of
    // "other" is greater, keep moving up from "other" until we find the ancestor at the same depth. Then, keep
    // moving up from "this" and from that node until we reach the lowest common ancestor. The node that arrived
    // from the left child of the common ancestor is earlier. The algorithm is analogous if the depth of "other" is
    // not greater.
    if (this == other) {
        return 0;
    }

    // Compute the depth of each node
    int depth = 0;
    RedBlackNode<N> parent;
    for (parent = this; parent.parent != null; parent = parent.parent) {
        depth++;
    }
    int otherDepth = 0;
    N otherParent;
    for (otherParent = other; otherParent.parent != null; otherParent = otherParent.parent) {
        otherDepth++;
    }

    // Go up to nodes of the same depth
    if (depth < otherDepth) {
        // Stop one level short (depth + 1) so we can detect whether "this" is an ancestor of "other"
        otherParent = other;
        for (int i = otherDepth - 1; i > depth; i--) {
            otherParent = otherParent.parent;
        }
        if (otherParent.parent != this) {
            otherParent = otherParent.parent;
        } else if (left == otherParent) {
            // "other" is in this node's left subtree, so "other" comes first
            return 1;
        } else {
            return -1;
        }
        parent = this;
    } else if (depth > otherDepth) {
        // Mirror image: stop at otherDepth + 1 to detect whether "other" is an ancestor of "this"
        parent = this;
        for (int i = depth - 1; i > otherDepth; i--) {
            parent = parent.parent;
        }
        if (parent.parent != other) {
            parent = parent.parent;
        } else if (other.left == parent) {
            // "this" is in the left subtree of "other", so "this" comes first
            return -1;
        } else {
            return 1;
        }
        otherParent = other;
    } else {
        parent = this;
        otherParent = other;
    }

    // Keep going up until we reach the lowest common ancestor
    while (parent.parent != otherParent.parent) {
        parent = parent.parent;
        otherParent = otherParent.parent;
    }
    if (parent.parent == null) {
        // Both walks reached a root without sharing a parent, so the nodes are in different trees
        throw new IllegalArgumentException("The nodes do not belong to the same tree");
    }
    // Whichever candidate hangs off the LCA's left child is earlier in the ordering
    if (parent.parent.left == parent) {
        return -1;
    } else {
        return 1;
    }
}
1247 | |||
1248 | /** Throws a RuntimeException if the RedBlackNode fields of this are not correct for a leaf node. */ | ||
1249 | private void assertIsValidLeaf() { | ||
1250 | if (left != null || right != null || parent != null || isRed) { | ||
1251 | throw new RuntimeException("A leaf node's \"left\", \"right\", \"parent\", or isRed field is incorrect"); | ||
1252 | } | ||
1253 | } | ||
1254 | |||
1255 | /** | ||
1256 | * Throws a RuntimeException if the subtree rooted at this node does not satisfy the red-black properties, excluding | ||
1257 | * the requirement that the root be black, or it contains a repeated node other than a leaf node. | ||
1258 | * @param blackHeight The required number of black nodes in each path from this to a leaf node, including this and | ||
1259 | * the leaf node. | ||
1260 | * @param visited The nodes we have reached thus far, other than leaf nodes. This method adds the non-leaf nodes in | ||
1261 | * the subtree rooted at this node to "visited". | ||
1262 | */ | ||
1263 | private void assertSubtreeIsValidRedBlack(int blackHeight, Set<Reference<N>> visited) { | ||
1264 | @SuppressWarnings("unchecked") | ||
1265 | N nThis = (N)this; | ||
1266 | if (left == null || right == null) { | ||
1267 | assertIsValidLeaf(); | ||
1268 | if (blackHeight != 1) { | ||
1269 | throw new RuntimeException("Not all root-to-leaf paths have the same number of black nodes"); | ||
1270 | } | ||
1271 | return; | ||
1272 | } else if (!visited.add(new Reference<N>(nThis))) { | ||
1273 | throw new RuntimeException("The tree contains a repeated non-leaf node"); | ||
1274 | } else { | ||
1275 | int childBlackHeight; | ||
1276 | if (isRed) { | ||
1277 | if ((!left.isLeaf() && left.isRed) || (!right.isLeaf() && right.isRed)) { | ||
1278 | throw new RuntimeException("A red node has a red child"); | ||
1279 | } | ||
1280 | childBlackHeight = blackHeight; | ||
1281 | } else if (blackHeight == 0) { | ||
1282 | throw new RuntimeException("Not all root-to-leaf paths have the same number of black nodes"); | ||
1283 | } else { | ||
1284 | childBlackHeight = blackHeight - 1; | ||
1285 | } | ||
1286 | |||
1287 | if (!left.isLeaf() && left.parent != this) { | ||
1288 | throw new RuntimeException("left.parent != this"); | ||
1289 | } | ||
1290 | if (!right.isLeaf() && right.parent != this) { | ||
1291 | throw new RuntimeException("right.parent != this"); | ||
1292 | } | ||
1293 | RedBlackNode<N> leftNode = left; | ||
1294 | RedBlackNode<N> rightNode = right; | ||
1295 | leftNode.assertSubtreeIsValidRedBlack(childBlackHeight, visited); | ||
1296 | rightNode.assertSubtreeIsValidRedBlack(childBlackHeight, visited); | ||
1297 | } | ||
1298 | } | ||
1299 | |||
1300 | /** Calls assertNodeIsValid() on every node in the subtree rooted at this node. */ | ||
1301 | private void assertNodesAreValid() { | ||
1302 | assertNodeIsValid(); | ||
1303 | if (left != null) { | ||
1304 | RedBlackNode<N> leftNode = left; | ||
1305 | RedBlackNode<N> rightNode = right; | ||
1306 | leftNode.assertNodesAreValid(); | ||
1307 | rightNode.assertNodesAreValid(); | ||
1308 | } | ||
1309 | } | ||
1310 | |||
1311 | /** | ||
1312 | * Throws a RuntimeException if the subtree rooted at this node is not a valid red-black tree, e.g. if a red node | ||
1313 | * has a red child or it contains a non-leaf node "node" for which node.left.parent != node. (If parent != null, | ||
1314 | * it's okay if isRed is true.) This method is useful for debugging. See also assertSubtreeIsValid(). | ||
1315 | */ | ||
1316 | public void assertSubtreeIsValidRedBlack() { | ||
1317 | if (isLeaf()) { | ||
1318 | assertIsValidLeaf(); | ||
1319 | } else { | ||
1320 | if (parent == null && isRed) { | ||
1321 | throw new RuntimeException("The root is red"); | ||
1322 | } | ||
1323 | |||
1324 | // Compute the black height of the tree | ||
1325 | Set<Reference<N>> nodes = new HashSet<Reference<N>>(); | ||
1326 | int blackHeight = 0; | ||
1327 | @SuppressWarnings("unchecked") | ||
1328 | N node = (N)this; | ||
1329 | while (node != null) { | ||
1330 | if (!nodes.add(new Reference<N>(node))) { | ||
1331 | throw new RuntimeException("The tree contains a repeated non-leaf node"); | ||
1332 | } | ||
1333 | if (!node.isRed) { | ||
1334 | blackHeight++; | ||
1335 | } | ||
1336 | node = node.left; | ||
1337 | } | ||
1338 | |||
1339 | assertSubtreeIsValidRedBlack(blackHeight, new HashSet<Reference<N>>()); | ||
1340 | } | ||
1341 | } | ||
1342 | |||
/**
 * Throws a RuntimeException if we detect a problem with the subtree rooted at this node, such as a red child of a
 * red node or a non-leaf descendant "node" for which node.left.parent != node. This method is useful for
 * debugging. RedBlackNode subclasses may want to override assertSubtreeIsValid() to call assertOrderIsValid.
 */
public void assertSubtreeIsValid() {
    // Check the structural red-black invariants first, then each node's subclass-specific invariants
    assertSubtreeIsValidRedBlack();
    assertNodesAreValid();
}
1352 | |||
1353 | /** | ||
1354 | * Throws a RuntimeException if the nodes in the subtree rooted at this node are not in the specified order or they | ||
1355 | * do not lie in the specified range. Assumes that the subtree rooted at this node is a valid binary tree, i.e. it | ||
1356 | * has no repeated nodes other than leaf nodes. | ||
1357 | * @param comparator A comparator indicating how the nodes should be ordered. | ||
1358 | * @param start The lower limit for nodes in the subtree, if any. | ||
1359 | * @param end The upper limit for nodes in the subtree, if any. | ||
1360 | */ | ||
1361 | private void assertOrderIsValid(Comparator<? super N> comparator, N start, N end) { | ||
1362 | if (!isLeaf()) { | ||
1363 | @SuppressWarnings("unchecked") | ||
1364 | N nThis = (N)this; | ||
1365 | if (start != null && comparator.compare(nThis, start) < 0) { | ||
1366 | throw new RuntimeException("The nodes are not ordered correctly"); | ||
1367 | } | ||
1368 | if (end != null && comparator.compare(nThis, end) > 0) { | ||
1369 | throw new RuntimeException("The nodes are not ordered correctly"); | ||
1370 | } | ||
1371 | RedBlackNode<N> leftNode = left; | ||
1372 | RedBlackNode<N> rightNode = right; | ||
1373 | leftNode.assertOrderIsValid(comparator, start, nThis); | ||
1374 | rightNode.assertOrderIsValid(comparator, nThis, end); | ||
1375 | } | ||
1376 | } | ||
1377 | |||
1378 | /** | ||
1379 | * Throws a RuntimeException if the nodes in the subtree rooted at this node are not in the specified order. | ||
1380 | * Assumes that this is a valid binary tree, i.e. there are no repeated nodes other than leaf nodes. This method is | ||
1381 | * useful for debugging. RedBlackNode subclasses may want to override assertSubtreeIsValid() to call | ||
1382 | * assertOrderIsValid. | ||
1383 | * @param comparator A comparator indicating how the nodes should be ordered. If this is null, we use the nodes' | ||
1384 | * natural order, as in N.compareTo. | ||
1385 | */ | ||
1386 | public void assertOrderIsValid(Comparator<? super N> comparator) { | ||
1387 | if (comparator == null) { | ||
1388 | comparator = naturalOrder(); | ||
1389 | } | ||
1390 | assertOrderIsValid(comparator, null, null); | ||
1391 | } | ||
1392 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Reference.java b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Reference.java new file mode 100644 index 00000000..a25c167d --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/Reference.java | |||
@@ -0,0 +1,51 @@ | |||
1 | /* | ||
2 | * The MIT License (MIT) | ||
3 | * | ||
4 | * Copyright (c) 2016 btrekkie | ||
5 | * | ||
6 | * Permission is hereby granted, free of charge, to any person obtaining a copy | ||
7 | * of this software and associated documentation files (the "Software"), to deal | ||
8 | * in the Software without restriction, including without limitation the rights | ||
9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
10 | * copies of the Software, and to permit persons to whom the Software is | ||
11 | * furnished to do so, subject to the following conditions: | ||
12 | * | ||
13 | * The above copyright notice and this permission notice shall be included in all | ||
14 | * copies or substantial portions of the Software. | ||
15 | * | ||
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
22 | * SOFTWARE. | ||
23 | */ | ||
24 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval; | ||
25 | |||
/**
 * Wraps a value using reference equality. In other words, two references are equal only if their values are the same
 * object instance, as in ==.
 * @param <T> The type of value.
 */
class Reference<T> {
    /** The value this wraps. */
    private final T value;

    public Reference(T value) {
        this.value = value;
    }

    // Fix: @Override was missing, so a signature typo would silently overload instead of override
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Reference)) {
            return false;
        }
        Reference<?> reference = (Reference<?>)obj;
        // Intentional identity comparison: equal only for the very same instance
        return value == reference.value;
    }

    @Override
    public int hashCode() {
        // Consistent with equals: identity hash ignores any value-based hashCode
        return System.identityHashCode(value);
    }
}
51 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/IntervalAggregatorFactory.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/IntervalAggregatorFactory.xtend new file mode 100644 index 00000000..dee31f67 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/IntervalAggregatorFactory.xtend | |||
@@ -0,0 +1,50 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.aggregators | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.Interval | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.IntervalAggregationMode | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.IntervalAggregationOperator | ||
6 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.AggregatorType | ||
7 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.BoundAggregator | ||
8 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.IAggregatorFactory | ||
9 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
10 | |||
/**
 * Base class for VIATRA aggregator factories that fold a column of {@link Interval} values into a single
 * Interval. Each concrete subclass fixes the {@link IntervalAggregationMode} (sum, min, max or join) passed
 * to the shared {@link IntervalAggregationOperator}.
 */
@AggregatorType(parameterTypes=#[Interval], returnTypes=#[Interval])
abstract class IntervalAggregatorFactory implements IAggregatorFactory {
	// Aggregation mode forwarded to IntervalAggregationOperator; assigned by the generated constructor
	val IntervalAggregationMode mode

	// @FinalFieldsConstructor rewrites this declaration into a constructor taking the final fields
	@FinalFieldsConstructor
	protected new() {
	}

	/**
	 * Builds the aggregator logic for the given column type. Only Interval columns are supported;
	 * any other domain class is rejected.
	 */
	override getAggregatorLogic(Class<?> domainClass) {
		if (domainClass == Interval) {
			new BoundAggregator(new IntervalAggregationOperator(mode), Interval, Interval)
		} else {
			throw new IllegalArgumentException("Unknown domain class: " + domainClass)
		}
	}
}
27 | |||
/** Aggregator summing a column of Intervals. Lowercase name follows the VIATRA aggregator naming convention. */
class intervalSum extends IntervalAggregatorFactory {
	new() {
		super(IntervalAggregationMode.SUM)
	}
}
33 | |||
/** Aggregator taking the minimum of a column of Intervals. Lowercase name follows the VIATRA aggregator naming convention. */
class intervalMin extends IntervalAggregatorFactory {
	new() {
		super(IntervalAggregationMode.MIN)
	}
}
39 | |||
/** Aggregator taking the maximum of a column of Intervals. Lowercase name follows the VIATRA aggregator naming convention. */
class intervalMax extends IntervalAggregatorFactory {
	new() {
		super(IntervalAggregationMode.MAX)
	}
}
45 | |||
/** Aggregator joining a column of Intervals. Lowercase name follows the VIATRA aggregator naming convention. */
class intervalJoin extends IntervalAggregatorFactory {
	new() {
		super(IntervalAggregationMode.JOIN)
	}
}
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/intervalHull.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/intervalHull.xtend new file mode 100644 index 00000000..72605f57 --- /dev/null +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/interval/aggregators/intervalHull.xtend | |||
@@ -0,0 +1,74 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.aggregators | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.Interval | ||
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.interval.IntervalHullAggregatorOperator | ||
5 | import java.math.BigDecimal | ||
6 | import java.math.BigInteger | ||
7 | import java.math.MathContext | ||
8 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.AggregatorType | ||
9 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.BoundAggregator | ||
10 | import org.eclipse.viatra.query.runtime.matchers.psystem.aggregations.IAggregatorFactory | ||
11 | |||
/**
 * Aggregator factory computing the interval hull of a column of numbers. Each supported numeric domain class
 * is adapted to BigDecimal by a dedicated IntervalHullAggregatorOperator subclass; the hull itself is computed
 * by that operator. Lowercase name follows the VIATRA aggregator naming convention.
 */
@AggregatorType(parameterTypes=#[BigDecimal, BigInteger, Byte, Double, Float, Integer, Long, Short], returnTypes=#[
	Interval, Interval, Interval, Interval, Interval, Interval, Interval, Interval])
class intervalHull implements IAggregatorFactory {

	override getAggregatorLogic(Class<?> domainClass) {
		new BoundAggregator(getAggregationOperator(domainClass), domainClass, Interval)
	}

	/**
	 * Selects the operator converting values of the given numeric class to BigDecimal under the supplied
	 * MathContext. Throws IllegalArgumentException for unsupported classes.
	 */
	private def getAggregationOperator(Class<?> domainClass) {
		switch (domainClass) {
			case BigDecimal:
				new IntervalHullAggregatorOperator<BigDecimal>() {
					override protected toBigDecimal(BigDecimal value, MathContext mc) {
						// Already a BigDecimal; only re-round to the requested precision
						value.round(mc)
					}
				}
			case BigInteger:
				new IntervalHullAggregatorOperator<BigInteger>() {
					override protected toBigDecimal(BigInteger value, MathContext mc) {
						new BigDecimal(value, mc)
					}
				}
			case Byte:
				new IntervalHullAggregatorOperator<Byte>() {
					override protected toBigDecimal(Byte value, MathContext mc) {
						// Unboxes and widens to int for the BigDecimal(int, MathContext) constructor
						new BigDecimal(value, mc)
					}
				}
			case Double:
				new IntervalHullAggregatorOperator<Double>() {
					override protected toBigDecimal(Double value, MathContext mc) {
						// NOTE(review): BigDecimal(double) uses the exact binary value of the double,
						// not its decimal rendering — presumably intentional here; confirm
						new BigDecimal(value, mc)
					}
				}
			case Float:
				new IntervalHullAggregatorOperator<Float>() {
					override protected toBigDecimal(Float value, MathContext mc) {
						// Widens to double; see the note on the Double case
						new BigDecimal(value, mc)
					}
				}
			case Integer:
				new IntervalHullAggregatorOperator<Integer>() {
					override protected toBigDecimal(Integer value, MathContext mc) {
						new BigDecimal(value, mc)
					}
				}
			case Long:
				new IntervalHullAggregatorOperator<Long>() {
					override protected toBigDecimal(Long value, MathContext mc) {
						new BigDecimal(value, mc)
					}
				}
			case Short:
				new IntervalHullAggregatorOperator<Short>() {
					override protected toBigDecimal(Short value, MathContext mc) {
						new BigDecimal(value, mc)
					}
				}
			default:
				throw new IllegalArgumentException("Unknown domain class: " + domainClass)
		}
	}
}
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeIndexer.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeIndexer.xtend index d6a15c1a..0e0f1f02 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeIndexer.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeIndexer.xtend | |||
@@ -1,209 +1,150 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns |
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | ||
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | 3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type |
5 | import org.eclipse.emf.ecore.EClass | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem |
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | 5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality |
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | 6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult |
9 | 7 | ||
10 | class GenericTypeIndexer extends TypeIndexer { | 8 | class GenericTypeIndexer extends TypeIndexer { |
11 | val PatternGenerator base; | ||
12 | |||
13 | new(PatternGenerator base) { | 9 | new(PatternGenerator base) { |
14 | this.base = base | 10 | super(base) |
15 | } | 11 | } |
12 | |||
16 | override requiresTypeAnalysis() { false } | 13 | override requiresTypeAnalysis() { false } |
17 | 14 | ||
18 | public override getRequiredQueries() ''' | 15 | override getRequiredQueries() ''' |
19 | private pattern newELement(interpretation: PartialInterpretation, element: DefinedElement) { | 16 | «super.requiredQueries» |
20 | PartialInterpretation.newElements(interpretation,element); | 17 | |
21 | } | 18 | /** |
22 | 19 | * Direct supertypes of a type. | |
23 | private pattern typeInterpretation(problem:LogicProblem, interpetation:PartialInterpretation, type:TypeDeclaration, typeInterpretation:PartialComplexTypeInterpretation) { | 20 | */ |
24 | find interpretation(problem,interpetation); | 21 | private pattern supertypeDirect(subtype : Type, supertype : Type) { |
25 | LogicProblem.types(problem,type); | 22 | Type.supertypes(subtype, supertype); |
26 | PartialInterpretation.partialtypeinterpratation(interpetation,typeInterpretation); | 23 | } |
27 | PartialComplexTypeInterpretation.interpretationOf(typeInterpretation,type); | 24 | |
28 | } | 25 | /** |
29 | 26 | * All supertypes of a type. | |
30 | private pattern directInstanceOf(problem:LogicProblem, interpetation:PartialInterpretation, element:DefinedElement, type:Type) { | 27 | */ |
31 | find interpretation(problem,interpetation); | 28 | private pattern supertypeStar(subtype: Type, supertype: Type) { |
32 | find mustExist(problem,interpetation,element); | 29 | subtype == supertype; |
33 | LogicProblem.types(problem,type); | 30 | } or { |
34 | TypeDefinition.elements(type,element); | 31 | find supertypeDirect+(subtype,supertype); |
35 | } or { | 32 | } |
36 | find mustExist(problem,interpetation,element); | 33 | |
37 | find typeInterpretation(problem,interpetation,type,typeInterpretation); | 34 | /// Complex type reasoning patterns /// |
38 | PartialComplexTypeInterpretation.elements(typeInterpretation,element); | 35 | // |
39 | } | 36 | // In a valid type system, for each element e there is exactly one type T where |
40 | 37 | // 1: T(e) - but we dont know this for type declaration | |
41 | /** | 38 | // 2: For the dynamic type D and another type T, where D(e) && D-->T, T(e) is true. |
42 | * Direct supertypes of a type. | 39 | // 2e: A type hierarchy is invalid, if there is a supertype T for a dynamic type D which does no contains e: |
43 | */ | 40 | // D(e) && D-->T && !T(e) |
44 | private pattern supertypeDirect(subtype : Type, supertype : Type) { | 41 | // 3: There is no T' that T'->T and T'(e) |
45 | Type.supertypes(subtype, supertype); | 42 | // 3e: A type hierarcy is invalid, if there is a type T for a dynamic type D, which contains e, but not subtype of T: |
46 | } | 43 | // D(e) && ![T--->D] && T(e) |
47 | |||
48 | /** | ||
49 | * All supertypes of a type. | ||
50 | */ | ||
51 | private pattern supertypeStar(subtype: Type, supertype: Type) { | ||
52 | subtype == supertype; | ||
53 | } or { | ||
54 | find supertypeDirect+(subtype,supertype); | ||
55 | } | ||
56 | |||
57 | /// Complex type reasoning patterns /// | ||
58 | // | ||
59 | // In a valid type system, for each element e there is exactly one type T where | ||
60 | // 1: T(e) - but we dont know this for type declaration | ||
61 | // 2: For the dynamic type D and another type T, where D(e) && D-->T, T(e) is true. | ||
62 | // 2e: A type hierarchy is invalid, if there is a supertype T for a dynamic type D which does no contains e: | ||
63 | // D(e) && D-->T && !T(e) | ||
64 | // 3: There is no T' that T'->T and T'(e) | ||
65 | // 3e: A type hierarcy is invalid, if there is a type T for a dynamic type D, which contains e, but not subtype of T: | ||
66 | // D(e) && ![T--->D] && T(e) | ||
67 | // 4: T is not abstract | ||
68 | // Such type T is called Dynamic type of e, while other types are called static types. | ||
69 | // | ||
70 | // The following patterns checks the possible dynamic types for an element | ||
71 | |||
72 | private pattern wellformedType(problem: LogicProblem, interpretation:PartialInterpretation, dynamic:Type, element:DefinedElement) { | ||
73 | // 1: T(e) | ||
74 | find directInstanceOf(problem,interpretation,element,dynamic); | ||
75 | // 2e is not true: D(e) && D-->T && !T(e) | ||
76 | neg find dynamicTypeNotSubtypeOfADefinition(problem,interpretation,element,dynamic); | ||
77 | // 3e is not true: D(e) && ![T--->D] && T(e) | ||
78 | neg find dynamicTypeIsSubtypeOfANonDefinition(problem,interpretation,element,dynamic); | ||
79 | // 4: T is not abstract | ||
80 | Type.isAbstract(dynamic,false); | ||
81 | } | ||
82 | |||
83 | |||
84 | private pattern isPrimitive(element: PrimitiveElement) { | ||
85 | PrimitiveElement(element); | ||
86 | } | ||
87 | |||
88 | private pattern possibleDynamicType(problem: LogicProblem, interpretation:PartialInterpretation, dynamic:Type, element:DefinedElement) | ||
89 | // case 1: element is defined at least once | ||
90 | { | ||
91 | LogicProblem.types(problem,dynamic); | ||
92 | // select a random definition 'randomType' | ||
93 | find directInstanceOf(problem,interpretation,element,randomType); | ||
94 | // dynamic is a subtype of 'randomType' | ||
95 | find supertypeStar(dynamic,randomType); | ||
96 | // 2e is not true: D(e) && D-->T && !T(e) | ||
97 | neg find dynamicTypeNotSubtypeOfADefinition(problem,interpretation,element,dynamic); | ||
98 | // 3e is not true: D(e) && ![T--->D] && T(e) | ||
99 | neg find dynamicTypeIsSubtypeOfANonDefinition(problem,interpretation,element,dynamic); | ||
100 | // 4: T is not abstract | ||
101 | Type.isAbstract(dynamic,false); | ||
102 | // 5. element is not primitive datatype | ||
103 | neg find isPrimitive(element); | ||
104 | } or | ||
105 | // case 2: element is not defined anywhere | ||
106 | { | ||
107 | find mayExist(problem,interpretation,element); | ||
108 | // there is no definition | ||
109 | neg find directInstanceOf(problem,interpretation,element,_); | ||
110 | // 2e is not true: D(e) && D-->T && !T(e) | ||
111 | // because non of the definition contains element, the type cannot have defined supertype | ||
112 | LogicProblem.types(problem,dynamic); | ||
113 | PartialInterpretation.problem(interpretation,problem); | ||
114 | neg find typeWithDefinedSupertype(dynamic); | ||
115 | // 3e is not true: D(e) && ![T--->D] && T(e) | ||
116 | // because there is no definition, dynamic covers all definition | ||
117 | // 4: T is not abstract | 44 | // 4: T is not abstract |
118 | Type.isAbstract(dynamic,false); | 45 | // Such type T is called Dynamic type of e, while other types are called static types. |
119 | // 5. element is not primitive datatype | 46 | // |
120 | neg find isPrimitive(element); | 47 | // The following patterns checks the possible dynamic types for an element |
121 | } | 48 | |
122 | 49 | private pattern wellformedType(problem: LogicProblem, interpretation:PartialInterpretation, dynamic:Type, element:DefinedElement) { | |
123 | /** | 50 | // 1: T(e) |
124 | * supertype -------> element <------- otherSupertype | 51 | find directInstanceOf(problem,interpretation,element,dynamic); |
125 | * A A | 52 | // 2e is not true: D(e) && D-->T && !T(e) |
126 | * | | | 53 | neg find dynamicTypeNotSubtypeOfADefinition(problem,interpretation,element,dynamic); |
127 | * wrongDynamic -----------------------------X | 54 | // 3e is not true: D(e) && ![T--->D] && T(e) |
128 | */ | 55 | neg find dynamicTypeIsSubtypeOfANonDefinition(problem,interpretation,element,dynamic); |
129 | private pattern dynamicTypeNotSubtypeOfADefinition(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, wrongDynamic : Type) { | 56 | // 4: T is not abstract |
130 | find directInstanceOf(problem,interpretation,element,supertype); | 57 | Type.isAbstract(dynamic,false); |
131 | find directInstanceOf(problem,interpretation,element,otherSupertype); | 58 | } |
132 | find supertypeStar(wrongDynamic,supertype); | 59 | |
133 | neg find supertypeStar(wrongDynamic,otherSupertype); | 60 | private pattern possibleDynamicType(problem: LogicProblem, interpretation:PartialInterpretation, dynamic:Type, element:DefinedElement) |
134 | } | 61 | // case 1: element is defined at least once |
135 | 62 | { | |
136 | /** | 63 | LogicProblem.types(problem,dynamic); |
137 | * supertype -------> element <---X--- otherSupertype | 64 | // select a random definition 'randomType' |
138 | * A A | 65 | find directInstanceOf(problem,interpretation,element,randomType); |
139 | * | | | 66 | // dynamic is a subtype of 'randomType' |
140 | * wrongDynamic -----------------------------+ | 67 | find supertypeStar(dynamic,randomType); |
141 | */ | 68 | // 2e is not true: D(e) && D-->T && !T(e) |
142 | private pattern dynamicTypeIsSubtypeOfANonDefinition(problem: LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, wrongDynamic:Type) { | 69 | neg find dynamicTypeNotSubtypeOfADefinition(problem,interpretation,element,dynamic); |
143 | find directInstanceOf(problem,interpretation,element,supertype); | 70 | // 3e is not true: D(e) && ![T--->D] && T(e) |
144 | neg find elementInTypeDefinition(element,otherSupertype); | 71 | neg find dynamicTypeIsSubtypeOfANonDefinition(problem,interpretation,element,dynamic); |
145 | TypeDefinition(otherSupertype); | 72 | // 4: T is not abstract |
146 | find supertypeStar(wrongDynamic, supertype); | 73 | Type.isAbstract(dynamic,false); |
147 | find supertypeStar(wrongDynamic, otherSupertype); | 74 | // 5. element is not primitive datatype |
148 | } | 75 | neg find isPrimitive(element); |
149 | 76 | } or | |
150 | private pattern elementInTypeDefinition(element:DefinedElement, definition:TypeDefinition) { | 77 | // case 2: element is not defined anywhere |
151 | TypeDefinition.elements(definition,element); | 78 | { |
152 | } | 79 | find mayExist(problem,interpretation,element); |
153 | 80 | // there is no definition | |
154 | private pattern typeWithDefinedSupertype(type:Type) { | 81 | neg find directInstanceOf(problem,interpretation,element,_); |
155 | find supertypeStar(type,definedSupertype); | 82 | // 2e is not true: D(e) && D-->T && !T(e) |
156 | TypeDefinition(definedSupertype); | 83 | // because non of the definition contains element, the type cannot have defined supertype |
157 | } | 84 | LogicProblem.types(problem,dynamic); |
158 | 85 | PartialInterpretation.problem(interpretation,problem); | |
159 | private pattern scopeDisallowsNewElementsFromType(typeInterpretation:PartialComplexTypeInterpretation) { | 86 | neg find typeWithDefinedSupertype(dynamic); |
160 | Scope.targetTypeInterpretation(scope,typeInterpretation); | 87 | // 3e is not true: D(e) && ![T--->D] && T(e) |
161 | Scope.maxNewElements(scope,0); | 88 | // because there is no definition, dynamic covers all definition |
162 | } | 89 | // 4: T is not abstract |
163 | ''' | 90 | Type.isAbstract(dynamic,false); |
164 | 91 | // 5. element is not primitive datatype | |
165 | public override generateInstanceOfQueries(LogicProblem problem, PartialInterpretation emptySolution,TypeAnalysisResult typeAnalysisResult) { | 92 | neg find isPrimitive(element); |
166 | ''' | 93 | } |
167 | «FOR type:problem.types» | 94 | |
168 | «problem.generateMustInstenceOf(type)» | ||
169 | «problem.generateMayInstanceOf(type)» | ||
170 | «ENDFOR» | ||
171 | ''' | ||
172 | } | ||
173 | |||
174 | private def patternName(Type type, Modality modality) | ||
175 | '''«modality.toString.toLowerCase»InstanceOf«base.canonizeName(type.name)»''' | ||
176 | |||
177 | private def generateMustInstenceOf(LogicProblem problem, Type type) { | ||
178 | ''' | ||
179 | /** | 95 | /** |
180 | * An element must be an instance of type "«type.name»". | 96 | * supertype -------> element <------- otherSupertype |
97 | * A A | ||
98 | * | | | ||
99 | * wrongDynamic -----------------------------X | ||
181 | */ | 100 | */ |
182 | private pattern «patternName(type,Modality.MUST)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | 101 | private pattern dynamicTypeNotSubtypeOfADefinition(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, wrongDynamic : Type) { |
183 | Type.name(type,"«type.name»"); | 102 | find directInstanceOf(problem,interpretation,element,supertype); |
184 | find directInstanceOf(problem,interpretation,element,type); | 103 | find directInstanceOf(problem,interpretation,element,otherSupertype); |
104 | find supertypeStar(wrongDynamic,supertype); | ||
105 | neg find supertypeStar(wrongDynamic,otherSupertype); | ||
185 | } | 106 | } |
186 | ''' | 107 | |
187 | } | ||
188 | |||
189 | private def generateMayInstanceOf(LogicProblem problem, Type type) { | ||
190 | ''' | ||
191 | /** | 108 | /** |
192 | * An element may be an instance of type "«type.name»". | 109 | * supertype -------> element <---X--- otherSupertype |
110 | * A A | ||
111 | * | | | ||
112 | * wrongDynamic -----------------------------+ | ||
193 | */ | 113 | */ |
194 | private pattern «patternName(type,Modality.MAY)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | 114 | private pattern dynamicTypeIsSubtypeOfANonDefinition(problem: LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, wrongDynamic:Type) { |
195 | Type.name(type,"«type.name»"); | 115 | find directInstanceOf(problem,interpretation,element,supertype); |
196 | find possibleDynamicType(problem,interpretation,dynamic,element); | 116 | neg find elementInTypeDefinition(element,otherSupertype); |
197 | find supertypeStar(dynamic,type); | 117 | TypeDefinition(otherSupertype); |
198 | neg find scopeDisallowsNewElementsFromType(dynamic); | 118 | find supertypeStar(wrongDynamic, supertype); |
119 | find supertypeStar(wrongDynamic, otherSupertype); | ||
120 | } | ||
121 | |||
122 | private pattern elementInTypeDefinition(element:DefinedElement, definition:TypeDefinition) { | ||
123 | TypeDefinition.elements(definition,element); | ||
124 | } | ||
125 | |||
126 | private pattern typeWithDefinedSupertype(type:Type) { | ||
127 | find supertypeStar(type,definedSupertype); | ||
128 | TypeDefinition(definedSupertype); | ||
129 | } | ||
130 | |||
131 | private pattern scopeDisallowsNewElementsFromType(typeInterpretation:PartialComplexTypeInterpretation) { | ||
132 | Scope.targetTypeInterpretation(scope,typeInterpretation); | ||
133 | Scope.maxNewElements(scope,0); | ||
199 | } | 134 | } |
135 | ''' | ||
136 | |||
137 | protected override generateMayInstanceOf(LogicProblem problem, Type type, TypeAnalysisResult typeAnalysisResult) { | ||
138 | ''' | ||
139 | /** | ||
140 | * An element may be an instance of type "«type.name»". | ||
141 | */ | ||
142 | private pattern «patternName(type,Modality.MAY)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | ||
143 | Type.name(type,"«type.name»"); | ||
144 | find possibleDynamicType(problem,interpretation,dynamic,element); | ||
145 | find supertypeStar(dynamic,type); | ||
146 | neg find scopeDisallowsNewElementsFromType(dynamic); | ||
147 | } | ||
200 | ''' | 148 | ''' |
201 | } | 149 | } |
202 | 150 | } | |
203 | public override referInstanceOf(Type type, Modality modality, String variableName) { | ||
204 | '''find «patternName(type,modality)»(problem,interpretation,«variableName»);''' | ||
205 | } | ||
206 | public override referInstanceOf(EClass type, Modality modality, String variableName) { | ||
207 | '''find «modality.toString.toLowerCase»InstanceOf«base.canonizeName('''«type.name» class''')»(problem,interpretation,«variableName»);''' | ||
208 | } | ||
209 | } \ No newline at end of file | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeRefinementGenerator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeRefinementGenerator.xtend index 2e03d6ed..52f0cbea 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeRefinementGenerator.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/GenericTypeRefinementGenerator.xtend | |||
@@ -11,110 +11,114 @@ import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.par | |||
11 | import java.util.HashMap | 11 | import java.util.HashMap |
12 | 12 | ||
13 | class GenericTypeRefinementGenerator extends TypeRefinementGenerator { | 13 | class GenericTypeRefinementGenerator extends TypeRefinementGenerator { |
14 | public new(PatternGenerator base) { | 14 | new(PatternGenerator base) { |
15 | super(base) | 15 | super(base) |
16 | } | 16 | } |
17 | |||
17 | override requiresTypeAnalysis() { false } | 18 | override requiresTypeAnalysis() { false } |
18 | 19 | ||
19 | override generateRefineObjectQueries(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) { | 20 | override generateRefineObjectQueries(LogicProblem p, PartialInterpretation emptySolution, |
21 | TypeAnalysisResult typeAnalysisResult) { | ||
20 | val containment = p.containmentHierarchies.head | 22 | val containment = p.containmentHierarchies.head |
21 | val newObjectTypes = p.types.filter(TypeDeclaration).filter[!isAbstract] | 23 | val newObjectTypes = p.types.filter(TypeDeclaration).filter[!isAbstract] |
22 | val inverseRelations = new HashMap | 24 | val inverseRelations = new HashMap |
23 | p.annotations.filter(InverseRelationAssertion).forEach[ | 25 | p.annotations.filter(InverseRelationAssertion).forEach [ |
24 | inverseRelations.put(it.inverseA,it.inverseB) | 26 | inverseRelations.put(it.inverseA, it.inverseB) |
25 | inverseRelations.put(it.inverseB,it.inverseA) | 27 | inverseRelations.put(it.inverseB, it.inverseA) |
26 | ] | 28 | ] |
27 | return ''' | 29 | return ''' |
28 | private pattern hasElementInContainment(problem:LogicProblem, interpretation:PartialInterpretation) | 30 | pattern «hasElementInContainmentName»(problem:LogicProblem, interpretation:PartialInterpretation) |
29 | «FOR type :containment.typesOrderedInHierarchy SEPARATOR "or"»{ | 31 | «FOR type : containment.typesOrderedInHierarchy SEPARATOR "or"»{ |
30 | find interpretation(problem,interpretation); | ||
31 | «base.typeIndexer.referInstanceOf(type,Modality.MUST,"root")» | ||
32 | find mustExist(problem, interpretation, root); | ||
33 | }«ENDFOR» | ||
34 | «FOR type:newObjectTypes» | ||
35 | «IF(containment.typesOrderedInHierarchy.contains(type))» | ||
36 | «FOR containmentRelation : containment.containmentRelations.filter[canBeContainedByRelation(it,type)]» | ||
37 | «IF inverseRelations.containsKey(containmentRelation)» | ||
38 | pattern «this.patternName(containmentRelation,inverseRelations.get(containmentRelation),type)»( | ||
39 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
40 | relationInterpretation:PartialRelationInterpretation, inverseInterpretation:PartialRelationInterpretation ,typeInterpretation:PartialComplexTypeInterpretation, | ||
41 | container:DefinedElement) | ||
42 | { | ||
43 | find interpretation(problem,interpretation); | ||
44 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
45 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | ||
46 | PartialInterpretation.partialrelationinterpretation(interpretation,relationInterpretation); | ||
47 | PartialRelationInterpretation.interpretationOf.name(relationInterpretation,"«containmentRelation.name»"); | ||
48 | PartialInterpretation.partialrelationinterpretation(interpretation,inverseInterpretation); | ||
49 | PartialRelationInterpretation.interpretationOf.name(inverseInterpretation,"«inverseRelations.get(containmentRelation).name»"); | ||
50 | «base.typeIndexer.referInstanceOf((containmentRelation.parameters.get(0) as ComplexTypeReference).referred,Modality.MUST,"container")» | ||
51 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | ||
52 | «base.relationDeclarationIndexer.referRelation(containmentRelation as RelationDeclaration,"container","newObject",Modality.MAY)» | ||
53 | find mustExist(problem, interpretation, container); | ||
54 | neg find mustExist(problem, interpretation, newObject); | ||
55 | } | ||
56 | «ELSE» | ||
57 | pattern «this.patternName(containmentRelation,null,type)»( | ||
58 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
59 | relationInterpretation:PartialRelationInterpretation, typeInterpretation:PartialComplexTypeInterpretation, | ||
60 | container:DefinedElement) | ||
61 | { | ||
62 | find interpretation(problem,interpretation); | ||
63 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
64 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | ||
65 | PartialInterpretation.partialrelationinterpretation(interpretation,relationInterpretation); | ||
66 | PartialRelationInterpretation.interpretationOf.name(relationInterpretation,"«containmentRelation.name»"); | ||
67 | «base.typeIndexer.referInstanceOf((containmentRelation.parameters.get(0) as ComplexTypeReference).referred,Modality.MUST,"container")» | ||
68 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | ||
69 | «base.relationDeclarationIndexer.referRelation(containmentRelation as RelationDeclaration,"container","newObject",Modality.MAY)» | ||
70 | find mustExist(problem, interpretation, container); | ||
71 | neg find mustExist(problem, interpretation, newObject); | ||
72 | } | ||
73 | «ENDIF» | ||
74 | «ENDFOR» | ||
75 | pattern «patternName(null,null,type)»( | ||
76 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
77 | typeInterpretation:PartialComplexTypeInterpretation) | ||
78 | { | ||
79 | find interpretation(problem,interpretation); | 32 | find interpretation(problem,interpretation); |
80 | neg find hasElementInContainment(problem,interpretation); | 33 | «base.typeIndexer.referInstanceOf(type,Modality.MUST,"root")» |
81 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | 34 | find mustExist(problem, interpretation, root); |
82 | PartialComplexTypeInterpretation.interpretationOf.name(type,"«type.name»"); | 35 | }«ENDFOR» |
83 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | 36 | «FOR type : newObjectTypes» |
84 | find mayExist(problem, interpretation, newObject); | 37 | «IF(containment.typesOrderedInHierarchy.contains(type))» |
85 | neg find mustExist(problem, interpretation, newObject); | 38 | «FOR containmentRelation : containment.containmentRelations.filter[canBeContainedByRelation(it,type)]» |
86 | } | 39 | «IF inverseRelations.containsKey(containmentRelation)» |
87 | «ELSE» | 40 | pattern «this.patternName(containmentRelation,inverseRelations.get(containmentRelation),type)»( |
88 | pattern createObject_«this.patternName(null,null,type)»( | 41 | problem:LogicProblem, interpretation:PartialInterpretation, |
89 | problem:LogicProblem, interpretation:PartialInterpretation, | 42 | relationInterpretation:PartialRelationInterpretation, inverseInterpretation:PartialRelationInterpretation ,typeInterpretation:PartialComplexTypeInterpretation, |
90 | typeInterpretation:PartialComplexTypeInterpretation) | 43 | container:DefinedElement) |
91 | { | 44 | { |
92 | find interpretation(problem,interpretation); | 45 | find interpretation(problem,interpretation); |
93 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | 46 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); |
94 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | 47 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); |
95 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | 48 | PartialInterpretation.partialrelationinterpretation(interpretation,relationInterpretation); |
96 | find mayExist(problem, interpretation, newObject); | 49 | PartialRelationInterpretation.interpretationOf.name(relationInterpretation,"«containmentRelation.name»"); |
97 | neg find mustExist(problem, interpretation, newObject); | 50 | PartialInterpretation.partialrelationinterpretation(interpretation,inverseInterpretation); |
98 | } | 51 | PartialRelationInterpretation.interpretationOf.name(inverseInterpretation,"«inverseRelations.get(containmentRelation).name»"); |
99 | «ENDIF» | 52 | «base.typeIndexer.referInstanceOf((containmentRelation.parameters.get(0) as ComplexTypeReference).referred,Modality.MUST,"container")» |
100 | «ENDFOR» | 53 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» |
54 | «base.relationDeclarationIndexer.referRelation(containmentRelation as RelationDeclaration,"container","newObject",Modality.MAY)» | ||
55 | find mustExist(problem, interpretation, container); | ||
56 | neg find mustExist(problem, interpretation, newObject); | ||
57 | } | ||
58 | «ELSE» | ||
59 | pattern «this.patternName(containmentRelation,null,type)»( | ||
60 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
61 | relationInterpretation:PartialRelationInterpretation, typeInterpretation:PartialComplexTypeInterpretation, | ||
62 | container:DefinedElement) | ||
63 | { | ||
64 | find interpretation(problem,interpretation); | ||
65 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
66 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | ||
67 | PartialInterpretation.partialrelationinterpretation(interpretation,relationInterpretation); | ||
68 | PartialRelationInterpretation.interpretationOf.name(relationInterpretation,"«containmentRelation.name»"); | ||
69 | «base.typeIndexer.referInstanceOf((containmentRelation.parameters.get(0) as ComplexTypeReference).referred,Modality.MUST,"container")» | ||
70 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | ||
71 | «base.relationDeclarationIndexer.referRelation(containmentRelation as RelationDeclaration,"container","newObject",Modality.MAY)» | ||
72 | find mustExist(problem, interpretation, container); | ||
73 | neg find mustExist(problem, interpretation, newObject); | ||
74 | } | ||
75 | «ENDIF» | ||
76 | «ENDFOR» | ||
77 | pattern «patternName(null,null,type)»( | ||
78 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
79 | typeInterpretation:PartialComplexTypeInterpretation) | ||
80 | { | ||
81 | find interpretation(problem,interpretation); | ||
82 | neg find «hasElementInContainmentName»(problem,interpretation); | ||
83 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
84 | PartialComplexTypeInterpretation.interpretationOf.name(type,"«type.name»"); | ||
85 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | ||
86 | find mayExist(problem, interpretation, newObject); | ||
87 | neg find mustExist(problem, interpretation, newObject); | ||
88 | } | ||
89 | «ELSE» | ||
90 | pattern createObject_«this.patternName(null,null,type)»( | ||
91 | problem:LogicProblem, interpretation:PartialInterpretation, | ||
92 | typeInterpretation:PartialComplexTypeInterpretation) | ||
93 | { | ||
94 | find interpretation(problem,interpretation); | ||
95 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
96 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | ||
97 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | ||
98 | find mayExist(problem, interpretation, newObject); | ||
99 | neg find mustExist(problem, interpretation, newObject); | ||
100 | } | ||
101 | «ENDIF» | ||
102 | «ENDFOR» | ||
101 | ''' | 103 | ''' |
102 | } | 104 | } |
103 | 105 | ||
104 | override generateRefineTypeQueries(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) { | 106 | override generateRefineTypeQueries(LogicProblem p, PartialInterpretation emptySolution, |
107 | TypeAnalysisResult typeAnalysisResult) { | ||
105 | return ''' | 108 | return ''' |
106 | «FOR type : p.types.filter(TypeDeclaration).filter[!it.isAbstract]» | 109 | «FOR type : p.types.filter(TypeDeclaration).filter[!it.isAbstract]» |
107 | pattern refineTypeTo_«base.canonizeName(type.name)»(problem:LogicProblem, interpretation:PartialInterpretation, object: DefinedElement) { | 110 | pattern refineTypeTo_«base.canonizeName(type.name)»(problem:LogicProblem, interpretation:PartialInterpretation, object: DefinedElement) { |
108 | find interpretation(problem,interpretation); | 111 | find interpretation(problem,interpretation); |
109 | find mustExist(problem, interpretation, object); | 112 | find mustExist(problem, interpretation, object); |
110 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"object")» | 113 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"object")» |
111 | neg «base.typeIndexer.referInstanceOf(type,Modality.MUST,"object")» | 114 | neg «base.typeIndexer.referInstanceOf(type,Modality.MUST,"object")» |
112 | } | 115 | } |
113 | «ENDFOR» | 116 | «ENDFOR» |
114 | ''' | 117 | ''' |
115 | } | 118 | } |
116 | 119 | ||
117 | override getRefineTypeQueryNames(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) { | 120 | override getRefineTypeQueryNames(LogicProblem p, PartialInterpretation emptySolution, |
121 | TypeAnalysisResult typeAnalysisResult) { | ||
118 | p.types.filter(TypeDeclaration).toInvertedMap['''refineTypeTo_«base.canonizeName(it.name)»'''] | 122 | p.types.filter(TypeDeclaration).toInvertedMap['''refineTypeTo_«base.canonizeName(it.name)»'''] |
119 | } | 123 | } |
120 | } \ No newline at end of file | 124 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternGenerator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternGenerator.xtend index d4c76bb4..5c35fb54 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternGenerator.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternGenerator.xtend | |||
@@ -1,7 +1,6 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns |
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.InverseRelationAssertion | 3 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.InverseRelationAssertion |
4 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.LowerMultiplicityAssertion | ||
5 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.BoolTypeReference | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.BoolTypeReference |
6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.IntTypeReference | 5 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.IntTypeReference |
7 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RealTypeReference | 6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RealTypeReference |
@@ -17,7 +16,11 @@ import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.Transform | |||
17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | 16 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality |
18 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | 17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult |
19 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeInferenceMethod | 18 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeInferenceMethod |
19 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeConstraintHint | ||
20 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.RelationConstraints | ||
21 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagatorStrategy | ||
20 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | 22 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation |
23 | import java.util.Collection | ||
21 | import java.util.HashMap | 24 | import java.util.HashMap |
22 | import java.util.Map | 25 | import java.util.Map |
23 | import org.eclipse.emf.ecore.EAttribute | 26 | import org.eclipse.emf.ecore.EAttribute |
@@ -28,20 +31,23 @@ import org.eclipse.xtend.lib.annotations.Accessors | |||
28 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* | 31 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* |
29 | 32 | ||
30 | class PatternGenerator { | 33 | class PatternGenerator { |
31 | @Accessors(PUBLIC_GETTER) val TypeIndexer typeIndexer //= new TypeIndexer(this) | 34 | @Accessors(PUBLIC_GETTER) val TypeIndexer typeIndexer // = new TypeIndexer(this) |
32 | @Accessors(PUBLIC_GETTER) val RelationDeclarationIndexer relationDeclarationIndexer = new RelationDeclarationIndexer(this) | 35 | @Accessors(PUBLIC_GETTER) val RelationDeclarationIndexer relationDeclarationIndexer = new RelationDeclarationIndexer( |
33 | @Accessors(PUBLIC_GETTER) val RelationDefinitionIndexer relationDefinitionIndexer = new RelationDefinitionIndexer(this) | 36 | this) |
37 | @Accessors(PUBLIC_GETTER) val RelationDefinitionIndexer relationDefinitionIndexer = new RelationDefinitionIndexer( | ||
38 | this) | ||
34 | @Accessors(PUBLIC_GETTER) val ContainmentIndexer containmentIndexer = new ContainmentIndexer(this) | 39 | @Accessors(PUBLIC_GETTER) val ContainmentIndexer containmentIndexer = new ContainmentIndexer(this) |
35 | @Accessors(PUBLIC_GETTER) val InvalidIndexer invalidIndexer = new InvalidIndexer(this) | 40 | @Accessors(PUBLIC_GETTER) val InvalidIndexer invalidIndexer = new InvalidIndexer(this) |
36 | @Accessors(PUBLIC_GETTER) val UnfinishedIndexer unfinishedIndexer = new UnfinishedIndexer(this) | 41 | @Accessors(PUBLIC_GETTER) val UnfinishedIndexer unfinishedIndexer |
37 | @Accessors(PUBLIC_GETTER) val TypeRefinementGenerator typeRefinementGenerator //= new RefinementGenerator(this) | 42 | @Accessors(PUBLIC_GETTER) val TypeRefinementGenerator typeRefinementGenerator // = new RefinementGenerator(this) |
38 | @Accessors(PUBLIC_GETTER) val RelationRefinementGenerator relationRefinementGenerator = new RelationRefinementGenerator(this) | 43 | @Accessors(PUBLIC_GETTER) val RelationRefinementGenerator relationRefinementGenerator = new RelationRefinementGenerator( |
39 | 44 | this) | |
40 | public new(TypeInferenceMethod typeInferenceMethod) { | 45 | |
41 | if(typeInferenceMethod == TypeInferenceMethod.Generic) { | 46 | new(TypeInferenceMethod typeInferenceMethod, ScopePropagatorStrategy scopePropagatorStrategy) { |
47 | if (typeInferenceMethod == TypeInferenceMethod.Generic) { | ||
42 | this.typeIndexer = new GenericTypeIndexer(this) | 48 | this.typeIndexer = new GenericTypeIndexer(this) |
43 | this.typeRefinementGenerator = new GenericTypeRefinementGenerator(this) | 49 | this.typeRefinementGenerator = new GenericTypeRefinementGenerator(this) |
44 | } else if(typeInferenceMethod == TypeInferenceMethod.PreliminaryAnalysis) { | 50 | } else if (typeInferenceMethod == TypeInferenceMethod.PreliminaryAnalysis) { |
45 | this.typeIndexer = new TypeIndexerWithPreliminaryTypeAnalysis(this) | 51 | this.typeIndexer = new TypeIndexerWithPreliminaryTypeAnalysis(this) |
46 | this.typeRefinementGenerator = new TypeRefinementWithPreliminaryTypeAnalysis(this) | 52 | this.typeRefinementGenerator = new TypeRefinementWithPreliminaryTypeAnalysis(this) |
47 | } else { | 53 | } else { |
@@ -49,112 +55,103 @@ class PatternGenerator { | |||
49 | this.typeRefinementGenerator = null | 55 | this.typeRefinementGenerator = null |
50 | throw new IllegalArgumentException('''Unknown type indexing technique : «typeInferenceMethod.name»''') | 56 | throw new IllegalArgumentException('''Unknown type indexing technique : «typeInferenceMethod.name»''') |
51 | } | 57 | } |
58 | this.unfinishedIndexer = new UnfinishedIndexer(this, scopePropagatorStrategy.requiresUpperBoundIndexing) | ||
52 | } | 59 | } |
53 | 60 | ||
54 | public def requiresTypeAnalysis() { | 61 | def requiresTypeAnalysis() { |
55 | typeIndexer.requiresTypeAnalysis || typeRefinementGenerator.requiresTypeAnalysis | 62 | typeIndexer.requiresTypeAnalysis || typeRefinementGenerator.requiresTypeAnalysis |
56 | } | 63 | } |
57 | 64 | ||
58 | public dispatch def referRelation( | 65 | dispatch def CharSequence referRelation(RelationDeclaration referred, String sourceVariable, String targetVariable, |
59 | RelationDeclaration referred, | 66 | Modality modality, Map<String, PQuery> fqn2PQuery) { |
60 | String sourceVariable, | 67 | return this.relationDeclarationIndexer.referRelation(referred, sourceVariable, targetVariable, modality) |
61 | String targetVariable, | ||
62 | Modality modality, | ||
63 | Map<String,PQuery> fqn2PQuery) | ||
64 | { | ||
65 | return this.relationDeclarationIndexer.referRelation(referred,sourceVariable,targetVariable,modality) | ||
66 | } | 68 | } |
67 | public dispatch def referRelation( | 69 | |
68 | RelationDefinition referred, | 70 | dispatch def CharSequence referRelation(RelationDefinition referred, String sourceVariable, String targetVariable, |
69 | String sourceVariable, | 71 | Modality modality, Map<String, PQuery> fqn2PQuery) { |
70 | String targetVariable, | 72 | val pattern = referred.annotations.filter(TransfomedViatraQuery).head.patternFullyQualifiedName.lookup( |
71 | Modality modality, | 73 | fqn2PQuery) |
72 | Map<String,PQuery> fqn2PQuery) | 74 | return this.relationDefinitionIndexer.referPattern(pattern, #[sourceVariable, targetVariable], modality, true, |
73 | { | 75 | false) |
74 | val pattern = referred.annotations.filter(TransfomedViatraQuery).head.patternFullyQualifiedName.lookup(fqn2PQuery) | ||
75 | return this.relationDefinitionIndexer.referPattern(pattern,#[sourceVariable,targetVariable],modality,true,false) | ||
76 | } | 76 | } |
77 | 77 | ||
78 | def public referRelationByName(EReference reference, | 78 | def referRelationByName(EReference reference, String sourceVariable, String targetVariable, Modality modality) { |
79 | String sourceVariable, | 79 | '''find «modality.name.toLowerCase»InRelation«canonizeName('''«reference.name» reference «reference.EContainingClass.name»''')»(problem,interpretation,«sourceVariable»,«targetVariable»);''' |
80 | String targetVariable, | ||
81 | Modality modality) | ||
82 | { | ||
83 | '''find «modality.name.toLowerCase»InRelation«canonizeName('''«reference.name» reference «reference.EContainingClass.name»''') | ||
84 | »(problem,interpretation,«sourceVariable»,«targetVariable»);''' | ||
85 | } | 80 | } |
86 | 81 | ||
87 | def public CharSequence referAttributeByName(EAttribute attribute, | 82 | def CharSequence referAttributeByName(EAttribute attribute, String sourceVariable, String targetVariable, |
88 | String sourceVariable, | 83 | Modality modality) { |
89 | String targetVariable, | 84 | '''find «modality.name.toLowerCase»InRelation«canonizeName('''«attribute.name» attribute «attribute.EContainingClass.name»''')»(problem,interpretation,«sourceVariable»,«targetVariable»);''' |
90 | Modality modality) | ||
91 | { | ||
92 | '''find «modality.name.toLowerCase»InRelation«canonizeName('''«attribute.name» attribute «attribute.EContainingClass.name»''') | ||
93 | »(problem,interpretation,«sourceVariable»,«targetVariable»);''' | ||
94 | } | 85 | } |
95 | 86 | ||
96 | public def canonizeName(String name) { | 87 | def canonizeName(String name) { |
97 | name.split(' ').join('_') | 88 | name.split(' ').join('_') |
98 | } | 89 | } |
99 | 90 | ||
100 | public def lowerMultiplicities(LogicProblem problem) { | 91 | def wfQueries(LogicProblem problem) { |
101 | problem.assertions.map[annotations].flatten.filter(LowerMultiplicityAssertion).filter[!it.relation.isDerived] | 92 | problem.assertions.map[it.annotations].flatten.filter(TransformedViatraWellformednessConstraint).map[it.query] |
102 | } | ||
103 | public def wfQueries(LogicProblem problem) { | ||
104 | problem.assertions.map[it.annotations] | ||
105 | .flatten | ||
106 | .filter(TransformedViatraWellformednessConstraint) | ||
107 | .map[it.query] | ||
108 | } | 93 | } |
109 | public def getContainments(LogicProblem p) { | 94 | |
95 | def getContainments(LogicProblem p) { | ||
110 | return p.containmentHierarchies.head.containmentRelations | 96 | return p.containmentHierarchies.head.containmentRelations |
111 | } | 97 | } |
112 | public def getInverseRelations(LogicProblem p) { | 98 | |
99 | def getInverseRelations(LogicProblem p) { | ||
113 | val inverseRelations = new HashMap | 100 | val inverseRelations = new HashMap |
114 | p.annotations.filter(InverseRelationAssertion).forEach[ | 101 | p.annotations.filter(InverseRelationAssertion).forEach [ |
115 | inverseRelations.put(it.inverseA,it.inverseB) | 102 | inverseRelations.put(it.inverseA, it.inverseB) |
116 | inverseRelations.put(it.inverseB,it.inverseA) | 103 | inverseRelations.put(it.inverseB, it.inverseA) |
117 | ] | 104 | ] |
118 | return inverseRelations | 105 | return inverseRelations |
119 | } | 106 | } |
120 | public def isRepresentative(Relation relation, Relation inverse) { | 107 | |
121 | if(inverse == null) { | 108 | def isRepresentative(Relation relation, Relation inverse) { |
109 | if (relation === null) { | ||
110 | return false | ||
111 | } else if (inverse === null) { | ||
122 | return true | 112 | return true |
123 | } else { | 113 | } else { |
124 | relation.name.compareTo(inverse.name)<1 | 114 | relation.name.compareTo(inverse.name) < 1 |
125 | } | 115 | } |
126 | } | 116 | } |
127 | 117 | ||
128 | public def isDerived(Relation relation) { | 118 | def isDerived(Relation relation) { |
129 | relation.annotations.exists[it instanceof DefinedByDerivedFeature] | 119 | relation.annotations.exists[it instanceof DefinedByDerivedFeature] |
130 | } | 120 | } |
131 | public def getDerivedDefinition(RelationDeclaration relation) { | 121 | |
122 | def getDerivedDefinition(RelationDeclaration relation) { | ||
132 | relation.annotations.filter(DefinedByDerivedFeature).head.query | 123 | relation.annotations.filter(DefinedByDerivedFeature).head.query |
133 | } | 124 | } |
134 | 125 | ||
135 | private def allTypeReferences(LogicProblem problem) { | 126 | private def allTypeReferences(LogicProblem problem) { |
136 | problem.eAllContents.filter(TypeReference).toIterable | 127 | problem.eAllContents.filter(TypeReference).toIterable |
137 | } | 128 | } |
129 | |||
138 | protected def hasBoolean(LogicProblem problem) { | 130 | protected def hasBoolean(LogicProblem problem) { |
139 | problem.allTypeReferences.exists[it instanceof BoolTypeReference] | 131 | problem.allTypeReferences.exists[it instanceof BoolTypeReference] |
140 | } | 132 | } |
133 | |||
141 | protected def hasInteger(LogicProblem problem) { | 134 | protected def hasInteger(LogicProblem problem) { |
142 | problem.allTypeReferences.exists[it instanceof IntTypeReference] | 135 | problem.allTypeReferences.exists[it instanceof IntTypeReference] |
143 | } | 136 | } |
137 | |||
144 | protected def hasReal(LogicProblem problem) { | 138 | protected def hasReal(LogicProblem problem) { |
145 | problem.allTypeReferences.exists[it instanceof RealTypeReference] | 139 | problem.allTypeReferences.exists[it instanceof RealTypeReference] |
146 | } | 140 | } |
141 | |||
147 | protected def hasString(LogicProblem problem) { | 142 | protected def hasString(LogicProblem problem) { |
148 | problem.allTypeReferences.exists[it instanceof StringTypeReference] | 143 | problem.allTypeReferences.exists[it instanceof StringTypeReference] |
149 | } | 144 | } |
150 | 145 | ||
151 | public def transformBaseProperties( | 146 | def transformBaseProperties( |
152 | LogicProblem problem, | 147 | LogicProblem problem, |
153 | PartialInterpretation emptySolution, | 148 | PartialInterpretation emptySolution, |
154 | Map<String,PQuery> fqn2PQuery, | 149 | Map<String, PQuery> fqn2PQuery, |
155 | TypeAnalysisResult typeAnalysisResult | 150 | TypeAnalysisResult typeAnalysisResult, |
151 | RelationConstraints constraints, | ||
152 | Collection<LinearTypeConstraintHint> hints | ||
156 | ) { | 153 | ) { |
157 | 154 | ||
158 | return ''' | 155 | return ''' |
159 | import epackage "http://www.bme.hu/mit/inf/dslreasoner/viatrasolver/partialinterpretationlanguage" | 156 | import epackage "http://www.bme.hu/mit/inf/dslreasoner/viatrasolver/partialinterpretationlanguage" |
160 | import epackage "http://www.bme.hu/mit/inf/dslreasoner/logic/model/problem" | 157 | import epackage "http://www.bme.hu/mit/inf/dslreasoner/logic/model/problem" |
@@ -188,7 +185,7 @@ class PatternGenerator { | |||
188 | 185 | ||
189 | private pattern elementCloseWorld(element:DefinedElement) { | 186 | private pattern elementCloseWorld(element:DefinedElement) { |
190 | PartialInterpretation.openWorldElements(i,element); | 187 | PartialInterpretation.openWorldElements(i,element); |
191 | PartialInterpretation.maxNewElements(i,0); | 188 | PartialInterpretation.maxNewElements(i,0); |
192 | } or { | 189 | } or { |
193 | Scope.targetTypeInterpretation(scope,interpretation); | 190 | Scope.targetTypeInterpretation(scope,interpretation); |
194 | PartialTypeInterpratation.elements(interpretation,element); | 191 | PartialTypeInterpratation.elements(interpretation,element); |
@@ -221,7 +218,7 @@ class PatternGenerator { | |||
221 | ////////// | 218 | ////////// |
222 | // 1.1.1 primitive Type Indexers | 219 | // 1.1.1 primitive Type Indexers |
223 | ////////// | 220 | ////////// |
224 | ««« pattern instanceofBoolean(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | 221 | ««« pattern instanceofBoolean(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { |
225 | ««« find interpretation(problem,interpretation); | 222 | ««« find interpretation(problem,interpretation); |
226 | ««« PartialInterpretation.booleanelements(interpretation,element); | 223 | ««« PartialInterpretation.booleanelements(interpretation,element); |
227 | ««« } | 224 | ««« } |
@@ -279,7 +276,7 @@ class PatternGenerator { | |||
279 | ////////// | 276 | ////////// |
280 | // 3.1 Unfinishedness Measured by Multiplicity | 277 | // 3.1 Unfinishedness Measured by Multiplicity |
281 | ////////// | 278 | ////////// |
282 | «unfinishedIndexer.generateUnfinishedMultiplicityQueries(problem,fqn2PQuery)» | 279 | «unfinishedIndexer.generateUnfinishedMultiplicityQueries(constraints.multiplicityConstraints,fqn2PQuery)» |
283 | 280 | ||
284 | ////////// | 281 | ////////// |
285 | // 3.2 Unfinishedness Measured by WF Queries | 282 | // 3.2 Unfinishedness Measured by WF Queries |
@@ -302,6 +299,13 @@ class PatternGenerator { | |||
302 | // 4.3 Relation refinement | 299 | // 4.3 Relation refinement |
303 | ////////// | 300 | ////////// |
304 | «relationRefinementGenerator.generateRefineReference(problem)» | 301 | «relationRefinementGenerator.generateRefineReference(problem)» |
305 | ''' | 302 | |
303 | ////////// | ||
304 | // 5 Hints | ||
305 | ////////// | ||
306 | «FOR hint : hints» | ||
307 | «hint.getAdditionalPatterns(this)» | ||
308 | «ENDFOR» | ||
309 | ''' | ||
306 | } | 310 | } |
307 | } | 311 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternProvider.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternProvider.xtend index 0e13a5e1..f5c85524 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternProvider.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/PatternProvider.xtend | |||
@@ -2,16 +2,23 @@ package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns | |||
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation | 3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation |
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration |
5 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDefinition | ||
5 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | 6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type |
6 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | 7 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem |
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | ||
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics | 9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics |
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysis | 10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysis |
9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | 11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult |
10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeInferenceMethod | 12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeInferenceMethod |
13 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.RelationConstraints | ||
14 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.RelationMultiplicityConstraint | ||
15 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagatorStrategy | ||
11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.util.ParseUtil | 16 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.util.ParseUtil |
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | 17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation |
13 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace | 18 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace |
19 | import java.util.Collection | ||
14 | import java.util.Map | 20 | import java.util.Map |
21 | import java.util.Set | ||
15 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | 22 | import org.eclipse.viatra.query.runtime.api.IPatternMatch |
16 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification | 23 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification |
17 | import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher | 24 | import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher |
@@ -19,79 +26,117 @@ import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery | |||
19 | import org.eclipse.xtend.lib.annotations.Data | 26 | import org.eclipse.xtend.lib.annotations.Data |
20 | 27 | ||
21 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* | 28 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* |
22 | import java.util.Collection | 29 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeConstraintHint |
23 | import java.util.Set | ||
24 | 30 | ||
25 | @Data class GeneratedPatterns { | 31 | @Data |
26 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> invalidWFQueries | 32 | class GeneratedPatterns { |
27 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedWFQueries | 33 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> invalidWFQueries |
28 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedMulticiplicityQueries | 34 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedWFQueries |
29 | public Map<ObjectCreationPrecondition, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refineObjectQueries | 35 | public Map<RelationMultiplicityConstraint, UnifinishedMultiplicityQueries> multiplicityConstraintQueries |
30 | public Map<? extends Type, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refineTypeQueries | 36 | public IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> hasElementInContainmentQuery |
31 | public Map<Pair<RelationDeclaration, Relation>, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refinerelationQueries | 37 | public Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> unfinishedMulticiplicityQueries |
38 | public Map<ObjectCreationPrecondition, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refineObjectQueries | ||
39 | public Map<? extends Type, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refineTypeQueries | ||
40 | public Map<Pair<RelationDeclaration, Relation>, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> refinerelationQueries | ||
41 | public Map<RelationDefinition, ModalPatternQueries> modalRelationQueries | ||
32 | public Collection<IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allQueries | 42 | public Collection<IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> allQueries |
33 | } | 43 | } |
34 | 44 | ||
45 | @Data | ||
46 | class ModalPatternQueries { | ||
47 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> mayQuery | ||
48 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> mustQuery | ||
49 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> currentQuery | ||
50 | } | ||
51 | |||
52 | @Data | ||
53 | class UnifinishedMultiplicityQueries { | ||
54 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> unfinishedMultiplicityQuery | ||
55 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> unrepairableMultiplicityQuery | ||
56 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> remainingInverseMultiplicityQuery | ||
57 | val IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>> remainingContentsQuery | ||
58 | } | ||
59 | |||
35 | class PatternProvider { | 60 | class PatternProvider { |
36 | 61 | ||
37 | val TypeAnalysis typeAnalysis = new TypeAnalysis | 62 | val TypeAnalysis typeAnalysis = new TypeAnalysis |
38 | 63 | ||
39 | public def generateQueries( | 64 | def generateQueries(LogicProblem problem, PartialInterpretation emptySolution, ModelGenerationStatistics statistics, |
40 | LogicProblem problem, | 65 | Set<PQuery> existingQueries, ReasonerWorkspace workspace, TypeInferenceMethod typeInferenceMethod, |
41 | PartialInterpretation emptySolution, | 66 | ScopePropagatorStrategy scopePropagatorStrategy, RelationConstraints relationConstraints, |
42 | ModelGenerationStatistics statistics, | 67 | Collection<LinearTypeConstraintHint> hints, boolean writeToFile) { |
43 | Set<PQuery> existingQueries, | ||
44 | ReasonerWorkspace workspace, | ||
45 | TypeInferenceMethod typeInferenceMethod, | ||
46 | boolean writeToFile) | ||
47 | { | ||
48 | val fqn2Query = existingQueries.toMap[it.fullyQualifiedName] | 68 | val fqn2Query = existingQueries.toMap[it.fullyQualifiedName] |
49 | val PatternGenerator patternGenerator = new PatternGenerator(typeInferenceMethod) | 69 | val PatternGenerator patternGenerator = new PatternGenerator(typeInferenceMethod, scopePropagatorStrategy) |
50 | val typeAnalysisResult = if(patternGenerator.requiresTypeAnalysis) { | 70 | val typeAnalysisResult = if (patternGenerator.requiresTypeAnalysis) { |
51 | val startTime = System.nanoTime | 71 | val startTime = System.nanoTime |
52 | val result = typeAnalysis.performTypeAnalysis(problem,emptySolution) | 72 | val result = typeAnalysis.performTypeAnalysis(problem, emptySolution) |
53 | val typeAnalysisTime = System.nanoTime - startTime | 73 | val typeAnalysisTime = System.nanoTime - startTime |
54 | statistics.PreliminaryTypeAnalisisTime = typeAnalysisTime | 74 | statistics.preliminaryTypeAnalisisTime = typeAnalysisTime |
55 | result | 75 | result |
56 | } else { | 76 | } else { |
57 | null | 77 | null |
58 | } | 78 | } |
59 | val baseIndexerFile = patternGenerator.transformBaseProperties(problem,emptySolution,fqn2Query,typeAnalysisResult) | 79 | val baseIndexerFile = patternGenerator.transformBaseProperties(problem, emptySolution, fqn2Query, |
60 | if(writeToFile) { | 80 | typeAnalysisResult, relationConstraints, hints) |
61 | workspace.writeText('''generated3valued.vql_deactivated''',baseIndexerFile) | 81 | if (writeToFile) { |
82 | workspace.writeText('''generated3valued.vql_deactivated''', baseIndexerFile) | ||
62 | } | 83 | } |
63 | val ParseUtil parseUtil = new ParseUtil | 84 | val ParseUtil parseUtil = new ParseUtil |
64 | val generatedQueries = parseUtil.parse(baseIndexerFile) | 85 | val generatedQueries = parseUtil.parse(baseIndexerFile) |
65 | val runtimeQueries = calclulateRuntimeQueries(patternGenerator,problem,emptySolution,typeAnalysisResult,generatedQueries); | 86 | val runtimeQueries = calclulateRuntimeQueries(patternGenerator, problem, emptySolution, typeAnalysisResult, |
87 | relationConstraints, generatedQueries) | ||
66 | return runtimeQueries | 88 | return runtimeQueries |
67 | } | 89 | } |
68 | 90 | ||
69 | private def GeneratedPatterns calclulateRuntimeQueries( | 91 | private def GeneratedPatterns calclulateRuntimeQueries( |
70 | PatternGenerator patternGenerator, | 92 | PatternGenerator patternGenerator, |
71 | LogicProblem problem, | 93 | LogicProblem problem, |
72 | PartialInterpretation emptySolution, | 94 | PartialInterpretation emptySolution, |
73 | TypeAnalysisResult typeAnalysisResult, | 95 | TypeAnalysisResult typeAnalysisResult, |
74 | Map<String, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> queries | 96 | RelationConstraints relationConstraints, |
97 | Map<String, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> queries | ||
75 | ) { | 98 | ) { |
76 | val Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 99 | val invalidWFQueries = patternGenerator.invalidIndexer.getInvalidateByWfQueryNames(problem).mapValues [ |
77 | invalidWFQueries = patternGenerator.invalidIndexer.getInvalidateByWfQueryNames(problem).mapValues[it.lookup(queries)] | 100 | it.lookup(queries) |
78 | val Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 101 | ] |
79 | unfinishedWFQueries = patternGenerator.unfinishedIndexer.getUnfinishedWFQueryNames(problem).mapValues[it.lookup(queries)] | 102 | val unfinishedWFQueries = patternGenerator.unfinishedIndexer.getUnfinishedWFQueryNames(problem).mapValues [ |
80 | val Map<Relation, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 103 | it.lookup(queries) |
81 | unfinishedMultiplicityQueries = patternGenerator.unfinishedIndexer.getUnfinishedMultiplicityQueries(problem).mapValues[it.lookup(queries)] | 104 | ] |
82 | val Map<ObjectCreationPrecondition, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 105 | val multiplicityConstraintQueries = patternGenerator.unfinishedIndexer.getUnfinishedMultiplicityQueries( |
83 | refineObjectsQueries = patternGenerator.typeRefinementGenerator.getRefineObjectQueryNames(problem,emptySolution,typeAnalysisResult).mapValues[it.lookup(queries)] | 106 | relationConstraints.multiplicityConstraints).mapValues [ |
84 | val Map<? extends Type, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 107 | new UnifinishedMultiplicityQueries(unfinishedMultiplicityQueryName?.lookup(queries), |
85 | refineTypeQueries = patternGenerator.typeRefinementGenerator.getRefineTypeQueryNames(problem,emptySolution,typeAnalysisResult).mapValues[it.lookup(queries)] | 108 | unrepairableMultiplicityQueryName?.lookup(queries), |
86 | val Map<Pair<RelationDeclaration, Relation>, IQuerySpecification<? extends ViatraQueryMatcher<? extends IPatternMatch>>> | 109 | remainingInverseMultiplicityQueryName?.lookup(queries), remainingContentsQueryName?.lookup(queries)) |
87 | refineRelationQueries = patternGenerator.relationRefinementGenerator.getRefineRelationQueries(problem).mapValues[it.lookup(queries)] | 110 | ] |
111 | val hasElementInContainmentQuery = patternGenerator.typeRefinementGenerator.hasElementInContainmentName.lookup( | ||
112 | queries) | ||
113 | val unfinishedMultiplicityQueries = multiplicityConstraintQueries.entrySet.filter [ | ||
114 | value.unfinishedMultiplicityQuery !== null | ||
115 | ].toMap([key.relation], [value.unfinishedMultiplicityQuery]) | ||
116 | val refineObjectsQueries = patternGenerator.typeRefinementGenerator. | ||
117 | getRefineObjectQueryNames(problem, emptySolution, typeAnalysisResult).mapValues[it.lookup(queries)] | ||
118 | val refineTypeQueries = patternGenerator.typeRefinementGenerator.getRefineTypeQueryNames(problem, emptySolution, | ||
119 | typeAnalysisResult).mapValues[it.lookup(queries)] | ||
120 | val refineRelationQueries = patternGenerator.relationRefinementGenerator.getRefineRelationQueries(problem). | ||
121 | mapValues[it.lookup(queries)] | ||
122 | val modalRelationQueries = problem.relations.filter(RelationDefinition).toMap([it], [ relationDefinition | | ||
123 | val indexer = patternGenerator.relationDefinitionIndexer | ||
124 | new ModalPatternQueries( | ||
125 | indexer.relationDefinitionName(relationDefinition, Modality.MAY).lookup(queries), | ||
126 | indexer.relationDefinitionName(relationDefinition, Modality.MUST).lookup(queries), | ||
127 | indexer.relationDefinitionName(relationDefinition, Modality.CURRENT).lookup(queries) | ||
128 | ) | ||
129 | ]) | ||
88 | return new GeneratedPatterns( | 130 | return new GeneratedPatterns( |
89 | invalidWFQueries, | 131 | invalidWFQueries, |
90 | unfinishedWFQueries, | 132 | unfinishedWFQueries, |
133 | multiplicityConstraintQueries, | ||
134 | hasElementInContainmentQuery, | ||
91 | unfinishedMultiplicityQueries, | 135 | unfinishedMultiplicityQueries, |
92 | refineObjectsQueries, | 136 | refineObjectsQueries, |
93 | refineTypeQueries, | 137 | refineTypeQueries, |
94 | refineRelationQueries, | 138 | refineRelationQueries, |
139 | modalRelationQueries, | ||
95 | queries.values | 140 | queries.values |
96 | ) | 141 | ) |
97 | } | 142 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationDefinitionIndexer.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationDefinitionIndexer.xtend index 9723373f..0ae28b66 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationDefinitionIndexer.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationDefinitionIndexer.xtend | |||
@@ -5,6 +5,7 @@ import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | |||
5 | import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.TransfomedViatraQuery | 5 | import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.TransfomedViatraQuery |
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | 6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality |
7 | import java.util.Map | 7 | import java.util.Map |
8 | import org.eclipse.emf.common.util.Enumerator | ||
8 | import org.eclipse.emf.ecore.EAttribute | 9 | import org.eclipse.emf.ecore.EAttribute |
9 | import org.eclipse.emf.ecore.EEnumLiteral | 10 | import org.eclipse.emf.ecore.EEnumLiteral |
10 | import org.eclipse.emf.ecore.EReference | 11 | import org.eclipse.emf.ecore.EReference |
@@ -17,6 +18,7 @@ import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Equality | |||
17 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.ExportedParameter | 18 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.ExportedParameter |
18 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Inequality | 19 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.Inequality |
19 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.NegativePatternCall | 20 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.NegativePatternCall |
21 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.TypeFilterConstraint | ||
20 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryTransitiveClosure | 22 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.BinaryTransitiveClosure |
21 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.ConstantValue | 23 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.ConstantValue |
22 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.PositivePatternCall | 24 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.PositivePatternCall |
@@ -24,7 +26,6 @@ import org.eclipse.viatra.query.runtime.matchers.psystem.basicenumerables.TypeCo | |||
24 | import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery | 26 | import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery |
25 | 27 | ||
26 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* | 28 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* |
27 | import org.eclipse.viatra.query.runtime.matchers.psystem.basicdeferred.TypeFilterConstraint | ||
28 | 29 | ||
29 | class RelationDefinitionIndexer { | 30 | class RelationDefinitionIndexer { |
30 | val PatternGenerator base; | 31 | val PatternGenerator base; |
@@ -33,7 +34,7 @@ class RelationDefinitionIndexer { | |||
33 | this.base = base | 34 | this.base = base |
34 | } | 35 | } |
35 | 36 | ||
36 | public def generateRelationDefinitions( | 37 | def generateRelationDefinitions( |
37 | LogicProblem problem, | 38 | LogicProblem problem, |
38 | Iterable<RelationDefinition> relations, | 39 | Iterable<RelationDefinition> relations, |
39 | Map<String,PQuery> fqn2PQuery) { | 40 | Map<String,PQuery> fqn2PQuery) { |
@@ -71,7 +72,7 @@ class RelationDefinitionIndexer { | |||
71 | ] | 72 | ] |
72 | } | 73 | } |
73 | 74 | ||
74 | private def relationDefinitionName(RelationDefinition relation, Modality modality) | 75 | def String relationDefinitionName(RelationDefinition relation, Modality modality) |
75 | '''«modality.name.toLowerCase»InRelation_«base.canonizeName(relation.name)»''' | 76 | '''«modality.name.toLowerCase»InRelation_«base.canonizeName(relation.name)»''' |
76 | 77 | ||
77 | private def canonizeName(PVariable v) { | 78 | private def canonizeName(PVariable v) { |
@@ -109,7 +110,7 @@ class RelationDefinitionIndexer { | |||
109 | else return Modality::MUST | 110 | else return Modality::MUST |
110 | } | 111 | } |
111 | 112 | ||
112 | def public referPattern(PQuery p, String[] variables, Modality modality, boolean positive, boolean transitive) ''' | 113 | def referPattern(PQuery p, String[] variables, Modality modality, boolean positive, boolean transitive) ''' |
113 | «IF !positive»neg «ENDIF»find «IF transitive»twoParam_«ENDIF»«modality.name.toLowerCase»InRelation_pattern_«p.fullyQualifiedName.replace('.','_')»«IF transitive»+«ENDIF»(«IF !transitive»problem,interpretation,«ENDIF»«variables.join(',')»); | 114 | «IF !positive»neg «ENDIF»find «IF transitive»twoParam_«ENDIF»«modality.name.toLowerCase»InRelation_pattern_«p.fullyQualifiedName.replace('.','_')»«IF transitive»+«ENDIF»(«IF !transitive»problem,interpretation,«ENDIF»«variables.join(',')»); |
114 | ''' | 115 | ''' |
115 | 116 | ||
@@ -227,7 +228,11 @@ class RelationDefinitionIndexer { | |||
227 | var String additionalDefinition; | 228 | var String additionalDefinition; |
228 | if(target instanceof EEnumLiteral) { | 229 | if(target instanceof EEnumLiteral) { |
229 | targetString = '''const_«target.name»_«target.EEnum.name»''' | 230 | targetString = '''const_«target.name»_«target.EEnum.name»''' |
230 | additionalDefinition = '''DefinedElement.name(«targetString»,"«target.name» «target.EEnum.name»"); //LogicProblem.elements(problem,«targetString»);''' | 231 | additionalDefinition = '''DefinedElement.name(«targetString»,"«target.name» literal «target.EEnum.name»"); //LogicProblem.elements(problem,«targetString»);''' |
232 | } else if(target instanceof Enumerator) { | ||
233 | // XXX We should get the corresponding EEnum name instead of the java class name. | ||
234 | targetString = '''const_«target.name»_«target.class.simpleName»''' | ||
235 | additionalDefinition = '''DefinedElement.name(«targetString»,"«target.name» literal «target.class.simpleName»"); //LogicProblem.elements(problem,«targetString»);''' | ||
231 | } else if(target instanceof Integer) { | 236 | } else if(target instanceof Integer) { |
232 | targetString = '''const_«target»_Integer''' | 237 | targetString = '''const_«target»_Integer''' |
233 | additionalDefinition = '''IntegerElement.value(«targetString»,«target»);''' | 238 | additionalDefinition = '''IntegerElement.value(«targetString»,«target»);''' |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationRefinementGenerator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationRefinementGenerator.xtend index f9e9baea..d915d47e 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationRefinementGenerator.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/RelationRefinementGenerator.xtend | |||
@@ -9,77 +9,71 @@ import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.ComplexTypeReference | |||
9 | 9 | ||
10 | class RelationRefinementGenerator { | 10 | class RelationRefinementGenerator { |
11 | PatternGenerator base; | 11 | PatternGenerator base; |
12 | |||
12 | public new(PatternGenerator base) { | 13 | public new(PatternGenerator base) { |
13 | this.base = base | 14 | this.base = base |
14 | } | 15 | } |
15 | 16 | ||
16 | def CharSequence generateRefineReference(LogicProblem p) { | 17 | def CharSequence generateRefineReference(LogicProblem p) ''' |
17 | return ''' | 18 | «FOR relationRefinement : this.getRelationRefinements(p)» |
18 | «FOR relationRefinement: this.getRelationRefinements(p)» | 19 | pattern «relationRefinementQueryName(relationRefinement.key,relationRefinement.value)»( |
19 | pattern «relationRefinementQueryName(relationRefinement.key,relationRefinement.value)»( | 20 | problem:LogicProblem, interpretation:PartialInterpretation, |
20 | problem:LogicProblem, interpretation:PartialInterpretation, | 21 | relationIterpretation:PartialRelationInterpretation«IF relationRefinement.value !== null», oppositeInterpretation:PartialRelationInterpretation«ENDIF», |
21 | relationIterpretation:PartialRelationInterpretation«IF relationRefinement.value != null», oppositeInterpretation:PartialRelationInterpretation«ENDIF», | 22 | from: DefinedElement, to: DefinedElement) |
22 | from: DefinedElement, to: DefinedElement) | 23 | { |
23 | { | 24 | find interpretation(problem,interpretation); |
24 | find interpretation(problem,interpretation); | 25 | PartialInterpretation.partialrelationinterpretation(interpretation,relationIterpretation); |
25 | PartialInterpretation.partialrelationinterpretation(interpretation,relationIterpretation); | 26 | PartialRelationInterpretation.interpretationOf.name(relationIterpretation,"«relationRefinement.key.name»"); |
26 | PartialRelationInterpretation.interpretationOf.name(relationIterpretation,"«relationRefinement.key.name»"); | 27 | «IF relationRefinement.value !== null» |
27 | «IF relationRefinement.value != null» | 28 | PartialInterpretation.partialrelationinterpretation(interpretation,oppositeInterpretation); |
28 | PartialInterpretation.partialrelationinterpretation(interpretation,oppositeInterpretation); | 29 | PartialRelationInterpretation.interpretationOf.name(oppositeInterpretation,"«relationRefinement.value.name»"); |
29 | PartialRelationInterpretation.interpretationOf.name(oppositeInterpretation,"«relationRefinement.value.name»"); | 30 | «ENDIF» |
30 | «ENDIF» | 31 | find mustExist(problem, interpretation, from); |
31 | find mustExist(problem, interpretation, from); | 32 | find mustExist(problem, interpretation, to); |
32 | find mustExist(problem, interpretation, to); | 33 | «base.typeIndexer.referInstanceOfByReference(relationRefinement.key.parameters.get(0), Modality::MUST,"from")» |
33 | «base.typeIndexer.referInstanceOfByReference(relationRefinement.key.parameters.get(0), Modality::MUST,"from")» | 34 | «base.typeIndexer.referInstanceOfByReference(relationRefinement.key.parameters.get(1), Modality::MUST,"to")» |
34 | «base.typeIndexer.referInstanceOfByReference(relationRefinement.key.parameters.get(1), Modality::MUST,"to")» | 35 | «base.relationDeclarationIndexer.referRelation(relationRefinement.key,"from","to",Modality.MAY)» |
35 | «base.relationDeclarationIndexer.referRelation(relationRefinement.key,"from","to",Modality.MAY)» | 36 | neg «base.relationDeclarationIndexer.referRelation(relationRefinement.key,"from","to",Modality.MUST)» |
36 | neg «base.relationDeclarationIndexer.referRelation(relationRefinement.key,"from","to",Modality.MUST)» | 37 | } |
37 | } | ||
38 | «ENDFOR» | 38 | «ENDFOR» |
39 | ''' | 39 | ''' |
40 | } | 40 | |
41 | |||
42 | def String relationRefinementQueryName(RelationDeclaration relation, Relation inverseRelation) { | 41 | def String relationRefinementQueryName(RelationDeclaration relation, Relation inverseRelation) { |
43 | '''«IF inverseRelation != null | 42 | '''«IF inverseRelation !== null»refineRelation_«base.canonizeName(relation.name)»_and_«base.canonizeName(inverseRelation.name)»«ELSE»refineRelation_«base.canonizeName(relation.name)»«ENDIF»''' |
44 | »refineRelation_«base.canonizeName(relation.name)»_and_«base.canonizeName(inverseRelation.name)»« | ||
45 | ELSE | ||
46 | »refineRelation_«base.canonizeName(relation.name)»«ENDIF»''' | ||
47 | } | 43 | } |
48 | 44 | ||
49 | def referRefinementQuery(RelationDeclaration relation, Relation inverseRelation, String relInterpretationName, | 45 | def referRefinementQuery(RelationDeclaration relation, Relation inverseRelation, String relInterpretationName, |
50 | String inverseInterpretationName, String sourceName, String targetName) | 46 | String inverseInterpretationName, String sourceName, |
51 | '''find «this.relationRefinementQueryName(relation,inverseRelation)»(problem, interpretation, «relInterpretationName», «IF inverseRelation != null»inverseInterpretationName, «ENDIF»«sourceName», «targetName»);''' | 47 | String targetName) '''find «this.relationRefinementQueryName(relation,inverseRelation)»(problem, interpretation, «relInterpretationName», «IF inverseRelation !== null»«inverseInterpretationName», «ENDIF»«sourceName», «targetName»);''' |
52 | 48 | ||
53 | def getRefineRelationQueries(LogicProblem p) { | 49 | def getRefineRelationQueries(LogicProblem p) { |
54 | // val containmentRelations = p.containmentHierarchies.map[containmentRelations].flatten.toSet | 50 | // val containmentRelations = p.containmentHierarchies.map[containmentRelations].flatten.toSet |
55 | // p.relations.filter(RelationDeclaration).filter[!containmentRelations.contains(it)].toInvertedMap['''refineRelation_«base.canonizeName(it.name)»'''] | 51 | // p.relations.filter(RelationDeclaration).filter[!containmentRelations.contains(it)].toInvertedMap['''refineRelation_«base.canonizeName(it.name)»'''] |
56 | /* | 52 | /* |
57 | val res = new LinkedHashMap | 53 | * val res = new LinkedHashMap |
58 | for(relation: getRelationRefinements(p)) { | 54 | * for(relation: getRelationRefinements(p)) { |
59 | if(inverseRelations.containsKey(relation)) { | 55 | * if(inverseRelations.containsKey(relation)) { |
60 | val name = '''refineRelation_«base.canonizeName(relation.name)»_and_«base.canonizeName(inverseRelations.get(relation).name)»''' | 56 | * val name = '''refineRelation_«base.canonizeName(relation.name)»_and_«base.canonizeName(inverseRelations.get(relation).name)»''' |
61 | res.put(relation -> inverseRelations.get(relation),name) | 57 | * res.put(relation -> inverseRelations.get(relation),name) |
62 | } else { | 58 | * } else { |
63 | val name = '''refineRelation_«base.canonizeName(relation.name)»''' | 59 | * val name = '''refineRelation_«base.canonizeName(relation.name)»''' |
64 | res.put(relation -> null,name) | 60 | * res.put(relation -> null,name) |
65 | } | 61 | * } |
66 | } | 62 | * } |
67 | return res*/ | 63 | return res*/ |
68 | 64 | getRelationRefinements(p).toInvertedMap[relationRefinementQueryName(it.key, it.value)] | |
69 | getRelationRefinements(p).toInvertedMap[relationRefinementQueryName(it.key,it.value)] | ||
70 | } | 65 | } |
71 | |||
72 | 66 | ||
73 | def getRelationRefinements(LogicProblem p) { | 67 | def getRelationRefinements(LogicProblem p) { |
74 | val inverses = base.getInverseRelations(p) | 68 | val inverses = base.getInverseRelations(p) |
75 | val containments = base.getContainments(p) | 69 | val containments = base.getContainments(p) |
76 | val list = new LinkedList | 70 | val list = new LinkedList |
77 | for(relation : p.relations.filter(RelationDeclaration)) { | 71 | for (relation : p.relations.filter(RelationDeclaration)) { |
78 | if(!containments.contains(relation)) { | 72 | if (!containments.contains(relation)) { |
79 | if(inverses.containsKey(relation)) { | 73 | if (inverses.containsKey(relation)) { |
80 | val inverse = inverses.get(relation) | 74 | val inverse = inverses.get(relation) |
81 | if(!containments.contains(inverse)) { | 75 | if (!containments.contains(inverse)) { |
82 | if(base.isRepresentative(relation,inverse)) { | 76 | if (base.isRepresentative(relation, inverse)) { |
83 | list += (relation -> inverse) | 77 | list += (relation -> inverse) |
84 | } | 78 | } |
85 | } | 79 | } |
@@ -90,4 +84,4 @@ class RelationRefinementGenerator { | |||
90 | } | 84 | } |
91 | return list | 85 | return list |
92 | } | 86 | } |
93 | } \ No newline at end of file | 87 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexer.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexer.xtend index d1d57189..7d687e99 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexer.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexer.xtend | |||
@@ -1,52 +1,122 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns |
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | ||
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
5 | import org.eclipse.emf.ecore.EClass | ||
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | ||
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | ||
9 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.ComplexTypeReference | ||
10 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.BoolTypeReference | 3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.BoolTypeReference |
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.ComplexTypeReference | ||
11 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.IntTypeReference | 5 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.IntTypeReference |
12 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RealTypeReference | 6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RealTypeReference |
13 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.StringTypeReference | 7 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.StringTypeReference |
8 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
9 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | ||
10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | ||
11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | ||
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
14 | import java.math.BigDecimal | 13 | import java.math.BigDecimal |
14 | import org.eclipse.emf.ecore.EClass | ||
15 | import org.eclipse.xtend.lib.annotations.Accessors | ||
16 | import org.eclipse.xtend.lib.annotations.FinalFieldsConstructor | ||
15 | 17 | ||
18 | @FinalFieldsConstructor | ||
16 | abstract class TypeIndexer { | 19 | abstract class TypeIndexer { |
17 | public def CharSequence getRequiredQueries() | 20 | @Accessors(PROTECTED_GETTER) val PatternGenerator base |
18 | public def boolean requiresTypeAnalysis() | 21 | |
19 | public def CharSequence generateInstanceOfQueries(LogicProblem problem,PartialInterpretation emptySolution,TypeAnalysisResult typeAnalysisResult) | 22 | def CharSequence getRequiredQueries() ''' |
20 | public def CharSequence referInstanceOf(Type type, Modality modality, String variableName) | 23 | private pattern typeInterpretation(problem:LogicProblem, interpretation:PartialInterpretation, type:TypeDeclaration, typeInterpretation:PartialComplexTypeInterpretation) { |
21 | public def CharSequence referInstanceOf(EClass type, Modality modality, String variableName) | 24 | find interpretation(problem,interpretation); |
22 | 25 | LogicProblem.types(problem,type); | |
23 | public def dispatch CharSequence referInstanceOfByReference(ComplexTypeReference reference, Modality modality, String variableName) { | 26 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); |
24 | reference.referred.referInstanceOf(modality,variableName) | 27 | PartialComplexTypeInterpretation.interpretationOf(typeInterpretation,type); |
25 | } | 28 | } |
26 | public def dispatch CharSequence referInstanceOfByReference(BoolTypeReference reference, Modality modality, String variableName) { | 29 | |
30 | private pattern directInstanceOf(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, type:Type) { | ||
31 | find interpretation(problem,interpretation); | ||
32 | LogicProblem.types(problem,type); | ||
33 | TypeDefinition.elements(type,element); | ||
34 | } or { | ||
35 | find interpretation(problem,interpretation); | ||
36 | find typeInterpretation(problem,interpretation,type,typeInterpretation); | ||
37 | PartialComplexTypeInterpretation.elements(typeInterpretation,element); | ||
38 | } | ||
39 | |||
40 | private pattern isPrimitive(element: PrimitiveElement) { | ||
41 | PrimitiveElement(element); | ||
42 | } | ||
43 | ''' | ||
44 | |||
45 | def boolean requiresTypeAnalysis() | ||
46 | |||
47 | def CharSequence generateInstanceOfQueries(LogicProblem problem, PartialInterpretation emptySolution, | ||
48 | TypeAnalysisResult typeAnalysisResult) ''' | ||
49 | «FOR type : problem.types» | ||
50 | «problem.generateMustInstenceOf(type, typeAnalysisResult)» | ||
51 | «problem.generateMayInstanceOf(type, typeAnalysisResult)» | ||
52 | «ENDFOR» | ||
53 | ''' | ||
54 | |||
55 | protected def CharSequence generateMustInstenceOf(LogicProblem problem, Type type, | ||
56 | TypeAnalysisResult typeAnalysisResult) ''' | ||
57 | /** | ||
58 | * An element must be an instance of type "«type.name»". | ||
59 | */ | ||
60 | private pattern «patternName(type,Modality.MUST)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | ||
61 | Type.name(type,"«type.name»"); | ||
62 | find directInstanceOf(problem,interpretation,element,type); | ||
63 | } | ||
64 | ''' | ||
65 | |||
66 | protected def CharSequence generateMayInstanceOf(LogicProblem problem, Type type, | ||
67 | TypeAnalysisResult typeAnalysisResult) | ||
68 | |||
69 | protected def patternName(Type type, | ||
70 | Modality modality) '''«modality.toBase»InstanceOf«base.canonizeName(type.name)»''' | ||
71 | |||
72 | def referInstanceOf(Type type, Modality modality, String variableName) { | ||
73 | '''find «patternName(type,modality)»(problem,interpretation,«variableName»);''' | ||
74 | } | ||
75 | |||
76 | def referInstanceOf(EClass type, Modality modality, String variableName) { | ||
77 | '''find «modality.toBase»InstanceOf«base.canonizeName('''«type.name» class''')»(problem,interpretation,«variableName»);''' | ||
78 | } | ||
79 | |||
80 | def dispatch CharSequence referInstanceOfByReference(ComplexTypeReference reference, Modality modality, | ||
81 | String variableName) { | ||
82 | reference.referred.referInstanceOf(modality, variableName) | ||
83 | } | ||
84 | |||
85 | def dispatch CharSequence referInstanceOfByReference(BoolTypeReference reference, Modality modality, | ||
86 | String variableName) { | ||
27 | '''BooleanElement(«variableName»);''' | 87 | '''BooleanElement(«variableName»);''' |
28 | } | 88 | } |
29 | public def dispatch CharSequence referInstanceOfByReference(IntTypeReference reference, Modality modality, String variableName) { | 89 | |
90 | def dispatch CharSequence referInstanceOfByReference(IntTypeReference reference, Modality modality, | ||
91 | String variableName) { | ||
30 | '''IntegerElement(«variableName»);''' | 92 | '''IntegerElement(«variableName»);''' |
31 | } | 93 | } |
32 | public def dispatch CharSequence referInstanceOfByReference(RealTypeReference reference, Modality modality, String variableName) { | 94 | |
95 | def dispatch CharSequence referInstanceOfByReference(RealTypeReference reference, Modality modality, | ||
96 | String variableName) { | ||
33 | '''RealElement(«variableName»);''' | 97 | '''RealElement(«variableName»);''' |
34 | } | 98 | } |
35 | public def dispatch CharSequence referInstanceOfByReference(StringTypeReference reference, Modality modality, String variableName) { | 99 | |
100 | def dispatch CharSequence referInstanceOfByReference(StringTypeReference reference, Modality modality, | ||
101 | String variableName) { | ||
36 | '''StringElement(«variableName»);''' | 102 | '''StringElement(«variableName»);''' |
37 | } | 103 | } |
38 | public def dispatch CharSequence referPrimitiveValue(String variableName, Boolean value) { | 104 | |
105 | def dispatch CharSequence referPrimitiveValue(String variableName, Boolean value) { | ||
39 | '''BooleanElement.value(«variableName»,«value»);''' | 106 | '''BooleanElement.value(«variableName»,«value»);''' |
40 | } | 107 | } |
41 | public def dispatch CharSequence referPrimitiveValue(String variableName, Integer value) { | 108 | |
109 | def dispatch CharSequence referPrimitiveValue(String variableName, Integer value) { | ||
42 | '''IntegerElement.value(«variableName»,«value»);''' | 110 | '''IntegerElement.value(«variableName»,«value»);''' |
43 | } | 111 | } |
44 | public def dispatch CharSequence referPrimitiveValue(String variableName, BigDecimal value) { | 112 | |
113 | def dispatch CharSequence referPrimitiveValue(String variableName, BigDecimal value) { | ||
45 | '''RealElement.value(«variableName»,«value»);''' | 114 | '''RealElement.value(«variableName»,«value»);''' |
46 | } | 115 | } |
47 | ///TODO: de-escaping string literals | 116 | |
48 | public def dispatch CharSequence referPrimitiveValue(String variableName, String value) { | 117 | def dispatch CharSequence referPrimitiveValue(String variableName, String value) { |
118 | // /TODO: de-escaping string literals | ||
49 | '''StringElement.value(«variableName»,"«value»");''' | 119 | '''StringElement.value(«variableName»,"«value»");''' |
50 | } | 120 | } |
51 | 121 | ||
52 | } \ No newline at end of file | 122 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexerWithPreliminaryTypeAnalysis.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexerWithPreliminaryTypeAnalysis.xtend index d3af0426..0393b803 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexerWithPreliminaryTypeAnalysis.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeIndexerWithPreliminaryTypeAnalysis.xtend | |||
@@ -4,113 +4,51 @@ import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | |||
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem |
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | 5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality |
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult | 6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeAnalysisResult |
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.TypeRefinementPrecondition | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
9 | import org.eclipse.emf.ecore.EClass | ||
10 | 7 | ||
11 | class TypeIndexerWithPreliminaryTypeAnalysis extends TypeIndexer{ | 8 | class TypeIndexerWithPreliminaryTypeAnalysis extends TypeIndexer { |
12 | val PatternGenerator base; | ||
13 | |||
14 | new(PatternGenerator base) { | 9 | new(PatternGenerator base) { |
15 | this.base = base | 10 | super(base) |
16 | } | 11 | } |
12 | |||
17 | override requiresTypeAnalysis() { true } | 13 | override requiresTypeAnalysis() { true } |
18 | 14 | ||
19 | override getRequiredQueries() ''' | 15 | protected override generateMayInstanceOf(LogicProblem problem, Type type, TypeAnalysisResult typeAnalysisResult) { |
20 | private pattern typeInterpretation(problem:LogicProblem, interpretation:PartialInterpretation, type:TypeDeclaration, typeInterpretation:PartialComplexTypeInterpretation) { | 16 | val precondition = typeAnalysisResult?.mayNewTypePreconditions?.get(type) |
21 | find interpretation(problem,interpretation); | 17 | val inhibitorTypes = precondition?.inhibitorTypes |
22 | LogicProblem.types(problem,type); | ||
23 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | ||
24 | PartialComplexTypeInterpretation.interpretationOf(typeInterpretation,type); | ||
25 | } | ||
26 | |||
27 | private pattern directInstanceOf(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement, type:Type) { | ||
28 | find interpretation(problem,interpretation); | ||
29 | LogicProblem.types(problem,type); | ||
30 | TypeDefinition.elements(type,element); | ||
31 | } or { | ||
32 | find interpretation(problem,interpretation); | ||
33 | find typeInterpretation(problem,interpretation,type,typeInterpretation); | ||
34 | PartialComplexTypeInterpretation.elements(typeInterpretation,element); | ||
35 | } | ||
36 | |||
37 | private pattern isPrimitive(element: PrimitiveElement) { | ||
38 | PrimitiveElement(element); | ||
39 | } | ||
40 | ''' | ||
41 | |||
42 | override generateInstanceOfQueries(LogicProblem problem, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) { | ||
43 | val mayNewTypePreconditions = typeAnalysisResult.mayNewTypePreconditions | ||
44 | |||
45 | return ''' | ||
46 | «FOR type:problem.types» | ||
47 | «problem.generateMustInstenceOf(type)» | ||
48 | «problem.generateMayInstanceOf(type,mayNewTypePreconditions.get(type))» | ||
49 | «ENDFOR» | ||
50 | ''' | ||
51 | } | ||
52 | |||
53 | private def patternName(Type type, Modality modality) | ||
54 | '''«modality.toString.toLowerCase»InstanceOf«base.canonizeName(type.name)»''' | ||
55 | |||
56 | private def generateMustInstenceOf(LogicProblem problem, Type type) { | ||
57 | ''' | ||
58 | /** | ||
59 | * An element must be an instance of type "«type.name»". | ||
60 | */ | ||
61 | private pattern «patternName(type,Modality.MUST)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) { | ||
62 | Type.name(type,"«type.name»"); | ||
63 | find directInstanceOf(problem,interpretation,element,type); | ||
64 | } | ||
65 | ''' | ||
66 | } | ||
67 | |||
68 | private def generateMayInstanceOf(LogicProblem problem, Type type, TypeRefinementPrecondition precondition) { | ||
69 | val inhibitorTypes = if(precondition!=null) { | ||
70 | precondition.inhibitorTypes | ||
71 | } else { | ||
72 | null | ||
73 | } | ||
74 | ''' | 18 | ''' |
75 | private pattern scopeDisallowsNew«base.canonizeName(type.name)»(problem:LogicProblem, interpretation:PartialInterpretation) { | 19 | private pattern scopeDisallowsNew«base.canonizeName(type.name)»(problem:LogicProblem, interpretation:PartialInterpretation) { |
76 | find interpretation(problem,interpretation); | 20 | find interpretation(problem,interpretation); |
77 | PartialInterpretation.scopes(interpretation,scope); | 21 | PartialInterpretation.scopes(interpretation,scope); |
78 | Scope.targetTypeInterpretation(scope,typeInterpretation); | 22 | Scope.targetTypeInterpretation(scope,typeInterpretation); |
79 | Scope.maxNewElements(scope,0); | 23 | Scope.maxNewElements(scope,0); |
80 | PartialComplexTypeInterpretation.interpretationOf(typeInterpretation,type); | 24 | PartialComplexTypeInterpretation.interpretationOf(typeInterpretation,type); |
81 | Type.name(type,"«type.name»"); | 25 | Type.name(type,"«type.name»"); |
82 | } | 26 | } |
83 | 27 | ||
84 | /** | 28 | /** |
85 | * An element may be an instance of type "«type.name»". | 29 | * An element may be an instance of type "«type.name»". |
86 | */ | 30 | */ |
87 | private pattern «patternName(type,Modality.MAY)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) | 31 | private pattern «patternName(type,Modality.MAY)»(problem:LogicProblem, interpretation:PartialInterpretation, element:DefinedElement) |
88 | «IF inhibitorTypes !== null»{ | 32 | «IF inhibitorTypes !== null» |
89 | find interpretation(problem,interpretation); | 33 | { |
90 | PartialInterpretation.newElements(interpretation,element); | 34 | find interpretation(problem,interpretation); |
91 | «FOR inhibitorType : inhibitorTypes» | 35 | PartialInterpretation.newElements(interpretation,element); |
92 | neg «referInstanceOf(inhibitorType,Modality.MUST,"element")» | 36 | «FOR inhibitorType : inhibitorTypes» |
93 | «ENDFOR» | 37 | neg «referInstanceOf(inhibitorType,Modality.MUST,"element")» |
94 | neg find scopeDisallowsNew«base.canonizeName(type.name)»(problem, interpretation); | 38 | «ENDFOR» |
95 | neg find isPrimitive(element); | 39 | neg find scopeDisallowsNew«base.canonizeName(type.name)»(problem, interpretation); |
96 | } or { | 40 | neg find isPrimitive(element); |
97 | find interpretation(problem,interpretation); | 41 | } or { |
98 | PartialInterpretation.openWorldElements(interpretation,element); | 42 | find interpretation(problem,interpretation); |
99 | «FOR inhibitorType : inhibitorTypes» | 43 | PartialInterpretation.openWorldElements(interpretation,element); |
100 | neg «referInstanceOf(inhibitorType,Modality.MUST,"element")» | 44 | «FOR inhibitorType : inhibitorTypes» |
101 | «ENDFOR» | 45 | neg «referInstanceOf(inhibitorType,Modality.MUST,"element")» |
102 | neg find scopeDisallowsNew«base.canonizeName(type.name)»(problem, interpretation); | 46 | «ENDFOR» |
103 | neg find isPrimitive(element); | 47 | neg find scopeDisallowsNew«base.canonizeName(type.name)»(problem, interpretation); |
104 | } or | 48 | neg find isPrimitive(element); |
105 | «ENDIF» | 49 | } or |
106 | { «referInstanceOf(type,Modality.MUST,"element")» } | 50 | «ENDIF» |
51 | { «referInstanceOf(type,Modality.MUST,"element")» } | ||
107 | ''' | 52 | ''' |
108 | } | 53 | } |
109 | 54 | } | |
110 | public override referInstanceOf(Type type, Modality modality, String variableName) { | ||
111 | '''find «patternName(type,modality)»(problem,interpretation,«variableName»);''' | ||
112 | } | ||
113 | public override referInstanceOf(EClass type, Modality modality, String variableName) { | ||
114 | '''find «modality.toString.toLowerCase»InstanceOf«base.canonizeName('''«type.name» class''')»(problem,interpretation,«variableName»);''' | ||
115 | } | ||
116 | } \ No newline at end of file | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementGenerator.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementGenerator.xtend index 7e3fad91..4ef336ae 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementGenerator.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementGenerator.xtend | |||
@@ -25,69 +25,76 @@ class ObjectCreationPrecondition { | |||
25 | 25 | ||
26 | abstract class TypeRefinementGenerator { | 26 | abstract class TypeRefinementGenerator { |
27 | val protected PatternGenerator base; | 27 | val protected PatternGenerator base; |
28 | public new(PatternGenerator base) { | 28 | |
29 | new(PatternGenerator base) { | ||
29 | this.base = base | 30 | this.base = base |
30 | } | 31 | } |
31 | 32 | ||
32 | public def boolean requiresTypeAnalysis() | 33 | def boolean requiresTypeAnalysis() |
33 | public def CharSequence generateRefineObjectQueries(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) | 34 | |
34 | public def CharSequence generateRefineTypeQueries(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) | 35 | def CharSequence generateRefineObjectQueries(LogicProblem p, PartialInterpretation emptySolution, |
35 | public def Map<? extends Type, String> getRefineTypeQueryNames(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) | 36 | TypeAnalysisResult typeAnalysisResult) |
36 | 37 | ||
37 | public def getRefineObjectQueryNames(LogicProblem p, PartialInterpretation emptySolution, TypeAnalysisResult typeAnalysisResult) { | 38 | def CharSequence generateRefineTypeQueries(LogicProblem p, PartialInterpretation emptySolution, |
38 | val Map<ObjectCreationPrecondition,String> objectCreationQueries = new LinkedHashMap | 39 | TypeAnalysisResult typeAnalysisResult) |
40 | |||
41 | def Map<? extends Type, String> getRefineTypeQueryNames(LogicProblem p, PartialInterpretation emptySolution, | ||
42 | TypeAnalysisResult typeAnalysisResult) | ||
43 | |||
44 | def getRefineObjectQueryNames(LogicProblem p, PartialInterpretation emptySolution, | ||
45 | TypeAnalysisResult typeAnalysisResult) { | ||
46 | val Map<ObjectCreationPrecondition, String> objectCreationQueries = new LinkedHashMap | ||
39 | val containment = p.containmentHierarchies.head | 47 | val containment = p.containmentHierarchies.head |
40 | val inverseRelations = new HashMap | 48 | val inverseRelations = new HashMap |
41 | p.annotations.filter(InverseRelationAssertion).forEach[ | 49 | p.annotations.filter(InverseRelationAssertion).forEach [ |
42 | inverseRelations.put(it.inverseA,it.inverseB) | 50 | inverseRelations.put(it.inverseA, it.inverseB) |
43 | inverseRelations.put(it.inverseB,it.inverseA) | 51 | inverseRelations.put(it.inverseB, it.inverseA) |
44 | ] | 52 | ] |
45 | for(type: p.types.filter(TypeDeclaration).filter[!it.isAbstract]) { | 53 | for (type : p.types.filter(TypeDeclaration).filter[!it.isAbstract]) { |
46 | if(containment.typeInContainment(type)) { | 54 | if (containment.typeInContainment(type)) { |
47 | for(containmentRelation : containment.containmentRelations.filter[canBeContainedByRelation(it,type)]) { | 55 | for (containmentRelation : containment.containmentRelations. |
48 | if(inverseRelations.containsKey(containmentRelation)) { | 56 | filter[canBeContainedByRelation(it, type)]) { |
57 | if (inverseRelations.containsKey(containmentRelation)) { | ||
49 | objectCreationQueries.put( | 58 | objectCreationQueries.put( |
50 | new ObjectCreationPrecondition(containmentRelation,inverseRelations.get(containmentRelation),type), | 59 | new ObjectCreationPrecondition(containmentRelation, |
51 | this.patternName(containmentRelation,inverseRelations.get(containmentRelation),type)) | 60 | inverseRelations.get(containmentRelation), type), |
61 | this.patternName(containmentRelation, inverseRelations.get(containmentRelation), type)) | ||
52 | } else { | 62 | } else { |
53 | objectCreationQueries.put( | 63 | objectCreationQueries.put(new ObjectCreationPrecondition(containmentRelation, null, type), |
54 | new ObjectCreationPrecondition(containmentRelation,null,type), | 64 | patternName(containmentRelation, null, type)) |
55 | patternName(containmentRelation,null,type)) | ||
56 | } | 65 | } |
57 | } | 66 | } |
58 | objectCreationQueries.put( | 67 | objectCreationQueries.put(new ObjectCreationPrecondition(null, null, type), |
59 | new ObjectCreationPrecondition(null,null,type), | 68 | patternName(null, null, type)) |
60 | patternName(null,null,type)) | ||
61 | } else { | 69 | } else { |
62 | objectCreationQueries.put( | 70 | objectCreationQueries.put(new ObjectCreationPrecondition(null, null, type), |
63 | new ObjectCreationPrecondition(null,null,type), | 71 | this.patternName(null, null, type)) |
64 | this.patternName(null,null,type)) | ||
65 | } | 72 | } |
66 | } | 73 | } |
67 | return objectCreationQueries | 74 | return objectCreationQueries |
68 | } | 75 | } |
69 | 76 | ||
70 | protected def canBeContainedByRelation(Relation r, Type t) { | 77 | protected def canBeContainedByRelation(Relation r, Type t) { |
71 | if(r.parameters.size==2) { | 78 | if (r.parameters.size == 2) { |
72 | val param = r.parameters.get(1) | 79 | val param = r.parameters.get(1) |
73 | if(param instanceof ComplexTypeReference) { | 80 | if (param instanceof ComplexTypeReference) { |
74 | val allSuperTypes = t.transitiveClosureStar[it.supertypes] | 81 | val allSuperTypes = t.transitiveClosureStar[it.supertypes] |
75 | for(superType : allSuperTypes) { | 82 | for (superType : allSuperTypes) { |
76 | if(param.referred == superType) return true | 83 | if(param.referred == superType) return true |
77 | } | 84 | } |
78 | } | 85 | } |
79 | } | 86 | } |
80 | return false | 87 | return false |
81 | } | 88 | } |
82 | 89 | ||
83 | private def typeInContainment(ContainmentHierarchy hierarchy, Type type) { | 90 | private def typeInContainment(ContainmentHierarchy hierarchy, Type type) { |
84 | val allSuperTypes = type.transitiveClosureStar[it.supertypes] | 91 | val allSuperTypes = type.transitiveClosureStar[it.supertypes] |
85 | return allSuperTypes.exists[hierarchy.typesOrderedInHierarchy.contains(it)] | 92 | return allSuperTypes.exists[hierarchy.typesOrderedInHierarchy.contains(it)] |
86 | } | 93 | } |
87 | 94 | ||
88 | protected def String patternName(Relation containmentRelation, Relation inverseContainment, Type newType) { | 95 | protected def String patternName(Relation containmentRelation, Relation inverseContainment, Type newType) { |
89 | if(containmentRelation != null) { | 96 | if (containmentRelation !== null) { |
90 | if(inverseContainment != null) { | 97 | if (inverseContainment !== null) { |
91 | '''createObject_«base.canonizeName(newType.name)»_by_«base.canonizeName(containmentRelation.name)»_with_«base.canonizeName(inverseContainment.name)»''' | 98 | '''createObject_«base.canonizeName(newType.name)»_by_«base.canonizeName(containmentRelation.name)»_with_«base.canonizeName(inverseContainment.name)»''' |
92 | } else { | 99 | } else { |
93 | '''createObject_«base.canonizeName(newType.name)»_by_«base.canonizeName(containmentRelation.name)»''' | 100 | '''createObject_«base.canonizeName(newType.name)»_by_«base.canonizeName(containmentRelation.name)»''' |
@@ -96,4 +103,8 @@ abstract class TypeRefinementGenerator { | |||
96 | '''createObject_«base.canonizeName(newType.name)»''' | 103 | '''createObject_«base.canonizeName(newType.name)»''' |
97 | } | 104 | } |
98 | } | 105 | } |
99 | } \ No newline at end of file | 106 | |
107 | def hasElementInContainmentName() { | ||
108 | "hasElementInContainment" | ||
109 | } | ||
110 | } | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementWithPreliminaryTypeAnalysis.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementWithPreliminaryTypeAnalysis.xtend index cbbbcb08..1a81695e 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementWithPreliminaryTypeAnalysis.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/TypeRefinementWithPreliminaryTypeAnalysis.xtend | |||
@@ -10,7 +10,7 @@ import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.par | |||
10 | import java.util.HashMap | 10 | import java.util.HashMap |
11 | 11 | ||
12 | class TypeRefinementWithPreliminaryTypeAnalysis extends TypeRefinementGenerator{ | 12 | class TypeRefinementWithPreliminaryTypeAnalysis extends TypeRefinementGenerator{ |
13 | public new(PatternGenerator base) { | 13 | new(PatternGenerator base) { |
14 | super(base) | 14 | super(base) |
15 | } | 15 | } |
16 | override requiresTypeAnalysis() { true } | 16 | override requiresTypeAnalysis() { true } |
@@ -24,7 +24,7 @@ class TypeRefinementWithPreliminaryTypeAnalysis extends TypeRefinementGenerator{ | |||
24 | inverseRelations.put(it.inverseB,it.inverseA) | 24 | inverseRelations.put(it.inverseB,it.inverseA) |
25 | ] | 25 | ] |
26 | return ''' | 26 | return ''' |
27 | private pattern hasElementInContainment(problem:LogicProblem, interpretation:PartialInterpretation) | 27 | pattern «hasElementInContainmentName»(problem:LogicProblem, interpretation:PartialInterpretation) |
28 | «FOR type :containment.typesOrderedInHierarchy SEPARATOR "or"»{ | 28 | «FOR type :containment.typesOrderedInHierarchy SEPARATOR "or"»{ |
29 | find interpretation(problem,interpretation); | 29 | find interpretation(problem,interpretation); |
30 | «base.typeIndexer.referInstanceOf(type,Modality.MUST,"root")» | 30 | «base.typeIndexer.referInstanceOf(type,Modality.MUST,"root")» |
@@ -76,7 +76,7 @@ class TypeRefinementWithPreliminaryTypeAnalysis extends TypeRefinementGenerator{ | |||
76 | typeInterpretation:PartialComplexTypeInterpretation) | 76 | typeInterpretation:PartialComplexTypeInterpretation) |
77 | { | 77 | { |
78 | find interpretation(problem,interpretation); | 78 | find interpretation(problem,interpretation); |
79 | neg find hasElementInContainment(problem,interpretation); | 79 | neg find «hasElementInContainmentName»(problem,interpretation); |
80 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); | 80 | PartialInterpretation.partialtypeinterpratation(interpretation,typeInterpretation); |
81 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); | 81 | PartialComplexTypeInterpretation.interpretationOf.name(typeInterpretation,"«type.name»"); |
82 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» | 82 | «base.typeIndexer.referInstanceOf(type,Modality.MAY,"newObject")» |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/UnfinishedIndexer.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/UnfinishedIndexer.xtend index ad1c9033..a8a07756 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/UnfinishedIndexer.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/patterns/UnfinishedIndexer.xtend | |||
@@ -1,85 +1,204 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns |
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.ecore2logic.ecore2logicannotations.LowerMultiplicityAssertion | 3 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration |
4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | 4 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem |
5 | import hu.bme.mit.inf.dslreasoner.viatra2logic.viatra2logicannotations.TransformedViatraWellformednessConstraint | 5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality |
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.RelationMultiplicityConstraint | ||
7 | import java.util.LinkedHashMap | ||
8 | import java.util.List | ||
6 | import java.util.Map | 9 | import java.util.Map |
7 | import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery | 10 | import org.eclipse.viatra.query.runtime.matchers.psystem.queries.PQuery |
11 | import org.eclipse.xtend.lib.annotations.Data | ||
8 | 12 | ||
9 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* | 13 | import static extension hu.bme.mit.inf.dslreasoner.util.CollectionsUtil.* |
10 | import java.util.LinkedHashMap | 14 | |
11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | 15 | @Data |
12 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.ComplexTypeReference | 16 | class UnifinishedMultiplicityQueryNames { |
17 | val String unfinishedMultiplicityQueryName | ||
18 | val String unrepairableMultiplicityQueryName | ||
19 | val String remainingInverseMultiplicityQueryName | ||
20 | val String remainingContentsQueryName | ||
21 | } | ||
13 | 22 | ||
14 | class UnfinishedIndexer { | 23 | class UnfinishedIndexer { |
15 | val PatternGenerator base | 24 | val PatternGenerator base |
16 | 25 | val boolean indexUpperMultiplicities | |
17 | new(PatternGenerator patternGenerator) { | 26 | |
27 | new(PatternGenerator patternGenerator, boolean indexUpperMultiplicities) { | ||
18 | this.base = patternGenerator | 28 | this.base = patternGenerator |
29 | this.indexUpperMultiplicities = indexUpperMultiplicities | ||
19 | } | 30 | } |
20 | 31 | ||
21 | def generateUnfinishedWfQueries(LogicProblem problem, Map<String,PQuery> fqn2PQuery) { | 32 | def generateUnfinishedWfQueries(LogicProblem problem, Map<String, PQuery> fqn2PQuery) { |
22 | val wfQueries = base.wfQueries(problem) | 33 | val wfQueries = base.wfQueries(problem) |
23 | ''' | 34 | ''' |
24 | «FOR wfQuery: wfQueries» | 35 | «FOR wfQuery : wfQueries» |
25 | pattern unfinishedBy_«base.canonizeName(wfQuery.target.name)»(problem:LogicProblem, interpretation:PartialInterpretation, | 36 | pattern unfinishedBy_«base.canonizeName(wfQuery.target.name)»(problem:LogicProblem, interpretation:PartialInterpretation, |
26 | «FOR param : wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery).parameters SEPARATOR ', '»var_«param.name»«ENDFOR») | 37 | «FOR param : wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery).parameters SEPARATOR ', '»var_«param.name»«ENDFOR») |
27 | { | 38 | { |
28 | «base.relationDefinitionIndexer.referPattern( | 39 | «base.relationDefinitionIndexer.referPattern( |
29 | wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery), | 40 | wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery), |
30 | wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery).parameters.map['''var_«it.name»'''], | 41 | wfQuery.patternFullyQualifiedName.lookup(fqn2PQuery).parameters.map['''var_«it.name»'''], |
31 | Modality.CURRENT, | 42 | Modality.CURRENT, |
32 | true,false)» | 43 | true,false)» |
33 | } | 44 | } |
34 | «ENDFOR» | 45 | «ENDFOR» |
35 | ''' | 46 | ''' |
36 | } | 47 | } |
48 | |||
37 | def getUnfinishedWFQueryNames(LogicProblem problem) { | 49 | def getUnfinishedWFQueryNames(LogicProblem problem) { |
38 | val wfQueries = base.wfQueries(problem) | 50 | val wfQueries = base.wfQueries(problem) |
39 | val map = new LinkedHashMap | 51 | val map = new LinkedHashMap |
40 | for(wfQuery : wfQueries) { | 52 | for (wfQuery : wfQueries) { |
41 | map.put(wfQuery.target,'''unfinishedBy_«base.canonizeName(wfQuery.target.name)»''') | 53 | map.put(wfQuery.target, '''unfinishedBy_«base.canonizeName(wfQuery.target.name)»''') |
42 | } | 54 | } |
43 | return map | 55 | return map |
44 | } | 56 | } |
45 | def generateUnfinishedMultiplicityQueries(LogicProblem problem, Map<String,PQuery> fqn2PQuery) { | 57 | |
46 | val lowerMultiplicities = base.lowerMultiplicities(problem) | 58 | def generateUnfinishedMultiplicityQueries(List<RelationMultiplicityConstraint> constraints, |
47 | return ''' | 59 | Map<String, PQuery> fqn2PQuery) ''' |
48 | «FOR lowerMultiplicity : lowerMultiplicities» | 60 | «FOR constraint : constraints» |
49 | pattern «unfinishedMultiplicityName(lowerMultiplicity)»(problem:LogicProblem, interpretation:PartialInterpretation, relationIterpretation:PartialRelationInterpretation, object:DefinedElement,missingMultiplicity) { | 61 | «IF constraint.constrainsUnfinished» |
50 | find interpretation(problem,interpretation); | 62 | private pattern «unfinishedMultiplicityName(constraint)»_helper(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement, missingMultiplicity:java Integer) { |
51 | PartialInterpretation.partialrelationinterpretation(interpretation,relationIterpretation); | 63 | find interpretation(problem,interpretation); |
52 | PartialRelationInterpretation.interpretationOf.name(relationIterpretation,"«lowerMultiplicity.relation.name»"); | 64 | find mustExist(problem,interpretation,object); |
53 | «base.typeIndexer.referInstanceOf(lowerMultiplicity.firstParamTypeOfRelation,Modality::MUST,"object")» | 65 | «base.typeIndexer.referInstanceOf(constraint.sourceType,Modality::MUST,"object")» |
54 | numberOfExistingReferences == count «base.referRelation(lowerMultiplicity.relation,"object","_",Modality.MUST,fqn2PQuery)» | 66 | numberOfExistingReferences == count «base.referRelation(constraint.relation,"object","_",Modality.MUST,fqn2PQuery)» |
55 | check(numberOfExistingReferences < «lowerMultiplicity.lower»); | 67 | check(numberOfExistingReferences < «constraint.lowerBound»); |
56 | missingMultiplicity == eval(«lowerMultiplicity.lower»-numberOfExistingReferences); | 68 | missingMultiplicity == eval(«constraint.lowerBound»-numberOfExistingReferences); |
57 | } | 69 | } |
70 | |||
71 | pattern «unfinishedMultiplicityName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, missingMultiplicity:java Integer) { | ||
72 | find interpretation(problem,interpretation); | ||
73 | missingMultiplicity == sum find «unfinishedMultiplicityName(constraint)»_helper(problem, interpretation, _, #_); | ||
74 | } | ||
75 | «ENDIF» | ||
76 | |||
77 | «IF indexUpperMultiplicities» | ||
78 | «IF constraint.constrainsUnrepairable || constraint.constrainsRemainingInverse» | ||
79 | private pattern «repairMatchName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, source:DefinedElement, target:DefinedElement) { | ||
80 | «IF base.isRepresentative(constraint.relation, constraint.inverseRelation) && constraint.relation instanceof RelationDeclaration» | ||
81 | «base.relationRefinementGenerator.referRefinementQuery(constraint.relation as RelationDeclaration, constraint.inverseRelation, "_", "_", "source", "target")» | ||
82 | «ELSE» | ||
83 | «IF base.isRepresentative(constraint.inverseRelation, constraint.relation) && constraint.inverseRelation instanceof RelationDeclaration» | ||
84 | «base.relationRefinementGenerator.referRefinementQuery(constraint.inverseRelation as RelationDeclaration, constraint.relation, "_", "_", "target", "source")» | ||
85 | «ELSE» | ||
86 | find interpretation(problem,interpretation); | ||
87 | find mustExist(problem,interpretation,source); | ||
88 | «base.typeIndexer.referInstanceOf(constraint.sourceType,Modality::MUST,"source")» | ||
89 | find mustExist(problem,interpretation,target); | ||
90 | «base.typeIndexer.referInstanceOf(constraint.targetType,Modality::MUST,"target")» | ||
91 | neg «base.referRelation(constraint.relation,"source","target",Modality.MUST,fqn2PQuery)» | ||
92 | «base.referRelation(constraint.relation,"source","target",Modality.MAY,fqn2PQuery)» | ||
93 | «ENDIF» | ||
94 | «ENDIF» | ||
95 | } | ||
96 | «ENDIF» | ||
97 | |||
98 | «IF constraint.constrainsUnrepairable» | ||
99 | private pattern «unrepairableMultiplicityName(constraint)»_helper(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement, unrepairableMultiplicity:java Integer) { | ||
100 | find «unfinishedMultiplicityName(constraint)»_helper(problem, interpretation, object, missingMultiplicity); | ||
101 | numberOfRepairMatches == count find «repairMatchName(constraint)»(problem, interpretation, object, _); | ||
102 | check(numberOfRepairMatches < missingMultiplicity); | ||
103 | unrepairableMultiplicity == eval(missingMultiplicity-numberOfRepairMatches); | ||
104 | } | ||
105 | |||
106 | private pattern «unrepairableMultiplicityName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, unrepairableMultiplicity:java Integer) { | ||
107 | find interpretation(problem,interpretation); | ||
108 | unrepairableMultiplicity == max find «unrepairableMultiplicityName(constraint)»_helper(problem, interpretation, _, #_); | ||
109 | } or { | ||
110 | find interpretation(problem,interpretation); | ||
111 | neg find «unrepairableMultiplicityName(constraint)»_helper(problem, interpretation, _, _); | ||
112 | unrepairableMultiplicity == 0; | ||
113 | } | ||
114 | «ENDIF» | ||
115 | |||
116 | «IF constraint.constrainsRemainingInverse» | ||
117 | private pattern «remainingMultiplicityName(constraint)»_helper(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement, remainingMultiplicity:java Integer) { | ||
118 | find interpretation(problem,interpretation); | ||
119 | find mustExist(problem,interpretation,object); | ||
120 | «base.typeIndexer.referInstanceOf(constraint.targetType,Modality::MUST,"object")» | ||
121 | numberOfExistingReferences == count «base.referRelation(constraint.relation,"_","object",Modality.MUST,fqn2PQuery)» | ||
122 | check(numberOfExistingReferences < «constraint.inverseUpperBound»); | ||
123 | numberOfRepairMatches == count find «repairMatchName(constraint)»(problem, interpretation, _, object); | ||
124 | remainingMultiplicity == eval(Math.min(«constraint.inverseUpperBound»-numberOfExistingReferences, numberOfRepairMatches)); | ||
125 | } | ||
126 | |||
127 | pattern «remainingMultiplicityName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, remainingMultiplicity:java Integer) { | ||
128 | find interpretation(problem,interpretation); | ||
129 | remainingMultiplicity == sum find «remainingMultiplicityName(constraint)»_helper(problem, interpretation, _, #_); | ||
130 | } | ||
131 | «ENDIF» | ||
132 | |||
133 | «IF constraint.constrainsRemainingContents» | ||
134 | «IF constraint.upperBoundFinite» | ||
135 | private pattern «remainingContentsName(constraint)»_helper(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement, remainingMultiplicity:java Integer) { | ||
136 | find interpretation(problem,interpretation); | ||
137 | find mustExist(problem,interpretation,object); | ||
138 | «base.typeIndexer.referInstanceOf(constraint.sourceType,Modality::MUST,"object")» | ||
139 | numberOfExistingReferences == count «base.referRelation(constraint.relation,"object","_",Modality.MUST,fqn2PQuery)» | ||
140 | check(numberOfExistingReferences < «constraint.upperBound»); | ||
141 | remainingMultiplicity == eval(«constraint.upperBound»-numberOfExistingReferences); | ||
142 | } | ||
143 | |||
144 | pattern «remainingContentsName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, remainingMultiplicity:java Integer) { | ||
145 | find interpretation(problem,interpretation); | ||
146 | remainingMultiplicity == sum find «remainingContentsName(constraint)»_helper(problem, interpretation, _, #_); | ||
147 | } | ||
148 | «ELSE» | ||
149 | pattern «remainingContentsName(constraint)»_helper(problem:LogicProblem, interpretation:PartialInterpretation) { | ||
150 | find interpretation(problem,interpretation); | ||
151 | find mustExist(problem,interpretation,object); | ||
152 | «base.typeIndexer.referInstanceOf(constraint.sourceType,Modality::MUST,"object")» | ||
153 | } | ||
154 | |||
155 | pattern «remainingContentsName(constraint)»(problem:LogicProblem, interpretation:PartialInterpretation, remainingMultiplicity:java Integer) { | ||
156 | find interpretation(problem,interpretation); | ||
157 | find «remainingContentsName(constraint)»_helper(problem, interpretation); | ||
158 | remainingMultiplicity == -1; | ||
159 | } or { | ||
160 | find interpretation(problem,interpretation); | ||
161 | neg find «remainingContentsName(constraint)»_helper(problem, interpretation); | ||
162 | remainingMultiplicity == 0; | ||
163 | } | ||
164 | «ENDIF» | ||
165 | «ENDIF» | ||
166 | «ENDIF» | ||
58 | «ENDFOR» | 167 | «ENDFOR» |
59 | ''' | 168 | ''' |
60 | } | 169 | |
61 | def String unfinishedMultiplicityName(LowerMultiplicityAssertion lowerMultiplicityAssertion) | 170 | def String unfinishedMultiplicityName( |
62 | '''unfinishedLowerMultiplicity_«base.canonizeName(lowerMultiplicityAssertion.relation.name)»''' | 171 | RelationMultiplicityConstraint constraint) '''unfinishedLowerMultiplicity_«base.canonizeName(constraint.relation.name)»''' |
63 | 172 | ||
64 | def public referUnfinishedMultiplicityQuery(LowerMultiplicityAssertion lowerMultiplicityAssertion) | 173 | def String unrepairableMultiplicityName( |
65 | '''find «unfinishedMultiplicityName(lowerMultiplicityAssertion)»(problem, interpretation ,object, missingMultiplicity);''' | 174 | RelationMultiplicityConstraint constraint) '''unrepairableLowerMultiplicity_«base.canonizeName(constraint.relation.name)»''' |
66 | 175 | ||
67 | def getFirstParamTypeOfRelation(LowerMultiplicityAssertion lowerMultiplicityAssertion) { | 176 | private def String repairMatchName( |
68 | val parameters = lowerMultiplicityAssertion.relation.parameters | 177 | RelationMultiplicityConstraint constraint) '''repair_«base.canonizeName(constraint.relation.name)»''' |
69 | if(parameters.size == 2) { | 178 | |
70 | val firstParam = parameters.get(0) | 179 | def String remainingMultiplicityName( |
71 | if(firstParam instanceof ComplexTypeReference) { | 180 | RelationMultiplicityConstraint constraint) '''remainingInverseUpperMultiplicity_«base.canonizeName(constraint.relation.name)»''' |
72 | return firstParam.referred | 181 | |
73 | } | 182 | def String remainingContentsName( |
74 | } | 183 | RelationMultiplicityConstraint constraint) '''remainingContents_«base.canonizeName(constraint.relation.name)»''' |
75 | } | 184 | |
76 | 185 | def getUnfinishedMultiplicityQueries(List<RelationMultiplicityConstraint> constraints) { | |
77 | def getUnfinishedMultiplicityQueries(LogicProblem problem) { | 186 | constraints.toInvertedMap [ constraint | |
78 | val lowerMultiplicities = base.lowerMultiplicities(problem) | 187 | new UnifinishedMultiplicityQueryNames( |
79 | val map = new LinkedHashMap | 188 | if(constraint.constrainsUnfinished) unfinishedMultiplicityName(constraint) else null, |
80 | for(lowerMultiplicity : lowerMultiplicities) { | 189 | if (indexUpperMultiplicities && constraint.constrainsUnrepairable) |
81 | map.put(lowerMultiplicity.relation,unfinishedMultiplicityName(lowerMultiplicity)) | 190 | unrepairableMultiplicityName(constraint) |
82 | } | 191 | else |
83 | return map | 192 | null, |
193 | if (indexUpperMultiplicities && constraint.constrainsRemainingInverse) | ||
194 | remainingMultiplicityName(constraint) | ||
195 | else | ||
196 | null, | ||
197 | if (indexUpperMultiplicities && constraint.constrainsRemainingContents) | ||
198 | remainingContentsName(constraint) | ||
199 | else | ||
200 | null | ||
201 | ) | ||
202 | ] | ||
84 | } | 203 | } |
85 | } | 204 | } |
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/GoalConstraintProvider.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/GoalConstraintProvider.xtend index e1be2742..b6fdbe06 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/GoalConstraintProvider.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/GoalConstraintProvider.xtend | |||
@@ -1,6 +1,6 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules | 1 | package hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.rules |
2 | 2 | ||
3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.MultiplicityGoalConstraintCalculator | 3 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.MultiplicityGoalConstraintCalculator |
4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.GeneratedPatterns | 4 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.GeneratedPatterns |
5 | import java.util.ArrayList | 5 | import java.util.ArrayList |
6 | 6 | ||
diff --git a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/RefinementRuleProvider.xtend b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/RefinementRuleProvider.xtend index 20d24b77..7891ebd8 100644 --- a/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/RefinementRuleProvider.xtend +++ b/Solvers/VIATRA-Solver/hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra/src/hu/bme/mit/inf/dslreasoner/viatrasolver/logic2viatra/rules/RefinementRuleProvider.xtend | |||
@@ -6,7 +6,7 @@ import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Relation | |||
6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration | 6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration |
7 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | 7 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type |
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics | 8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationStatistics |
9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ScopePropagator | 9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagator |
10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.GeneratedPatterns | 10 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.GeneratedPatterns |
11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.ObjectCreationPrecondition | 11 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.ObjectCreationPrecondition |
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialComplexTypeInterpretation | 12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialComplexTypeInterpretation |
@@ -67,7 +67,8 @@ class RefinementRuleProvider { | |||
67 | if(containmentRelation != null) { | 67 | if(containmentRelation != null) { |
68 | if(inverseRelation!= null) { | 68 | if(inverseRelation!= null) { |
69 | ruleBuilder.action[match | | 69 | ruleBuilder.action[match | |
70 | //println(name) | 70 | statistics.incrementTransformationCount |
71 | // println(name) | ||
71 | val startTime = System.nanoTime | 72 | val startTime = System.nanoTime |
72 | //val problem = match.get(0) as LogicProblem | 73 | //val problem = match.get(0) as LogicProblem |
73 | val interpretation = match.get(1) as PartialInterpretation | 74 | val interpretation = match.get(1) as PartialInterpretation |
@@ -98,14 +99,17 @@ class RefinementRuleProvider { | |||
98 | val newLink2 = factory2.createBinaryElementRelationLink => [it.param1 = newElement it.param2 = container] | 99 | val newLink2 = factory2.createBinaryElementRelationLink => [it.param1 = newElement it.param2 = container] |
99 | inverseRelationInterpretation.relationlinks+=newLink2 | 100 | inverseRelationInterpretation.relationlinks+=newLink2 |
100 | 101 | ||
102 | val propagatorStartTime = System.nanoTime | ||
103 | statistics.addExecutionTime(propagatorStartTime-startTime) | ||
104 | |||
101 | // Scope propagation | 105 | // Scope propagation |
102 | scopePropagator.propagateAdditionToType(typeInterpretation) | 106 | scopePropagator.propagateAdditionToType(typeInterpretation) |
103 | 107 | statistics.addScopePropagationTime(System.nanoTime-propagatorStartTime) | |
104 | statistics.addExecutionTime(System.nanoTime-startTime) | ||
105 | ] | 108 | ] |
106 | } else { | 109 | } else { |
107 | ruleBuilder.action[match | | 110 | ruleBuilder.action[match | |
108 | //println(name) | 111 | statistics.incrementTransformationCount |
112 | // println(name) | ||
109 | val startTime = System.nanoTime | 113 | val startTime = System.nanoTime |
110 | //val problem = match.get(0) as LogicProblem | 114 | //val problem = match.get(0) as LogicProblem |
111 | val interpretation = match.get(1) as PartialInterpretation | 115 | val interpretation = match.get(1) as PartialInterpretation |
@@ -132,14 +136,19 @@ class RefinementRuleProvider { | |||
132 | val newLink = factory2.createBinaryElementRelationLink => [it.param1 = container it.param2 = newElement] | 136 | val newLink = factory2.createBinaryElementRelationLink => [it.param1 = container it.param2 = newElement] |
133 | relationInterpretation.relationlinks+=newLink | 137 | relationInterpretation.relationlinks+=newLink |
134 | 138 | ||
139 | val propagatorStartTime = System.nanoTime | ||
140 | statistics.addExecutionTime(propagatorStartTime-startTime) | ||
141 | |||
135 | // Scope propagation | 142 | // Scope propagation |
136 | scopePropagator.propagateAdditionToType(typeInterpretation) | 143 | scopePropagator.propagateAdditionToType(typeInterpretation) |
137 | 144 | statistics.addScopePropagationTime(System.nanoTime-propagatorStartTime) | |
138 | statistics.addExecutionTime(System.nanoTime-startTime) | ||
139 | ] | 145 | ] |
140 | } | 146 | } |
141 | } else { | 147 | } else { |
142 | ruleBuilder.action[match | | 148 | ruleBuilder.action[match | |
149 | statistics.incrementTransformationCount | ||
150 | // println(name) | ||
151 | |||
143 | val startTime = System.nanoTime | 152 | val startTime = System.nanoTime |
144 | //val problem = match.get(0) as LogicProblem | 153 | //val problem = match.get(0) as LogicProblem |
145 | val interpretation = match.get(1) as PartialInterpretation | 154 | val interpretation = match.get(1) as PartialInterpretation |
@@ -162,29 +171,31 @@ class RefinementRuleProvider { | |||
162 | typeInterpretation.elements += newElement | 171 | typeInterpretation.elements += newElement |
163 | typeInterpretation.supertypeInterpretation.forEach[it.elements += newElement] | 172 | typeInterpretation.supertypeInterpretation.forEach[it.elements += newElement] |
164 | 173 | ||
174 | val propagatorStartTime = System.nanoTime | ||
175 | statistics.addExecutionTime(propagatorStartTime-startTime) | ||
176 | |||
165 | // Scope propagation | 177 | // Scope propagation |
166 | scopePropagator.propagateAdditionToType(typeInterpretation) | 178 | scopePropagator.propagateAdditionToType(typeInterpretation) |
167 | 179 | statistics.addScopePropagationTime(System.nanoTime-propagatorStartTime) | |
168 | statistics.addExecutionTime(System.nanoTime-startTime) | ||
169 | ] | 180 | ] |
170 | } | 181 | } |
171 | return ruleBuilder.build | 182 | return ruleBuilder.build |
172 | } | 183 | } |
173 | 184 | ||
174 | def createRelationRefinementRules(GeneratedPatterns patterns, ModelGenerationStatistics statistics) { | 185 | def createRelationRefinementRules(GeneratedPatterns patterns, ScopePropagator scopePropagator, ModelGenerationStatistics statistics) { |
175 | val res = new LinkedHashMap | 186 | val res = new LinkedHashMap |
176 | for(LHSEntry: patterns.refinerelationQueries.entrySet) { | 187 | for(LHSEntry: patterns.refinerelationQueries.entrySet) { |
177 | val declaration = LHSEntry.key.key | 188 | val declaration = LHSEntry.key.key |
178 | val inverseReference = LHSEntry.key.value | 189 | val inverseReference = LHSEntry.key.value |
179 | val lhs = LHSEntry.value as IQuerySpecification<ViatraQueryMatcher<GenericPatternMatch>> | 190 | val lhs = LHSEntry.value as IQuerySpecification<ViatraQueryMatcher<GenericPatternMatch>> |
180 | val rule = createRelationRefinementRule(declaration,inverseReference,lhs,statistics) | 191 | val rule = createRelationRefinementRule(declaration,inverseReference,lhs,scopePropagator,statistics) |
181 | res.put(LHSEntry.key,rule) | 192 | res.put(LHSEntry.key,rule) |
182 | } | 193 | } |
183 | return res | 194 | return res |
184 | } | 195 | } |
185 | 196 | ||
186 | def private BatchTransformationRule<GenericPatternMatch, ViatraQueryMatcher<GenericPatternMatch>> | 197 | def private BatchTransformationRule<GenericPatternMatch, ViatraQueryMatcher<GenericPatternMatch>> |
187 | createRelationRefinementRule(RelationDeclaration declaration, Relation inverseRelation, IQuerySpecification<ViatraQueryMatcher<GenericPatternMatch>> lhs, ModelGenerationStatistics statistics) | 198 | createRelationRefinementRule(RelationDeclaration declaration, Relation inverseRelation, IQuerySpecification<ViatraQueryMatcher<GenericPatternMatch>> lhs, ScopePropagator scopePropagator, ModelGenerationStatistics statistics) |
188 | { | 199 | { |
189 | val name = '''addRelation_«declaration.name.canonizeName»«IF inverseRelation != null»_and_«inverseRelation.name.canonizeName»«ENDIF»''' | 200 | val name = '''addRelation_«declaration.name.canonizeName»«IF inverseRelation != null»_and_«inverseRelation.name.canonizeName»«ENDIF»''' |
190 | val ruleBuilder = factory.createRule | 201 | val ruleBuilder = factory.createRule |
@@ -192,8 +203,9 @@ class RefinementRuleProvider { | |||
192 | .precondition(lhs) | 203 | .precondition(lhs) |
193 | if (inverseRelation == null) { | 204 | if (inverseRelation == null) { |
194 | ruleBuilder.action [ match | | 205 | ruleBuilder.action [ match | |
206 | statistics.incrementTransformationCount | ||
195 | val startTime = System.nanoTime | 207 | val startTime = System.nanoTime |
196 | //println(name) | 208 | // println(name) |
197 | // val problem = match.get(0) as LogicProblem | 209 | // val problem = match.get(0) as LogicProblem |
198 | // val interpretation = match.get(1) as PartialInterpretation | 210 | // val interpretation = match.get(1) as PartialInterpretation |
199 | val relationInterpretation = match.get(2) as PartialRelationInterpretation | 211 | val relationInterpretation = match.get(2) as PartialRelationInterpretation |
@@ -201,12 +213,19 @@ class RefinementRuleProvider { | |||
201 | val trg = match.get(4) as DefinedElement | 213 | val trg = match.get(4) as DefinedElement |
202 | val link = createBinaryElementRelationLink => [it.param1 = src it.param2 = trg] | 214 | val link = createBinaryElementRelationLink => [it.param1 = src it.param2 = trg] |
203 | relationInterpretation.relationlinks += link | 215 | relationInterpretation.relationlinks += link |
204 | statistics.addExecutionTime(System.nanoTime-startTime) | 216 | |
217 | val propagatorStartTime = System.nanoTime | ||
218 | statistics.addExecutionTime(propagatorStartTime-startTime) | ||
219 | |||
220 | // Scope propagation | ||
221 | scopePropagator.propagateAdditionToRelation(declaration) | ||
222 | statistics.addScopePropagationTime(System.nanoTime-propagatorStartTime) | ||
205 | ] | 223 | ] |
206 | } else { | 224 | } else { |
207 | ruleBuilder.action [ match | | 225 | ruleBuilder.action [ match | |
226 | statistics.incrementTransformationCount | ||
208 | val startTime = System.nanoTime | 227 | val startTime = System.nanoTime |
209 | //println(name) | 228 | // println(name) |
210 | // val problem = match.get(0) as LogicProblem | 229 | // val problem = match.get(0) as LogicProblem |
211 | // val interpretation = match.get(1) as PartialInterpretation | 230 | // val interpretation = match.get(1) as PartialInterpretation |
212 | val relationInterpretation = match.get(2) as PartialRelationInterpretation | 231 | val relationInterpretation = match.get(2) as PartialRelationInterpretation |
@@ -217,7 +236,13 @@ class RefinementRuleProvider { | |||
217 | relationInterpretation.relationlinks += link | 236 | relationInterpretation.relationlinks += link |
218 | val inverseLink = createBinaryElementRelationLink => [it.param1 = trg it.param2 = src] | 237 | val inverseLink = createBinaryElementRelationLink => [it.param1 = trg it.param2 = src] |
219 | inverseInterpretation.relationlinks += inverseLink | 238 | inverseInterpretation.relationlinks += inverseLink |
220 | statistics.addExecutionTime(System.nanoTime-startTime) | 239 | |
240 | val propagatorStartTime = System.nanoTime | ||
241 | statistics.addExecutionTime(propagatorStartTime-startTime) | ||
242 | |||
243 | // Scope propagation | ||
244 | scopePropagator.propagateAdditionToRelation(declaration) | ||
245 | statistics.addScopePropagationTime(System.nanoTime-propagatorStartTime) | ||
221 | ] | 246 | ] |
222 | } | 247 | } |
223 | 248 | ||