Diffstat (limited to 'subprojects/viatra-runtime/src/main/java/tools/refinery/viatra/runtime/matchers/psystem/rewriters/PBodyNormalizer.java')
-rw-r--r--  subprojects/viatra-runtime/src/main/java/tools/refinery/viatra/runtime/matchers/psystem/rewriters/PBodyNormalizer.java  310
1 file changed, 310 insertions(+), 0 deletions(-)
diff --git a/subprojects/viatra-runtime/src/main/java/tools/refinery/viatra/runtime/matchers/psystem/rewriters/PBodyNormalizer.java b/subprojects/viatra-runtime/src/main/java/tools/refinery/viatra/runtime/matchers/psystem/rewriters/PBodyNormalizer.java
new file mode 100644
index 00000000..90943129
--- /dev/null
+++ b/subprojects/viatra-runtime/src/main/java/tools/refinery/viatra/runtime/matchers/psystem/rewriters/PBodyNormalizer.java
@@ -0,0 +1,310 @@
/*******************************************************************************
 * Copyright (c) 2004-2010 Gabor Bergmann and Daniel Varro
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v. 2.0 which is available at
 * http://www.eclipse.org/legal/epl-v20.html.
 *
 * SPDX-License-Identifier: EPL-2.0
 *******************************************************************************/

package tools.refinery.viatra.runtime.matchers.psystem.rewriters;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

import tools.refinery.viatra.runtime.matchers.context.IInputKey;
import tools.refinery.viatra.runtime.matchers.context.IQueryMetaContext;
import tools.refinery.viatra.runtime.matchers.planning.QueryProcessingException;
import tools.refinery.viatra.runtime.matchers.planning.helpers.TypeHelper;
import tools.refinery.viatra.runtime.matchers.psystem.ITypeConstraint;
import tools.refinery.viatra.runtime.matchers.psystem.ITypeInfoProviderConstraint;
import tools.refinery.viatra.runtime.matchers.psystem.PBody;
import tools.refinery.viatra.runtime.matchers.psystem.PConstraint;
import tools.refinery.viatra.runtime.matchers.psystem.TypeJudgement;
import tools.refinery.viatra.runtime.matchers.psystem.basicdeferred.Equality;
import tools.refinery.viatra.runtime.matchers.psystem.basicdeferred.Inequality;
import tools.refinery.viatra.runtime.matchers.psystem.queries.PDisjunction;
import tools.refinery.viatra.runtime.matchers.psystem.queries.PQuery;
import tools.refinery.viatra.runtime.matchers.psystem.queries.PQuery.PQueryStatus;
import tools.refinery.viatra.runtime.matchers.util.CollectionsFactory;

/**
 * A disjunction rewriter for creating a normalized form of the specification, unifying variables and running basic
 * sanity checks. This rewriter does not copy the original specification but modifies it directly, requiring a
 * mutable disjunction.
 *
 * @author Gabor Bergmann
 *
 */
public class PBodyNormalizer extends PDisjunctionRewriter {

    private IQueryMetaContext context;

    public PBodyNormalizer(IQueryMetaContext context) {
        this.context = context;
    }

    /**
     * Returns whether implied types should be calculated so that inferrable type constraints can be eliminated.
     * This behavior can be customized by overriding this method in a subclass.
     *
     * @since 1.6
     */
    protected boolean shouldCalculateImpliedTypes(PQuery query) {
        return true;
    }

    /**
     * Returns whether 'weakened alternative' suggestions of the context shall be expanded as additional
     * PConstraints. This behavior can be customized by overriding this method in a subclass.
     *
     * @since 1.6
     */
    protected boolean shouldExpandWeakenedAlternatives(PQuery query) {
        return false;
    }

    @Override
    public PDisjunction rewrite(PDisjunction disjunction) {
        Set<PBody> normalizedBodies = new LinkedHashSet<>();
        for (PBody body : disjunction.getBodies()) {
            PBodyCopier copier = new PBodyCopier(body, getTraceCollector());
            PBody modifiedBody = copier.getCopiedBody();
            normalizeBody(modifiedBody);
            normalizedBodies.add(modifiedBody);
            modifiedBody.setStatus(PQueryStatus.OK);
        }
        return new PDisjunction(normalizedBodies);
    }
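
    // Usage sketch (illustrative, not part of the original file; the names metaContext and query are
    // assumptions): the normalizer is typically applied to a query's disjunction roughly as
    //     PBodyNormalizer normalizer = new PBodyNormalizer(metaContext);
    //     PDisjunction normalized = normalizer.rewrite(query.getDisjunctBodies());
    // Note that rewrite() copies each body via PBodyCopier before normalizing it, while
    // normalizeBody(PBody) below modifies the body it receives in place.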

    public void setContext(IQueryMetaContext context) {
        this.context = context;
    }

    /**
     * Provides a normalized version of the pattern body. May return a body different from the original one if
     * needed.
     *
     * @param body the pattern body to normalize
     */
    public PBody normalizeBody(PBody body) {
        try {
            return normalizeBodyInternal(body);
        } catch (QueryProcessingException e) {
            throw new RewriterException("Error during rewriting: {1}", new String[] { e.getMessage() },
                    e.getShortMessage(), body.getPattern(), e);
        }
    }

    PBody normalizeBodyInternal(PBody body) {
        // UNIFICATION AND WEAK INEQUALITY ELIMINATION
        unifyVariablesAlongEqualities(body);
        eliminateWeakInequalities(body);
        removeMootEqualities(body);

        // ADDING WEAKENED ALTERNATIVES
        if (shouldExpandWeakenedAlternatives(body.getPattern())) {
            expandWeakenedAlternativeConstraints(body);
        }

        // CONSTRAINT ELIMINATION WITH TYPE INFERENCE
        if (shouldCalculateImpliedTypes(body.getPattern())) {
            eliminateInferrableTypes(body, context);
        } else {
            // ELIMINATE DUPLICATE TYPE CONSTRAINTS
            eliminateDuplicateTypeConstraints(body);
        }

        // PREVENTIVE CHECKS
        checkSanity(body);
        return body;
    }

    private void removeMootEqualities(PBody body) {
        Set<Equality> equals = body.getConstraintsOfType(Equality.class);
        for (Equality equality : equals) {
            if (equality.isMoot()) {
                equality.delete();
                derivativeRemoved(equality, ConstraintRemovalReason.MOOT_EQUALITY);
            }
        }
    }

    /**
     * Unifies all variables along equalities so that they can be handled as one.
     *
     * @param body the pattern body to process
     */
    void unifyVariablesAlongEqualities(PBody body) {
        Set<Equality> equals = body.getConstraintsOfType(Equality.class);
        for (Equality equality : equals) {
            if (!equality.isMoot()) {
                equality.getWho().unifyInto(equality.getWithWhom());
            }
        }
    }
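
    // Illustration (assumed example, not from the original source): for a body containing the equality
    // x == y, unifyInto merges the two variables, so every later constraint that referred to either x
    // or y now refers to the single unified variable.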

    /**
     * Eliminates weak inequalities if they are not substantiated.
     *
     * @param body the pattern body to process
     */
    void eliminateWeakInequalities(PBody body) {
        for (Inequality inequality : body.getConstraintsOfType(Inequality.class)) {
            if (inequality.isEliminable()) {
                inequality.eliminateWeak();
                derivativeRemoved(inequality, ConstraintRemovalReason.WEAK_INEQUALITY_SELF_LOOP);
            }
        }
    }
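
    // Illustration (assumed example): a weak inequality that became a self-loop, e.g. an x != y that
    // turned into x != x because x and y were unified above, is eliminable and is dropped here;
    // inequalities between variables that remain distinct are kept.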

    /**
     * Eliminates all type constraints that are inferrable from other constraints.
     */
    void eliminateInferrableTypes(final PBody body, IQueryMetaContext context) {
        Set<TypeJudgement> subsumedByRetainedConstraints = new HashSet<TypeJudgement>();
        LinkedList<ITypeConstraint> allTypeConstraints = new LinkedList<ITypeConstraint>();
        for (PConstraint pConstraint : body.getConstraints()) {
            if (pConstraint instanceof ITypeConstraint) {
                allTypeConstraints.add((ITypeConstraint) pConstraint);
            } else if (pConstraint instanceof ITypeInfoProviderConstraint) {
                // non-type constraints are all retained
                final Set<TypeJudgement> directJudgements = ((ITypeInfoProviderConstraint) pConstraint)
                        .getImpliedJudgements(context);
                subsumedByRetainedConstraints = TypeHelper.typeClosure(subsumedByRetainedConstraints, directJudgements,
                        context);
            }
        }
        Comparator<ITypeConstraint> eliminationOrder = (o1, o2) -> {
            IInputKey type1 = o1.getEquivalentJudgement().getInputKey();
            IInputKey type2 = o2.getEquivalentJudgement().getInputKey();

            int result = context.getSuggestedEliminationOrdering().compare(type1, type2);
            return (result == 0)
                    ? PConstraint.COMPARE_BY_MONOTONOUS_ID.compare(o1, o2)
                    : result;
        };

        Collections.sort(allTypeConstraints, eliminationOrder);
        Queue<ITypeConstraint> potentialConstraints = allTypeConstraints; // rename for better comprehension

        while (!potentialConstraints.isEmpty()) {
            ITypeConstraint candidate = potentialConstraints.poll();

            boolean isSubsumed = subsumedByRetainedConstraints.contains(candidate.getEquivalentJudgement());
            if (!isSubsumed) {
                Set<TypeJudgement> typeClosure = subsumedByRetainedConstraints;
                for (ITypeConstraint subsuming : potentialConstraints) { // the remaining ones
                    final Set<TypeJudgement> directJudgements = subsuming.getImpliedJudgements(context);
                    typeClosure = TypeHelper.typeClosure(typeClosure, directJudgements, context);

                    if (typeClosure.contains(candidate.getEquivalentJudgement())) {
                        isSubsumed = true;
                        break;
                    }
                }
            }
            if (isSubsumed) { // eliminated
                candidate.delete();
                derivativeRemoved(candidate, ConstraintRemovalReason.TYPE_SUBSUMED);
            } else { // retained
                subsumedByRetainedConstraints = TypeHelper.typeClosure(subsumedByRetainedConstraints,
                        candidate.getImpliedJudgements(context), context);
            }
        }
    }
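
    // Illustration (assumed type hierarchy): if a retained constraint already implies that a variable
    // is a Student, and the meta context knows Student to be a subtype of Person, then an explicit
    // Person type constraint on the same variable is covered by the computed closure and deleted above.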

    /**
     * Inserts "weakened alternative" constraints suggested by the meta context that aid in coming up with a query
     * plan.
     */
    void expandWeakenedAlternativeConstraints(PBody body) {
        Set<TypeJudgement> allJudgements = new HashSet<TypeJudgement>();
        Set<TypeJudgement> newJudgementsToAdd = new HashSet<TypeJudgement>();
        Queue<TypeJudgement> judgementsToProcess = new LinkedList<TypeJudgement>();
        Map<TypeJudgement, List<PConstraint>> traceability = CollectionsFactory.createMap();

        for (ITypeConstraint typeConstraint : body.getConstraintsOfType(ITypeConstraint.class)) {
            TypeJudgement equivalentJudgement = typeConstraint.getEquivalentJudgement();
            judgementsToProcess.add(equivalentJudgement);
            allJudgements.add(equivalentJudgement);
            traceability.computeIfAbsent(equivalentJudgement, k -> new ArrayList<>()).add(typeConstraint);
        }

        while (!judgementsToProcess.isEmpty()) {
            TypeJudgement judgement = judgementsToProcess.poll();
            for (TypeJudgement alternativeJudgement : judgement.getWeakenedAlternativeJudgements(context)) {
                if (allJudgements.add(alternativeJudgement)) {
                    newJudgementsToAdd.add(alternativeJudgement);
                    judgementsToProcess.add(alternativeJudgement);
                    traceability.merge(
                            alternativeJudgement,
                            traceability.getOrDefault(judgement, new ArrayList<>()),
                            (old, further) -> { old.addAll(further); return old; });
                }
            }
        }

        for (TypeJudgement typeJudgement : newJudgementsToAdd) {
            PConstraint newConstraint = typeJudgement.createConstraintFor(body);
            for (PConstraint source : traceability.getOrDefault(typeJudgement, Collections.emptyList())) {
                addTrace(source, newConstraint);
            }
        }
    }
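
    // Illustration (assumed meta context behaviour): if the context suggests, for example, unary type
    // constraints on the end points of a binary type judgement as its weakened alternatives, those
    // alternatives are added as new constraints here, with traces back to the constraints whose
    // judgements implied them.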

    private Object getConstraintKey(PConstraint constraint) {
        if (constraint instanceof ITypeConstraint) {
            return ((ITypeConstraint) constraint).getEquivalentJudgement();
        }
        // Do not check duplication for any other types
        return constraint;
    }

    void eliminateDuplicateTypeConstraints(PBody body) {
        Map<Object, PConstraint> constraints = new HashMap<>();
        for (PConstraint constraint : body.getConstraints()) {
            Object key = getConstraintKey(constraint);
            // Retain first found instance of a constraint
            if (!constraints.containsKey(key)) {
                constraints.put(key, constraint);
            }
        }

        // Retain collected constraints, remove everything else
        Iterator<PConstraint> iterator = body.getConstraints().iterator();
        Collection<PConstraint> toRetain = constraints.values();
        while (iterator.hasNext()) {
            PConstraint next = iterator.next();
            if (!toRetain.contains(next)) {
                derivativeRemoved(next, ConstraintRemovalReason.DUPLICATE);
                iterator.remove();
            }
        }
    }
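
    // Illustration (assumed example): two syntactically separate type constraints that map to the same
    // TypeJudgement key are treated as duplicates; only the first one encountered is retained, and the
    // other is removed with reason DUPLICATE.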

    /**
     * Verifies the sanity of all constraints. Should be issued as a preventive check before query planning.
     *
     * @param body the pattern body to check
     * @throws QueryProcessingException if a constraint is found to be invalid
     */
    void checkSanity(PBody body) {
        for (PConstraint pConstraint : body.getConstraints())
            pConstraint.checkSanity();
    }

}