diff options
author | Kristóf Marussy <kris7topher@gmail.com> | 2019-08-14 18:26:33 +0200 |
---|---|---|
committer | Kristóf Marussy <kris7topher@gmail.com> | 2019-08-14 18:26:33 +0200 |
commit | fc84d3fe670331bc89fb1e4c44104bc1fc811438 (patch) | |
tree | 466da8333151c51d2e17075600f9452ed35835da /Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit | |
parent | Be more lenient with rounding IP solver results (diff) | |
download | VIATRA-Generator-fc84d3fe670331bc89fb1e4c44104bc1fc811438.tar.gz VIATRA-Generator-fc84d3fe670331bc89fb1e4c44104bc1fc811438.tar.zst VIATRA-Generator-fc84d3fe670331bc89fb1e4c44104bc1fc811438.zip |
Measurements WIP
Diffstat (limited to 'Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit')
8 files changed, 861 insertions, 261 deletions
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/CountMatches.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/CountMatches.xtend deleted file mode 100644 index 02caf9dd..00000000 --- a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/CountMatches.xtend +++ /dev/null | |||
@@ -1,176 +0,0 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.domains.yakindu.sgraph.yakindumm.YakindummPackage | ||
4 | import hu.bme.mit.inf.dslreasoner.partialsnapshot_mavo.yakindu.mutated.Mutated | ||
5 | import hu.bme.mit.inf.dslreasoner.workspace.FileSystemWorkspace | ||
6 | import java.io.File | ||
7 | import java.util.ArrayList | ||
8 | import java.util.Collection | ||
9 | import java.util.Comparator | ||
10 | import java.util.HashMap | ||
11 | import java.util.List | ||
12 | import java.util.Map | ||
13 | import java.util.TreeSet | ||
14 | import org.eclipse.emf.ecore.EObject | ||
15 | import org.eclipse.emf.ecore.resource.Resource | ||
16 | import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl | ||
17 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | ||
18 | import org.eclipse.viatra.query.runtime.api.IQuerySpecification | ||
19 | import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine | ||
20 | import org.eclipse.viatra.query.runtime.emf.EMFScope | ||
21 | |||
22 | class QueryComparator implements Comparator<IQuerySpecification<?>>{ | ||
23 | |||
24 | override compare(IQuerySpecification<?> arg0, IQuerySpecification<?> arg1) { | ||
25 | arg0.fullyQualifiedName.compareTo(arg1.fullyQualifiedName) | ||
26 | } | ||
27 | } | ||
28 | |||
29 | class CountMatches { | ||
30 | var static List<IQuerySpecification<?>> wfPatterns; | ||
31 | var static Map<IQuerySpecification<?>,IQuerySpecification<?>> query2Reference | ||
32 | |||
33 | def static void main(String[] args) { | ||
34 | YakindummPackage.eINSTANCE.eClass | ||
35 | Resource.Factory.Registry.INSTANCE.extensionToFactoryMap.put("*",new XMIResourceFactoryImpl) | ||
36 | |||
37 | wfPatterns = Mutated.instance.specifications.toList; | ||
38 | //wfPatterns = wfPatterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toList | ||
39 | wfPatterns.sort(new QueryComparator) | ||
40 | |||
41 | val groupName2Representant = new HashMap | ||
42 | query2Reference = new HashMap | ||
43 | for(wfPattern : wfPatterns) { | ||
44 | val groupName = wfPattern.groupName | ||
45 | if(groupName2Representant.containsKey(groupName)) { | ||
46 | val representant = groupName2Representant.get(groupName) | ||
47 | query2Reference.put(wfPattern,representant) | ||
48 | } else { | ||
49 | groupName2Representant.put(groupName,wfPattern) | ||
50 | } | ||
51 | } | ||
52 | |||
53 | |||
54 | println('''modelpath;run;model;« | ||
55 | FOR wfPattern:wfPatterns SEPARATOR ";"»#(« | ||
56 | wfPattern.fullyQualifiedName.split("\\.").last»);hash(« | ||
57 | wfPattern.fullyQualifiedName.split("\\.").last»)«ENDFOR»;« | ||
58 | FOR mutant : wfPatterns.filter[query2Reference.keySet.contains(it)] SEPARATOR ';'»diff(« | ||
59 | mutant.fullyQualifiedName.split("\\.").last»)«ENDFOR»''' | ||
60 | ) | ||
61 | countMatches('''D:/FASE18Meas/RemoHF''') | ||
62 | } | ||
63 | |||
64 | def private static simpleName(IQuerySpecification<?> wfPattern) { | ||
65 | wfPattern.fullyQualifiedName.split("\\.").last | ||
66 | } | ||
67 | def private static groupName(IQuerySpecification<?> wfPattern) { | ||
68 | wfPattern.simpleName.split('_').head | ||
69 | } | ||
70 | |||
71 | def static void countMatches(String path) { | ||
72 | val file = new File(path) | ||
73 | if(file.isDirectory) { | ||
74 | for(subFileName : file.list) { | ||
75 | (path + "/" + subFileName).countMatches | ||
76 | } | ||
77 | } else if(file.isFile) { | ||
78 | if(path.endsWith("xmi")) { | ||
79 | countMatches(file,path) | ||
80 | } | ||
81 | } | ||
82 | } | ||
83 | |||
84 | def static void countMatches(File file, String path) { | ||
85 | |||
86 | |||
87 | val pathSegments = path.split("/") | ||
88 | val groupName = pathSegments.get(pathSegments.size-2).split("\\.").last.split("_").get(0) | ||
89 | print(groupName +";") | ||
90 | val nameExtension = pathSegments.get(pathSegments.size-1).split("\\.").get(0).split("_") | ||
91 | try{ | ||
92 | val runNumber = nameExtension.get(1) | ||
93 | val modelNumber = nameExtension.get(2) | ||
94 | print('''«runNumber»;«modelNumber»''') | ||
95 | } catch(Exception e) { | ||
96 | print('''«file.name»;0''') | ||
97 | } | ||
98 | |||
99 | val parent = file.parent | ||
100 | val workspace = new FileSystemWorkspace(parent,"") | ||
101 | val model = workspace.readModel(EObject,file.name) | ||
102 | |||
103 | val engine = ViatraQueryEngine.on(new EMFScope(model)) | ||
104 | val objectCode = model.eResource.calculateObjectCode | ||
105 | |||
106 | val pattern2Hash = new HashMap | ||
107 | for(pattern : wfPatterns) { | ||
108 | val matcher = pattern.getMatcher(engine) | ||
109 | val matches = matcher.allMatches | ||
110 | val hash = matches.getMatchSetDescriptor(objectCode) | ||
111 | pattern2Hash.put(pattern,hash) | ||
112 | print(''';«matcher.countMatches»;«hash»''') | ||
113 | } | ||
114 | var mutantsKilled = 0 | ||
115 | for(mutant : wfPatterns.filter[query2Reference.keySet.contains(it)]) { | ||
116 | val equals = pattern2Hash.get(mutant) == pattern2Hash.get(query2Reference.get(mutant)) | ||
117 | print(''';''') | ||
118 | if(equals) { | ||
119 | print('0') | ||
120 | } else { | ||
121 | print('1') | ||
122 | mutantsKilled++ | ||
123 | } | ||
124 | } | ||
125 | //print(''';«mutantsKilled»''') | ||
126 | println() | ||
127 | } | ||
128 | |||
129 | def static Map<EObject,Integer> calculateObjectCode(Resource resource) { | ||
130 | val res = new HashMap | ||
131 | val iterator = resource.allContents | ||
132 | var index = 1 | ||
133 | while(iterator.hasNext) { | ||
134 | res.put(iterator.next,index++) | ||
135 | } | ||
136 | return res | ||
137 | } | ||
138 | |||
139 | def static getMatchSetDescriptor(Collection<? extends IPatternMatch> matchSet, Map<EObject,Integer> objectCode) { | ||
140 | val set = new TreeSet(new ArrayComparator) | ||
141 | for(match: matchSet) { | ||
142 | val size = match.parameterNames.size | ||
143 | val idArray = new ArrayList<Integer>(size) | ||
144 | for(i:0..<size) { | ||
145 | val objectInMatch = match.get(i) | ||
146 | if(objectInMatch instanceof EObject) { | ||
147 | val id = objectCode.get(objectInMatch) | ||
148 | if(id!== null) { | ||
149 | idArray+= id | ||
150 | } else { | ||
151 | throw new IllegalArgumentException('''Unindexed object in match: «objectInMatch»''') | ||
152 | } | ||
153 | } else { | ||
154 | throw new IllegalArgumentException('''Unknown type object in match: "«objectInMatch.class.simpleName»"''') | ||
155 | } | ||
156 | } | ||
157 | set += idArray | ||
158 | } | ||
159 | return '''«FOR match : set SEPARATOR ','»[«FOR index : match SEPARATOR ','»«index»«ENDFOR»]«ENDFOR»'''.toString.hashCode | ||
160 | } | ||
161 | } | ||
162 | |||
163 | class ArrayComparator implements Comparator<List<Integer>> { | ||
164 | |||
165 | override compare(List<Integer> arg0, List<Integer> arg1) { | ||
166 | if(arg0.size === arg1.size) { | ||
167 | for(i : 0..<arg0.size) { | ||
168 | val comparison = arg0.get(i).compareTo(arg1.get(i)) | ||
169 | if(comparison !== 0) return comparison | ||
170 | } | ||
171 | return 0 | ||
172 | } else { | ||
173 | throw new IllegalArgumentException('''the arrays need to be in the same size''') | ||
174 | } | ||
175 | } | ||
176 | } \ No newline at end of file | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/Ecore2LogicTraceBasedHint.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/Ecore2LogicTraceBasedHint.xtend new file mode 100644 index 00000000..dc2de30c --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/Ecore2LogicTraceBasedHint.xtend | |||
@@ -0,0 +1,56 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run | ||
2 | |||
3 | import com.google.common.collect.ImmutableMap | ||
4 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic | ||
5 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic_Trace | ||
6 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RelationDeclaration | ||
7 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
8 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeConstraintHint | ||
9 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partialinterpretation.PartialInterpretation | ||
10 | import java.util.Map | ||
11 | import org.eclipse.viatra.query.runtime.api.IPatternMatch | ||
12 | import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher | ||
13 | |||
14 | abstract class Ecore2LogicTraceBasedHint implements LinearTypeConstraintHint { | ||
15 | val Map<String, Type> nameToType | ||
16 | val Map<String, Map<String, RelationDeclaration>> nameToRelation | ||
17 | |||
18 | protected new(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
19 | nameToType = ImmutableMap.copyOf(ecore2Logic.allClassesInScope(trace).toMap[name].mapValues [ eClass | | ||
20 | ecore2Logic.TypeofEClass(trace, eClass) | ||
21 | ]) | ||
22 | nameToRelation = ImmutableMap.copyOf(ecore2Logic.allReferencesInScope(trace).groupBy[EContainingClass.name]. | ||
23 | mapValues [ references | | ||
24 | ImmutableMap.copyOf(references.toMap[name].mapValues [ reference | | ||
25 | ecore2Logic.relationOfReference(trace, reference) | ||
26 | ]) | ||
27 | ]) | ||
28 | } | ||
29 | |||
30 | protected def getType(String name) { | ||
31 | nameToType.get(name) | ||
32 | } | ||
33 | |||
34 | protected def relation(String typeName, String relationName) { | ||
35 | nameToRelation.get(typeName).get(relationName) | ||
36 | } | ||
37 | |||
38 | protected static def <T extends IPatternMatch> int countMatches(ViatraQueryMatcher<T> matcher, PartialInterpretation p) { | ||
39 | val match = matcher.newEmptyMatch | ||
40 | match.set(0, p.problem) | ||
41 | match.set(1, p) | ||
42 | matcher.countMatches(match) | ||
43 | } | ||
44 | |||
45 | protected static def <T extends IPatternMatch> int getCount(ViatraQueryMatcher<T> matcher, PartialInterpretation p) { | ||
46 | val match = matcher.newEmptyMatch | ||
47 | match.set(0, p.problem) | ||
48 | match.set(1, p) | ||
49 | val realMatch = matcher.getOneArbitraryMatch(match) | ||
50 | if (realMatch.present) { | ||
51 | realMatch.get.get(2) as Integer | ||
52 | } else { | ||
53 | 0 | ||
54 | } | ||
55 | } | ||
56 | } | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/MetamodelLoader.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/MetamodelLoader.xtend index 34f3c267..54724226 100644 --- a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/MetamodelLoader.xtend +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/MetamodelLoader.xtend | |||
@@ -4,18 +4,23 @@ import functionalarchitecture.FunctionalarchitecturePackage | |||
4 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.Ecore | 4 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.Ecore |
5 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.FileSystem | 5 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.FileSystem |
6 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.Filesystem.FilesystemPackage | 6 | import hu.bme.mit.inf.dslreasoner.domains.alloyexamples.Filesystem.FilesystemPackage |
7 | import hu.bme.mit.inf.dslreasoner.domains.satellite.queries.internal.SatelliteQueriesAll | ||
7 | import hu.bme.mit.inf.dslreasoner.domains.transima.fam.FamPatterns | 8 | import hu.bme.mit.inf.dslreasoner.domains.transima.fam.FamPatterns |
8 | import hu.bme.mit.inf.dslreasoner.domains.yakindu.sgraph.yakindumm.YakindummPackage | 9 | import hu.bme.mit.inf.dslreasoner.domains.yakindu.sgraph.yakindumm.YakindummPackage |
10 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic | ||
11 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic_Trace | ||
9 | import hu.bme.mit.inf.dslreasoner.ecore2logic.EcoreMetamodelDescriptor | 12 | import hu.bme.mit.inf.dslreasoner.ecore2logic.EcoreMetamodelDescriptor |
10 | import hu.bme.mit.inf.dslreasoner.partialsnapshot_mavo.yakindu.Patterns | 13 | import hu.bme.mit.inf.dslreasoner.partialsnapshot_mavo.yakindu.Patterns |
11 | import hu.bme.mit.inf.dslreasoner.viatra2logic.ViatraQuerySetDescriptor | 14 | import hu.bme.mit.inf.dslreasoner.viatra2logic.ViatraQuerySetDescriptor |
12 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationMethod | 15 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.ModelGenerationMethod |
16 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeConstraintHint | ||
13 | import hu.bme.mit.inf.dslreasoner.viatrasolver.reasoner.ModelGenerationMethodBasedGlobalConstraint | 17 | import hu.bme.mit.inf.dslreasoner.viatrasolver.reasoner.ModelGenerationMethodBasedGlobalConstraint |
14 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace | 18 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace |
15 | import java.util.Collection | 19 | import java.util.Collection |
16 | import java.util.HashMap | 20 | import java.util.HashMap |
17 | import java.util.LinkedHashMap | 21 | import java.util.LinkedHashMap |
18 | import java.util.List | 22 | import java.util.List |
23 | import java.util.Map | ||
19 | import java.util.Set | 24 | import java.util.Set |
20 | import org.eclipse.emf.ecore.EAttribute | 25 | import org.eclipse.emf.ecore.EAttribute |
21 | import org.eclipse.emf.ecore.EClass | 26 | import org.eclipse.emf.ecore.EClass |
@@ -24,60 +29,83 @@ import org.eclipse.emf.ecore.EEnumLiteral | |||
24 | import org.eclipse.emf.ecore.EObject | 29 | import org.eclipse.emf.ecore.EObject |
25 | import org.eclipse.emf.ecore.EReference | 30 | import org.eclipse.emf.ecore.EReference |
26 | import org.eclipse.emf.ecore.EcorePackage | 31 | import org.eclipse.emf.ecore.EcorePackage |
32 | import org.eclipse.xtend.lib.annotations.Data | ||
27 | import org.eclipse.xtext.xbase.lib.Functions.Function1 | 33 | import org.eclipse.xtext.xbase.lib.Functions.Function1 |
28 | import hu.bme.mit.inf.dslreasoner.domains.transima.fam.Type | 34 | import satellite.SatellitePackage |
29 | import hu.bme.mit.inf.dslreasoner.domains.transima.fam.Model | 35 | |
36 | @Data | ||
37 | class TypeQuantiles { | ||
38 | double low | ||
39 | double high | ||
40 | } | ||
30 | 41 | ||
31 | abstract class MetamodelLoader { | 42 | abstract class MetamodelLoader { |
32 | protected val ReasonerWorkspace workspace | 43 | protected val ReasonerWorkspace workspace |
44 | |||
33 | new(ReasonerWorkspace workspace) { | 45 | new(ReasonerWorkspace workspace) { |
34 | this.workspace = workspace | 46 | this.workspace = workspace |
35 | } | 47 | } |
48 | |||
36 | def EcoreMetamodelDescriptor loadMetamodel() | 49 | def EcoreMetamodelDescriptor loadMetamodel() |
50 | |||
37 | def Set<EClass> getRelevantTypes(EcoreMetamodelDescriptor descriptor) | 51 | def Set<EClass> getRelevantTypes(EcoreMetamodelDescriptor descriptor) |
52 | |||
38 | def Set<EReference> getRelevantReferences(EcoreMetamodelDescriptor descriptor) | 53 | def Set<EReference> getRelevantReferences(EcoreMetamodelDescriptor descriptor) |
54 | |||
39 | def ViatraQuerySetDescriptor loadQueries(EcoreMetamodelDescriptor metamodel) | 55 | def ViatraQuerySetDescriptor loadQueries(EcoreMetamodelDescriptor metamodel) |
56 | |||
40 | def List<EObject> loadPartialModel() | 57 | def List<EObject> loadPartialModel() |
41 | 58 | ||
42 | def List<Function1<ModelGenerationMethod,ModelGenerationMethodBasedGlobalConstraint>> additionalConstraints() | 59 | def List<Function1<ModelGenerationMethod, ModelGenerationMethodBasedGlobalConstraint>> additionalConstraints() |
43 | 60 | ||
44 | def <T> filterByNames(Iterable<T> collection, Function1<T,String> nameExtractor, Collection<String> requiredNames) { | 61 | def Map<String, TypeQuantiles> getTypeQuantiles() { |
62 | emptyMap | ||
63 | } | ||
64 | |||
65 | def List<LinearTypeConstraintHint> getHints(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
66 | emptyList | ||
67 | } | ||
68 | |||
69 | def <T> filterByNames(Iterable<T> collection, Function1<T, String> nameExtractor, | ||
70 | Collection<String> requiredNames) { | ||
45 | val res = collection.filter[requiredNames.contains(nameExtractor.apply(it))] | 71 | val res = collection.filter[requiredNames.contains(nameExtractor.apply(it))] |
46 | if(res.size != requiredNames.size) throw new IllegalArgumentException | 72 | if(res.size != requiredNames.size) throw new IllegalArgumentException |
47 | return res.toSet | 73 | return res.toSet |
48 | } | 74 | } |
49 | } | 75 | } |
50 | 76 | ||
51 | class FAMLoader extends MetamodelLoader{ | 77 | class FAMLoader extends MetamodelLoader { |
52 | 78 | ||
53 | new(ReasonerWorkspace workspace) { | 79 | new(ReasonerWorkspace workspace) { |
54 | super(workspace) | 80 | super(workspace) |
55 | } | 81 | } |
56 | 82 | ||
57 | override loadMetamodel() { | 83 | override loadMetamodel() { |
58 | val package = FunctionalarchitecturePackage.eINSTANCE | 84 | val package = FunctionalarchitecturePackage.eINSTANCE |
59 | val List<EClass> classes = package.EClassifiers.filter(EClass).toList | 85 | val List<EClass> classes = package.EClassifiers.filter(EClass).toList |
60 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList | 86 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList |
61 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList | 87 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList |
62 | val List<EReference> references = classes.map[EReferences].flatten.toList | 88 | val List<EReference> references = classes.map[EReferences].flatten.filter[name != "type" && name != "model"]. |
89 | toList | ||
63 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList | 90 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList |
64 | return new EcoreMetamodelDescriptor(classes,#{},false,enums,literals,references,attributes) | 91 | return new EcoreMetamodelDescriptor(classes, #{}, false, enums, literals, references, attributes) |
65 | } | 92 | } |
66 | 93 | ||
67 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { | 94 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { |
68 | return descriptor.classes.filterByNames([it.name],#["FunctionalElement"]) | 95 | return descriptor.classes.filterByNames([it.name], #["FunctionalElement"]) |
69 | } | 96 | } |
97 | |||
70 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { | 98 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { |
71 | return descriptor.references.filterByNames([it.name],#["subElements"]) | 99 | return descriptor.references.filterByNames([it.name], #["subElements"]) |
72 | } | 100 | } |
73 | 101 | ||
74 | override loadQueries(EcoreMetamodelDescriptor metamodel) { | 102 | override loadQueries(EcoreMetamodelDescriptor metamodel) { |
75 | val i = FamPatterns.instance | 103 | val i = FamPatterns.instance |
76 | val patterns = i.specifications.toList | 104 | val patterns = i.specifications.toList |
77 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name== "Constraint"]].toSet | 105 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toSet |
78 | val derivedFeatures = new LinkedHashMap | 106 | val derivedFeatures = new LinkedHashMap |
79 | derivedFeatures.put(Type.instance,metamodel.attributes.filter[it.name == "type"].head) | 107 | // derivedFeatures.put(Type.instance,metamodel.attributes.filter[it.name == "type"].head) |
80 | derivedFeatures.put(Model.instance,metamodel.references.filter[it.name == "model"].head) | 108 | // derivedFeatures.put(Model.instance,metamodel.references.filter[it.name == "model"].head) |
81 | val res = new ViatraQuerySetDescriptor( | 109 | val res = new ViatraQuerySetDescriptor( |
82 | patterns, | 110 | patterns, |
83 | wfPatterns, | 111 | wfPatterns, |
@@ -85,65 +113,67 @@ class FAMLoader extends MetamodelLoader{ | |||
85 | ) | 113 | ) |
86 | return res | 114 | return res |
87 | } | 115 | } |
116 | |||
88 | override loadPartialModel() { | 117 | override loadPartialModel() { |
89 | this.workspace.readModel(EObject,"FAM.xmi").eResource.allContents.toList | 118 | this.workspace.readModel(EObject, "FAM.xmi").eResource.allContents.toList |
90 | } | 119 | } |
91 | 120 | ||
92 | override additionalConstraints() { #[] } | 121 | override additionalConstraints() { #[] } |
93 | } | 122 | } |
94 | 123 | ||
95 | class YakinduLoader extends MetamodelLoader{ | 124 | class YakinduLoader extends MetamodelLoader { |
96 | 125 | ||
97 | var useSynchronization = true; | 126 | var useSynchronization = true; |
98 | var useComplexStates = false; | 127 | var useComplexStates = false; |
99 | public static val patternsWithSynchronization = #[ | 128 | public static val patternsWithSynchronization = #["synchHasNoOutgoing", "synchHasNoIncoming", |
100 | "synchHasNoOutgoing", "synchHasNoIncoming", "SynchronizedIncomingInSameRegion", "notSynchronizingStates", | 129 | "SynchronizedIncomingInSameRegion", "SynchronizedIncomingInSameRegionHelper1", |
101 | "hasMultipleOutgoingTrainsition", "hasMultipleIncomingTrainsition", "SynchronizedRegionsAreNotSiblings", | 130 | "SynchronizedIncomingInSameRegionHelper2", "notSynchronizingStates", "hasMultipleOutgoingTrainsition", |
102 | "SynchronizedRegionDoesNotHaveMultipleRegions", "synchThree", "twoSynch","noSynch2","synch","noSynch4","noSynch3","noSynch"] | 131 | "hasMultipleIncomingTrainsition", "SynchronizedRegionsAreNotSiblings", |
103 | public static val patternsWithComplexStates =#["outgoingFromExit","outgoingFromFinal","choiceHasNoOutgoing","choiceHasNoIncoming"] | 132 | "SynchronizedRegionsAreNotSiblingsHelper1", "SynchronizedRegionsAreNotSiblingsHelper2", |
133 | "SynchronizedRegionDoesNotHaveMultipleRegions", "synchThree", "twoSynch", "noSynch2", "synch", "noSynch4", | ||
134 | "noSynch3", "noSynch"] | ||
135 | public static val patternsWithComplexStates = #["outgoingFromExit", "outgoingFromFinal", "choiceHasNoOutgoing", | ||
136 | "choiceHasNoIncoming"] | ||
137 | |||
104 | new(ReasonerWorkspace workspace) { | 138 | new(ReasonerWorkspace workspace) { |
105 | super(workspace) | 139 | super(workspace) |
106 | YakindummPackage.eINSTANCE.eClass | 140 | YakindummPackage.eINSTANCE.eClass |
107 | } | 141 | } |
108 | 142 | ||
109 | def setUseSynchronization(boolean useSynchronization) { | 143 | def setUseSynchronization(boolean useSynchronization) { |
110 | this.useSynchronization = useSynchronization | 144 | this.useSynchronization = useSynchronization |
111 | } | 145 | } |
146 | |||
112 | def setUseComplexStates(boolean useComplexStates) { | 147 | def setUseComplexStates(boolean useComplexStates) { |
113 | this.useComplexStates = useComplexStates | 148 | this.useComplexStates = useComplexStates |
114 | } | 149 | } |
115 | 150 | ||
116 | override loadMetamodel() { | 151 | override loadMetamodel() { |
117 | val useSynchInThisLoad = this.useSynchronization | 152 | val useSynchInThisLoad = this.useSynchronization |
118 | val useComplexStates = this.useComplexStates | 153 | val useComplexStates = this.useComplexStates |
119 | 154 | ||
120 | val package = YakindummPackage.eINSTANCE | 155 | val package = YakindummPackage.eINSTANCE |
121 | val List<EClass> classes = package.EClassifiers.filter(EClass) | 156 | val List<EClass> classes = package.EClassifiers.filter(EClass).filter [ |
122 | .filter[useSynchInThisLoad || (it.name != "Synchronization")] | 157 | useSynchInThisLoad || (it.name != "Synchronization") |
123 | .filter[useComplexStates || (it.name != "Choice" && it.name != "Exit" && it.name != "FinalState")] | 158 | ].filter[useComplexStates || (it.name != "Choice" && it.name != "Exit" && it.name != "FinalState")].toList |
124 | .toList | ||
125 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList | 159 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList |
126 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList | 160 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList |
127 | val List<EReference> references = classes.map[EReferences].flatten.toList | 161 | val List<EReference> references = classes.map[EReferences].flatten.toList |
128 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList | 162 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList |
129 | 163 | ||
130 | return new EcoreMetamodelDescriptor(classes,#{},false,enums,literals,references,attributes) | 164 | return new EcoreMetamodelDescriptor(classes, #{}, false, enums, literals, references, attributes) |
131 | } | 165 | } |
166 | |||
132 | override loadQueries(EcoreMetamodelDescriptor metamodel) { | 167 | override loadQueries(EcoreMetamodelDescriptor metamodel) { |
133 | val useSynchInThisLoad = this.useSynchronization | 168 | val useSynchInThisLoad = this.useSynchronization |
134 | 169 | ||
135 | val i = Patterns.instance | 170 | val i = Patterns.instance |
136 | val patterns = i.specifications | 171 | val patterns = i.specifications.filter [ spec | |
137 | .filter[spec | | 172 | useSynchInThisLoad || !patternsWithSynchronization.exists[spec.fullyQualifiedName.endsWith(it)] |
138 | useSynchInThisLoad || | 173 | ].filter [ spec | |
139 | !patternsWithSynchronization.exists[spec.fullyQualifiedName.endsWith(it)] | 174 | useComplexStates || !patternsWithComplexStates.exists[spec.fullyQualifiedName.endsWith(it)] |
140 | ] | 175 | ].toList |
141 | .filter[spec | | 176 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toSet |
142 | useComplexStates || | ||
143 | !patternsWithComplexStates.exists[spec.fullyQualifiedName.endsWith(it)] | ||
144 | ] | ||
145 | .toList | ||
146 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name== "Constraint"]].toSet | ||
147 | val derivedFeatures = new LinkedHashMap | 177 | val derivedFeatures = new LinkedHashMap |
148 | val res = new ViatraQuerySetDescriptor( | 178 | val res = new ViatraQuerySetDescriptor( |
149 | patterns, | 179 | patterns, |
@@ -152,53 +182,71 @@ class YakinduLoader extends MetamodelLoader{ | |||
152 | ) | 182 | ) |
153 | return res | 183 | return res |
154 | } | 184 | } |
185 | |||
155 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { | 186 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { |
156 | descriptor.classes.filterByNames([it.name],#["Vertex","Transition","Synchronization"]) | 187 | descriptor.classes.filterByNames([it.name], #["Vertex", "Transition", "Synchronization"]) |
157 | } | 188 | } |
158 | 189 | ||
159 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { | 190 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { |
160 | descriptor.references.filterByNames([it.name],#["source","target"]) | 191 | descriptor.references.filterByNames([it.name], #["source", "target"]) |
161 | } | 192 | } |
162 | 193 | ||
163 | override loadPartialModel() { | 194 | override loadPartialModel() { |
164 | this.workspace.readModel(EObject,"Yakindu.xmi").eResource.allContents.toList | 195 | this.workspace.readModel(EObject, "Yakindu.xmi").eResource.allContents.toList |
165 | } | 196 | } |
166 | 197 | ||
167 | override additionalConstraints() { //#[] | 198 | override additionalConstraints() { // #[] |
168 | #[[method | new SGraphInconsistencyDetector(method)]] | 199 | #[[method|new SGraphInconsistencyDetector(method)]] |
200 | } | ||
201 | |||
202 | override getTypeQuantiles() { | ||
203 | #{ | ||
204 | "Choice" -> new TypeQuantiles(0.118279569892473, 0.154020979020979), | ||
205 | "Entry" -> new TypeQuantiles(0.0283018867924528, 0.0620167525773196), | ||
206 | "Exit" -> new TypeQuantiles(0, 0), | ||
207 | "FinalState" -> new TypeQuantiles(0, 0), | ||
208 | "Region" -> new TypeQuantiles(0.0294117647058824, 0.0633258678611422), | ||
209 | "State" -> new TypeQuantiles(0.132023636740618, 0.175925925925926), | ||
210 | // "Statechart" -> new TypeQuantiles(0.00961538461538462, 0.010752688172043), | ||
211 | "Transition" -> new TypeQuantiles(0.581632653061224, 0.645161290322581) | ||
212 | } | ||
213 | } | ||
214 | |||
215 | override getHints(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
216 | #[new SGraphHint(ecore2Logic, trace)] | ||
169 | } | 217 | } |
170 | } | 218 | } |
171 | 219 | ||
172 | class FileSystemLoader extends MetamodelLoader{ | 220 | class FileSystemLoader extends MetamodelLoader { |
173 | 221 | ||
174 | new(ReasonerWorkspace workspace) { | 222 | new(ReasonerWorkspace workspace) { |
175 | super(workspace) | 223 | super(workspace) |
176 | } | 224 | } |
177 | 225 | ||
178 | override loadMetamodel() { | 226 | override loadMetamodel() { |
179 | val package = FilesystemPackage.eINSTANCE | 227 | val package = FilesystemPackage.eINSTANCE |
180 | val List<EClass> classes = package.EClassifiers.filter(EClass).toList | 228 | val List<EClass> classes = package.EClassifiers.filter(EClass).toList |
181 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList | 229 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList |
182 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList | 230 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList |
183 | val List<EReference> references = classes.map[EReferences].flatten.toList | 231 | val List<EReference> references = classes.map[EReferences].flatten.filter[name != "live"].toList |
184 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList | 232 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList |
185 | return new EcoreMetamodelDescriptor(classes,#{},false,enums,literals,references,attributes) | 233 | return new EcoreMetamodelDescriptor(classes, #{}, false, enums, literals, references, attributes) |
186 | } | 234 | } |
187 | 235 | ||
188 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { | 236 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { |
189 | return null | 237 | return null |
190 | } | 238 | } |
191 | 239 | ||
192 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { | 240 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { |
193 | null | 241 | null |
194 | } | 242 | } |
195 | 243 | ||
196 | override loadQueries(EcoreMetamodelDescriptor metamodel) { | 244 | override loadQueries(EcoreMetamodelDescriptor metamodel) { |
197 | val patternGroup = FileSystem.instance | 245 | val patternGroup = FileSystem.instance |
198 | val patterns = patternGroup.specifications.toList | 246 | val patterns = patternGroup.specifications.toList |
199 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toSet | 247 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toSet |
200 | val derivedFeatures = new HashMap | 248 | val derivedFeatures = new HashMap |
201 | derivedFeatures.put(patternGroup.live,metamodel.references.filter[it.name == "live"].head) | 249 | // derivedFeatures.put(patternGroup.live,metamodel.references.filter[it.name == "live"].head) |
202 | return new ViatraQuerySetDescriptor( | 250 | return new ViatraQuerySetDescriptor( |
203 | patterns, | 251 | patterns, |
204 | wfPatterns, | 252 | wfPatterns, |
@@ -206,41 +254,46 @@ class FileSystemLoader extends MetamodelLoader{ | |||
206 | ) | 254 | ) |
207 | 255 | ||
208 | } | 256 | } |
209 | 257 | ||
210 | override loadPartialModel() { | 258 | override loadPartialModel() { |
211 | this.workspace.readModel(EObject,"fs.xmi").eResource.allContents.toList | 259 | this.workspace.readModel(EObject, "fs.xmi").eResource.allContents.toList |
212 | } | 260 | } |
213 | 261 | ||
214 | override additionalConstraints() { | 262 | override additionalConstraints() { |
215 | #[[method | new FileSystemInconsistencyDetector(method)]] | 263 | #[[method|new FileSystemInconsistencyDetector(method)]] |
216 | } | 264 | } |
217 | 265 | ||
218 | } | 266 | } |
219 | 267 | ||
220 | class EcoreLoader extends MetamodelLoader { | 268 | class EcoreLoader extends MetamodelLoader { |
221 | 269 | ||
222 | new(ReasonerWorkspace workspace) { | 270 | new(ReasonerWorkspace workspace) { |
223 | super(workspace) | 271 | super(workspace) |
224 | } | 272 | } |
225 | 273 | ||
226 | override loadMetamodel() { | 274 | override loadMetamodel() { |
227 | val package = EcorePackage.eINSTANCE | 275 | val package = EcorePackage.eINSTANCE |
228 | val List<EClass> classes = package.EClassifiers.filter(EClass).filter[it.name!="EFactory"].toList | 276 | val List<EClass> classes = package.EClassifiers.filter(EClass).filter [ |
277 | it.name != "EFactory" && it.name != "EObject" && it.name != "EResource" | ||
278 | ].toList | ||
229 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList | 279 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList |
230 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList | 280 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList |
231 | val List<EReference> references = classes.map[EReferences].flatten.filter[it.name!="eFactoryInstance"].filter[!it.derived].toList | 281 | val List<EReference> references = classes.map[EReferences].flatten.filter [ |
232 | val List<EAttribute> attributes = #[] //classes.map[EAttributes].flatten.toList | 282 | it.name != "eFactoryInstance" && it.name != "contents" && it.name != "references" && |
233 | return new EcoreMetamodelDescriptor(classes,#{},false,enums,literals,references,attributes) | 283 | it.name != "eGenericType" && it.name != "eGenericSuperTypes" |
284 | ].filter[!it.derived].toList | ||
285 | val List<EAttribute> attributes = #[] // classes.map[EAttributes].flatten.toList | ||
286 | return new EcoreMetamodelDescriptor(classes, #{}, false, enums, literals, references, attributes) | ||
234 | } | 287 | } |
235 | 288 | ||
236 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { | 289 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { |
237 | return null | 290 | return null |
238 | } | 291 | } |
239 | 292 | ||
240 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { | 293 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { |
241 | null | 294 | null |
242 | } | 295 | } |
243 | 296 | ||
244 | override loadQueries(EcoreMetamodelDescriptor metamodel) { | 297 | override loadQueries(EcoreMetamodelDescriptor metamodel) { |
245 | val patternGroup = Ecore.instance | 298 | val patternGroup = Ecore.instance |
246 | val patterns = patternGroup.specifications.toList | 299 | val patterns = patternGroup.specifications.toList |
@@ -253,13 +306,92 @@ class EcoreLoader extends MetamodelLoader { | |||
253 | ) | 306 | ) |
254 | 307 | ||
255 | } | 308 | } |
256 | 309 | ||
257 | override loadPartialModel() { | 310 | override loadPartialModel() { |
258 | this.workspace.readModel(EObject,"ecore.xmi").eResource.allContents.toList | 311 | this.workspace.readModel(EObject, "ecore.xmi").eResource.allContents.toList |
259 | } | 312 | } |
260 | 313 | ||
261 | override additionalConstraints() { | 314 | override additionalConstraints() { |
262 | #[] | 315 | #[] |
263 | } | 316 | } |
317 | |||
318 | override getTypeQuantiles() { | ||
319 | #{ | ||
320 | "EAnnotation" -> new TypeQuantiles(0, 0), | ||
321 | "EAttribute" -> new TypeQuantiles(0.14, 0.300943396226415), | ||
322 | "EClass" -> new TypeQuantiles(0.224014336917563, 0.372881355932203), | ||
323 | "EDataType" -> new TypeQuantiles(0, 0), | ||
324 | "EEnum" -> new TypeQuantiles(0, 0.0275208638045255), | ||
325 | "EEnumLiteral" -> new TypeQuantiles(0, 0.105204907665065), | ||
326 | "EGenericType" -> new TypeQuantiles(0, 0), | ||
327 | "EOperation" -> new TypeQuantiles(0, 0), | ||
328 | "EPackage" -> new TypeQuantiles(0.0119047619047619, 0.0192307692307692), | ||
329 | "EParameter" -> new TypeQuantiles(0, 0), | ||
330 | "EReference" -> new TypeQuantiles(0.217599234815878, 0.406779661016949), | ||
331 | "EStringToStringMapEntry" -> new TypeQuantiles(0, 0), | ||
332 | "ETypeParameter" -> new TypeQuantiles(0, 0) | ||
333 | } | ||
334 | } | ||
335 | |||
336 | } | ||
337 | |||
338 | class SatelliteLoader extends MetamodelLoader { | ||
339 | |||
340 | new(ReasonerWorkspace workspace) { | ||
341 | super(workspace) | ||
342 | } | ||
343 | |||
344 | override loadMetamodel() { | ||
345 | val package = SatellitePackage.eINSTANCE | ||
346 | val List<EClass> classes = package.EClassifiers.filter(EClass).toList | ||
347 | val List<EEnum> enums = package.EClassifiers.filter(EEnum).toList | ||
348 | val List<EEnumLiteral> literals = enums.map[ELiterals].flatten.toList | ||
349 | val List<EReference> references = classes.map[EReferences].flatten.toList | ||
350 | val List<EAttribute> attributes = classes.map[EAttributes].flatten.toList | ||
351 | return new EcoreMetamodelDescriptor(classes, #{}, false, enums, literals, references, attributes) | ||
352 | } | ||
353 | |||
354 | override getRelevantTypes(EcoreMetamodelDescriptor descriptor) { | ||
355 | null | ||
356 | } | ||
357 | |||
358 | override getRelevantReferences(EcoreMetamodelDescriptor descriptor) { | ||
359 | null | ||
360 | } | ||
361 | |||
362 | override loadQueries(EcoreMetamodelDescriptor metamodel) { | ||
363 | val i = SatelliteQueriesAll.instance | ||
364 | val patterns = i.specifications.toList | ||
365 | val wfPatterns = patterns.filter[it.allAnnotations.exists[it.name == "Constraint"]].toSet | ||
366 | val derivedFeatures = new LinkedHashMap | ||
367 | val res = new ViatraQuerySetDescriptor( | ||
368 | patterns, | ||
369 | wfPatterns, | ||
370 | derivedFeatures | ||
371 | ) | ||
372 | return res | ||
373 | } | ||
374 | |||
375 | override loadPartialModel() { | ||
376 | this.workspace.readModel(EObject, "satellite.xmi").eResource.allContents.toList | ||
377 | } | ||
378 | |||
379 | override additionalConstraints() { #[] } | ||
380 | |||
381 | override getHints(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
382 | #[new SatelliteHint(ecore2Logic, trace)] | ||
383 | } | ||
384 | |||
385 | override getTypeQuantiles() { | ||
386 | #{ | ||
387 | "CubeSat3U" -> new TypeQuantiles(0.1, 0.25), | ||
388 | "CubeSat6U" -> new TypeQuantiles(0, 0.25), | ||
389 | "SmallSat" -> new TypeQuantiles(0, 0.15), | ||
390 | "UHFCommSubsystem" -> new TypeQuantiles(0.08, 0.25), | ||
391 | "XCommSubsystem" -> new TypeQuantiles(0.08, 0.25), | ||
392 | "KaCommSubsystem" -> new TypeQuantiles(0, 0.1), | ||
393 | "InterferometryPayload" -> new TypeQuantiles(0.15, 0.25) | ||
394 | } | ||
395 | } | ||
264 | 396 | ||
265 | } \ No newline at end of file | 397 | } |
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SGraphHint.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SGraphHint.xtend new file mode 100644 index 00000000..97ce4ee6 --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SGraphHint.xtend | |||
@@ -0,0 +1,46 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic | ||
4 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic_Trace | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeExpressionBuilderFactory | ||
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.PatternGenerator | ||
7 | |||
8 | class SGraphHint extends Ecore2LogicTraceBasedHint { | ||
9 | new(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
10 | super(ecore2Logic, trace) | ||
11 | } | ||
12 | |||
13 | override getAdditionalPatterns(extension PatternGenerator patternGenerator) { | ||
14 | "" | ||
15 | } | ||
16 | |||
17 | override createConstraintUpdater(LinearTypeExpressionBuilderFactory it) { | ||
18 | val newEntriesWithoutRegionCount = createBuilder.add(1, "Entry".type).add(-1, "Region".type).build | ||
19 | val newStatesWithoutRegionCount = createBuilder.add(1, "State".type).add(-1, "Region".type).build | ||
20 | val newTransitionWithoutNeedsOutgoingCount = createBuilder.add(1, "Transition".type).add(-1, "Entry".type). | ||
21 | add(-1, "Choice".type).build | ||
22 | val newTransitionWithoutNeedsIncomingCount = createBuilder.add(1, "Transition".type).add(-1, "Choice".type). | ||
23 | build | ||
24 | |||
25 | val regionsWithoutEntryMatcher = createMatcher( | ||
26 | "unfinishedBy_pattern_hu_bme_mit_inf_dslreasoner_partialsnapshot_mavo_yakindu_noEntryInRegion") | ||
27 | val regionsWithoutStateMatcher = createMatcher( | ||
28 | "unfinishedBy_pattern_hu_bme_mit_inf_dslreasoner_partialsnapshot_mavo_yakindu_noStateInRegion") | ||
29 | val entryHasNoOutgoingMatcher = createMatcher( | ||
30 | "unfinishedBy_pattern_hu_bme_mit_inf_dslreasoner_partialsnapshot_mavo_yakindu_noOutgoingTransitionFromEntry") | ||
31 | val choiceHasNoOutgoingMatcher = createMatcher( | ||
32 | "unfinishedBy_pattern_hu_bme_mit_inf_dslreasoner_partialsnapshot_mavo_yakindu_choiceHasNoOutgoing") | ||
33 | val choiceHasNoIncomingMatcher = createMatcher( | ||
34 | "unfinishedBy_pattern_hu_bme_mit_inf_dslreasoner_partialsnapshot_mavo_yakindu_choiceHasNoIncoming") | ||
35 | val transitionWithoutTargetMatcher = createMatcher("unfinishedLowerMultiplicity_target_reference_Transition") | ||
36 | |||
37 | return [ p | | ||
38 | newEntriesWithoutRegionCount.assertEqualsTo(regionsWithoutEntryMatcher.countMatches(p)) | ||
39 | newStatesWithoutRegionCount.tightenLowerBound(regionsWithoutStateMatcher.countMatches(p)) | ||
40 | newTransitionWithoutNeedsOutgoingCount.tightenLowerBound( | ||
41 | entryHasNoOutgoingMatcher.countMatches(p) + choiceHasNoOutgoingMatcher.countMatches(p)) | ||
42 | newTransitionWithoutNeedsIncomingCount.tightenLowerBound( | ||
43 | choiceHasNoIncomingMatcher.countMatches(p) - transitionWithoutTargetMatcher.getCount(p)) | ||
44 | ] | ||
45 | } | ||
46 | } | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SatelliteHint.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SatelliteHint.xtend new file mode 100644 index 00000000..e95c0c64 --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/SatelliteHint.xtend | |||
@@ -0,0 +1,86 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic | ||
4 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic_Trace | ||
5 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.Modality | ||
6 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.LinearTypeExpressionBuilderFactory | ||
7 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.patterns.PatternGenerator | ||
8 | |||
9 | class SatelliteHint extends Ecore2LogicTraceBasedHint { | ||
10 | static val INTERFEROMETY_PAYLOAD = "hint_interferometryPayload" | ||
11 | static val REMAINING_CONTENTS_KA_COMM_SUBSYSTEM = "hint_kaCommSubsystem" | ||
12 | static val HINT_SPACECRAFT_UHF_POSSIBLE_LINK = "hint_spacecraftWithUhfPossibleLink" | ||
13 | static val HINT_SPACECRAFT_UHF_ONLY_NO_LINK = "hint_spacecraftUhfOnlyNoLink" | ||
14 | |||
15 | new(Ecore2Logic ecore2Logic, Ecore2Logic_Trace trace) { | ||
16 | super(ecore2Logic, trace) | ||
17 | } | ||
18 | |||
19 | override getAdditionalPatterns(PatternGenerator it) ''' | ||
20 | pattern «INTERFEROMETY_PAYLOAD»(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement) { | ||
21 | find interpretation(problem, interpretation); | ||
22 | find mustExist(problem, interpretation, object); | ||
23 | «typeIndexer.referInstanceOf("InterferometryPayload".type, Modality.MUST, "object")» | ||
24 | } | ||
25 | |||
26 | private pattern «REMAINING_CONTENTS_KA_COMM_SUBSYSTEM»_helper(problem:LogicProblem, interpretation:PartialInterpretation, object:DefinedElement, remainingContents:java Integer) { | ||
27 | find remainingContents_commSubsystem_reference_CommunicatingElement_helper(problem, interpretation, object, remainingContents); | ||
28 | «typeIndexer.referInstanceOf("SmallSat".type, Modality.MUST, "object")» | ||
29 | } | ||
30 | |||
31 | pattern «REMAINING_CONTENTS_KA_COMM_SUBSYSTEM»(problem:LogicProblem, interpretation:PartialInterpretation, remainingContents:java Integer) { | ||
32 | find interpretation(problem, interpretation); | ||
33 | remainingContents == sum find «REMAINING_CONTENTS_KA_COMM_SUBSYSTEM»_helper(problem, interpretation, _, #_); | ||
34 | } | ||
35 | |||
36 | private pattern hint_spacecraftNotUhfOnly(problem:LogicProblem, interpretation:PartialInterpretation, spacecraft:DefinedElement) { | ||
37 | find interpretation(problem, interpretation); | ||
38 | find mustExist(problem, interpretation, spacecraft); | ||
39 | «typeIndexer.referInstanceOf("Spacecraft".type, Modality.MUST, "spacecraft")» | ||
40 | «relationDeclarationIndexer.referRelation("CommunicatingElement".relation("commSubsystem"), "spacecraft", "comm", Modality.MAY)» | ||
41 | neg «typeIndexer.referInstanceOf("UHFCommSubsystem".type, Modality.MUST, "comm")» | ||
42 | } | ||
43 | |||
44 | private pattern hint_spacecraftWithUhf(problem:LogicProblem, interpretation:PartialInterpretation, spacecraft:DefinedElement) { | ||
45 | find interpretation(problem, interpretation); | ||
46 | find mustExist(problem, interpretation, spacecraft); | ||
47 | «typeIndexer.referInstanceOf("Spacecraft".type, Modality.MUST, "spacecraft")» | ||
48 | «relationDeclarationIndexer.referRelation("CommunicatingElement".relation("commSubsystem"), "spacecraft", "comm", Modality.MUST)» | ||
49 | «typeIndexer.referInstanceOf("UHFCommSubsystem".type, Modality.MUST, "comm")» | ||
50 | } | ||
51 | |||
52 | pattern «HINT_SPACECRAFT_UHF_POSSIBLE_LINK»(problem:LogicProblem, interpretation:PartialInterpretation) { | ||
53 | find hint_spacecraftWithUhf(problem, interpretation, spacecraft); | ||
54 | find hint_spacecraftNotUhfOnly(problem, interpretation, spacecraft); | ||
55 | } | ||
56 | |||
57 | pattern «HINT_SPACECRAFT_UHF_ONLY_NO_LINK»(problem:LogicProblem, interpretation:PartialInterpretation) { | ||
58 | find interpretation(problem, interpretation); | ||
59 | find mustExist(problem, interpretation, spacecraft); | ||
60 | «typeIndexer.referInstanceOf("Spacecraft".type, Modality.MUST, "spacecraft")» | ||
61 | neg find hint_spacecraftNotUhfOnly(problem, interpretation, spacecraft); | ||
62 | find currentInRelation_pattern_hu_bme_mit_inf_dslreasoner_domains_satellite_queries_noLinkToGroundStation(problem, interpretation, spacecraft); | ||
63 | } | ||
64 | ''' | ||
65 | |||
66 | override createConstraintUpdater(LinearTypeExpressionBuilderFactory it) { | ||
67 | val interferometryPayloadCount = createBuilder.add(1, "InterferometryPayload".type).build | ||
68 | val kaCommSubsystemWithoutSmallSatCount = createBuilder.add(1, "KaCommSubsystem".type).add(-2, "SmallSat".type). | ||
69 | build | ||
70 | val uhfCommSubsystemCount = createBuilder.add(1, "UHFCommSubsystem".type).build | ||
71 | |||
72 | val interferometryPayloadMatcher = createMatcher(INTERFEROMETY_PAYLOAD) | ||
73 | val kaCommSubsystemRemainingContentsMatcher = createMatcher(REMAINING_CONTENTS_KA_COMM_SUBSYSTEM) | ||
74 | val uhfPossibleLinkMatcher = createMatcher(HINT_SPACECRAFT_UHF_POSSIBLE_LINK) | ||
75 | val uhfNoLinkMatcher = createMatcher(HINT_SPACECRAFT_UHF_ONLY_NO_LINK) | ||
76 | |||
77 | return [ p | | ||
78 | interferometryPayloadCount.tightenLowerBound(2 - interferometryPayloadMatcher.countMatches(p)) | ||
79 | kaCommSubsystemWithoutSmallSatCount.tightenUpperBound(kaCommSubsystemRemainingContentsMatcher.getCount(p)) | ||
80 | if (uhfPossibleLinkMatcher.countMatches(p) == 0 && uhfNoLinkMatcher.countMatches(p) >= 1) { | ||
81 | uhfCommSubsystemCount.tightenLowerBound(1) | ||
82 | } | ||
83 | ] | ||
84 | } | ||
85 | |||
86 | } | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/TypeDistributionCalculator.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/TypeDistributionCalculator.xtend new file mode 100644 index 00000000..e2d6e6ca --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/TypeDistributionCalculator.xtend | |||
@@ -0,0 +1,35 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run | ||
2 | |||
3 | import hu.bme.mit.inf.dslreasoner.domains.yakindu.sgraph.yakindumm.YakindummPackage | ||
4 | import java.io.File | ||
5 | import org.eclipse.emf.common.util.URI | ||
6 | import org.eclipse.emf.ecore.EPackage | ||
7 | import org.eclipse.emf.ecore.EcorePackage | ||
8 | import org.eclipse.emf.ecore.resource.Resource | ||
9 | import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl | ||
10 | import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl | ||
11 | |||
12 | class TypeDistributionCalculator { | ||
13 | public static def void main(String[] args) { | ||
14 | Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("*", new XMIResourceFactoryImpl) | ||
15 | EPackage.Registry.INSTANCE.put(EcorePackage.eNS_URI, EcorePackage.eINSTANCE) | ||
16 | EPackage.Registry.INSTANCE.put(YakindummPackage.eNS_URI, YakindummPackage.eINSTANCE) | ||
17 | |||
18 | println("model,className,count") | ||
19 | val directory = new File(args.get(0)) | ||
20 | for (file : directory.listFiles) { | ||
21 | val modelName = file.name | ||
22 | val resourceSet = new ResourceSetImpl | ||
23 | val resource = resourceSet.getResource(URI.createFileURI(file.absolutePath), true) | ||
24 | val objectsByTypeName = resource.allContents.filter [ obj | | ||
25 | val featureName = obj.eContainingFeature?.name | ||
26 | // Filter out "derived containment" references in Ecore. | ||
27 | // See https://stackoverflow.com/a/46340165 | ||
28 | featureName != "eGenericType" && featureName != "eGenericSuperTypes" | ||
29 | ].groupBy[eClass.name] | ||
30 | for (pair : objectsByTypeName.entrySet) { | ||
31 | println('''«modelName»,«pair.key»,«pair.value.size»''') | ||
32 | } | ||
33 | } | ||
34 | } | ||
35 | } | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScript.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScript.xtend new file mode 100644 index 00000000..5abff962 --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScript.xtend | |||
@@ -0,0 +1,70 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run.script | ||
2 | |||
3 | import java.util.List | ||
4 | import org.eclipse.xtend.lib.annotations.Accessors | ||
5 | |||
6 | @Accessors | ||
7 | class MeasurementScript { | ||
8 | String inputPath | ||
9 | String outputPath | ||
10 | int timeout | ||
11 | boolean saveModels | ||
12 | boolean saveTemporaryFiles | ||
13 | int warmupIterations | ||
14 | int iterations | ||
15 | Domain domain | ||
16 | Scope scope | ||
17 | List<Integer> sizes | ||
18 | Solver solver | ||
19 | ScopePropagator scopePropagator | ||
20 | ScopeConstraints propagatedConstraints | ||
21 | PolyhedronSolver polyhedronSolver | ||
22 | ScopeHeuristic scopeHeuristic | ||
23 | |||
24 | def toCsvHeader() { | ||
25 | '''«domain»,«scope»,«solver»,«scopePropagator ?: "NULL"»,«propagatedConstraints ?: "NULL"»,«polyhedronSolver ?: "NULL"»''' | ||
26 | } | ||
27 | } | ||
28 | |||
29 | enum Domain { | ||
30 | fs, | ||
31 | ecore, | ||
32 | Yakindu, | ||
33 | FAM, | ||
34 | satellite | ||
35 | } | ||
36 | |||
37 | enum Scope { | ||
38 | none, | ||
39 | quantiles | ||
40 | } | ||
41 | |||
42 | enum Solver { | ||
43 | ViatraSolver, | ||
44 | AlloySolver | ||
45 | } | ||
46 | |||
47 | enum ScopePropagator { | ||
48 | none, | ||
49 | basic, | ||
50 | polyhedral | ||
51 | } | ||
52 | |||
53 | enum ScopeConstraints { | ||
54 | none, | ||
55 | typeHierarchy, | ||
56 | relations, | ||
57 | hints | ||
58 | } | ||
59 | |||
60 | enum PolyhedronSolver { | ||
61 | Z3Integer, | ||
62 | Z3Real, | ||
63 | Cbc, | ||
64 | Clp | ||
65 | } | ||
66 | |||
67 | enum ScopeHeuristic { | ||
68 | basic, | ||
69 | polyhedral | ||
70 | } | ||
diff --git a/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScriptRunner.xtend b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScriptRunner.xtend new file mode 100644 index 00000000..48e750cb --- /dev/null +++ b/Tests/hu.bme.mit.inf.dslreasoner.run/src/hu/bme/mit/inf/dslreasoner/run/script/MeasurementScriptRunner.xtend | |||
@@ -0,0 +1,351 @@ | |||
1 | package hu.bme.mit.inf.dslreasoner.run.script | ||
2 | |||
3 | import com.google.gson.Gson | ||
4 | import hu.bme.mit.inf.dslreasoner.ecore2logic.EClassMapper | ||
5 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic | ||
6 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2LogicConfiguration | ||
7 | import hu.bme.mit.inf.dslreasoner.ecore2logic.Ecore2Logic_Trace | ||
8 | import hu.bme.mit.inf.dslreasoner.logic.model.builder.DocumentationLevel | ||
9 | import hu.bme.mit.inf.dslreasoner.logic.model.builder.TypeScopes | ||
10 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.DefinedElement | ||
11 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.IntLiteral | ||
12 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.RealLiteral | ||
13 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.StringLiteral | ||
14 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.Type | ||
15 | import hu.bme.mit.inf.dslreasoner.logic.model.logiclanguage.TypeDefinition | ||
16 | import hu.bme.mit.inf.dslreasoner.logic.model.logicproblem.LogicProblem | ||
17 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.IntStatisticEntry | ||
18 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.LogicresultFactory | ||
19 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.ModelResult | ||
20 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.RealStatisticEntry | ||
21 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.Statistics | ||
22 | import hu.bme.mit.inf.dslreasoner.logic.model.logicresult.StringStatisticEntry | ||
23 | import hu.bme.mit.inf.dslreasoner.logic2ecore.Logic2Ecore | ||
24 | import hu.bme.mit.inf.dslreasoner.run.EcoreLoader | ||
25 | import hu.bme.mit.inf.dslreasoner.run.FAMLoader | ||
26 | import hu.bme.mit.inf.dslreasoner.run.FileSystemLoader | ||
27 | import hu.bme.mit.inf.dslreasoner.run.MetamodelLoader | ||
28 | import hu.bme.mit.inf.dslreasoner.run.SatelliteLoader | ||
29 | import hu.bme.mit.inf.dslreasoner.run.YakinduLoader | ||
30 | import hu.bme.mit.inf.dslreasoner.util.CollectionsUtil | ||
31 | import hu.bme.mit.inf.dslreasoner.viatra2logic.Viatra2Logic | ||
32 | import hu.bme.mit.inf.dslreasoner.viatra2logic.Viatra2LogicConfiguration | ||
33 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.PolyhedralScopePropagatorConstraints | ||
34 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.PolyhedralScopePropagatorSolver | ||
35 | import hu.bme.mit.inf.dslreasoner.viatrasolver.logic2viatra.cardinality.ScopePropagatorStrategy | ||
36 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretation2logic.InstanceModel2Logic | ||
37 | import hu.bme.mit.inf.dslreasoner.viatrasolver.partialinterpretationlanguage.partial2logicannotations.PartialModelRelation2Assertion | ||
38 | import hu.bme.mit.inf.dslreasoner.viatrasolver.reasoner.ViatraReasoner | ||
39 | import hu.bme.mit.inf.dslreasoner.viatrasolver.reasoner.ViatraReasonerConfiguration | ||
40 | import hu.bme.mit.inf.dslreasoner.workspace.FileSystemWorkspace | ||
41 | import hu.bme.mit.inf.dslreasoner.workspace.ReasonerWorkspace | ||
42 | import java.io.FileReader | ||
43 | import java.util.HashMap | ||
44 | import java.util.HashSet | ||
45 | import java.util.Map | ||
46 | import java.util.Set | ||
47 | import org.eclipse.emf.ecore.EObject | ||
48 | import org.eclipse.emf.ecore.resource.Resource | ||
49 | import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl | ||
50 | import org.eclipse.viatra.query.patternlanguage.emf.EMFPatternLanguageStandaloneSetup | ||
51 | import org.eclipse.viatra.query.runtime.api.ViatraQueryEngineOptions | ||
52 | import org.eclipse.viatra.query.runtime.rete.matcher.ReteBackendFactory | ||
53 | import org.eclipse.xtend.lib.annotations.Data | ||
54 | |||
55 | class MeasurementScriptRunner { | ||
56 | static val MODEL_SIZE_GAP = 0 | ||
57 | static val SCOPE_PROPAGATOR_TIMEOUT = 10 | ||
58 | static val USEC_TO_MSEC = 1000000 | ||
59 | |||
60 | static extension val LogicresultFactory = LogicresultFactory.eINSTANCE | ||
61 | |||
62 | val MeasurementScript script | ||
63 | val ReasonerWorkspace inputWorkspace | ||
64 | val ReasonerWorkspace outputWorkspace | ||
65 | val MetamodelLoader metamodelLoader | ||
66 | |||
67 | new(MeasurementScript script) { | ||
68 | this.script = script | ||
69 | inputWorkspace = new FileSystemWorkspace(script.inputPath + "/", "") | ||
70 | outputWorkspace = new FileSystemWorkspace(script.outputPath + | ||
71 | "/", '''«script.domain»_«script.solver»_«script.scope»_«script.scopePropagator ?: "na"»_«script.propagatedConstraints ?: "na"»_«script.polyhedronSolver ?: "na"»_''') | ||
72 | metamodelLoader = switch (script.domain) { | ||
73 | case fs: new FileSystemLoader(inputWorkspace) | ||
74 | case ecore: new EcoreLoader(inputWorkspace) | ||
75 | case Yakindu: new YakinduLoader(inputWorkspace) => [useSynchronization = false; useComplexStates = true] | ||
76 | case FAM: new FAMLoader(inputWorkspace) | ||
77 | case satellite: new SatelliteLoader(inputWorkspace) | ||
78 | default: throw new IllegalArgumentException("Unsupported domain: " + script.domain) | ||
79 | } | ||
80 | } | ||
81 | |||
82 | def run() { | ||
83 | if (script.sizes.empty) { | ||
84 | return | ||
85 | } | ||
86 | val start = System.currentTimeMillis | ||
87 | val warmupSize = script.sizes.head | ||
88 | for (var int i = 0; i < script.warmupIterations; i++) { | ||
89 | System.err.println('''Warmup «i + 1»/«script.warmupIterations»...''') | ||
90 | runExperiment(warmupSize) | ||
91 | } | ||
92 | val warmupEnd = System.currentTimeMillis | ||
93 | System.err.println('''Warmup completed in «(warmupEnd - start) / 1000» seconds''') | ||
94 | for (size : script.sizes) { | ||
95 | var int failures = 0 | ||
96 | for (var int i = 0; i < script.iterations; i++) { | ||
97 | System.err.println("Running GC...") | ||
98 | runGc() | ||
99 | System.err.println('''Iteration «i + 1»/«script.iterations» of size «size»...''') | ||
100 | val startTime = System.currentTimeMillis | ||
101 | val result = runExperiment(size) | ||
102 | val headerPrefix = '''«script.toCsvHeader»,«size»,«i + 1»,«result.resultName»''' | ||
103 | println('''«headerPrefix»,startTime,«startTime»''') | ||
104 | println('''«headerPrefix»,logic2SolverTransformationTime,«result.statistics.transformationTime»''') | ||
105 | println('''«headerPrefix»,solverTime,«result.statistics.solverTime»''') | ||
106 | for (statistic : result.statistics.entries) { | ||
107 | val valueString = switch (statistic) { | ||
108 | IntStatisticEntry: statistic.value.toString | ||
109 | RealStatisticEntry: statistic.value.toString | ||
110 | StringStatisticEntry: statistic.value.toString | ||
111 | default: statistic.toString | ||
112 | } | ||
113 | println('''«headerPrefix»,«statistic.name»,«valueString»''') | ||
114 | } | ||
115 | if (script.saveModels && result.model !== null) { | ||
116 | outputWorkspace.writeModel(result.model, '''«size»_«i + 1».xmi''') | ||
117 | } | ||
118 | if (result.resultName === "InsuficientResourcesResultImpl") { | ||
119 | failures++ | ||
120 | } | ||
121 | System.out.flush | ||
122 | } | ||
123 | if (failures == script.iterations) { | ||
124 | System.err.println("All measurements failed") | ||
125 | return | ||
126 | } | ||
127 | } | ||
128 | val end = System.currentTimeMillis | ||
129 | System.err.println('''Measurement completed in «(end - start) / 1000» seconds''') | ||
130 | } | ||
131 | |||
132 | private static def void runGc() { | ||
133 | System.gc | ||
134 | Thread.sleep(100) | ||
135 | System.gc | ||
136 | Thread.sleep(100) | ||
137 | System.gc | ||
138 | Thread.sleep(800) | ||
139 | } | ||
140 | |||
141 | private def runExperiment(int modelSize) { | ||
142 | if (script.solver != Solver.ViatraSolver) { | ||
143 | throw new IllegalArgumentException("Only VIATRA-Generator is supported") | ||
144 | } | ||
145 | val config = new ViatraReasonerConfiguration | ||
146 | config.solutionScope.numberOfRequiredSolutions = 1 | ||
147 | config.scopePropagatorStrategy = switch (script.scopePropagator) { | ||
148 | case none: | ||
149 | ScopePropagatorStrategy.None | ||
150 | case basic: | ||
151 | switch (script.propagatedConstraints) { | ||
152 | case none: | ||
153 | ScopePropagatorStrategy.Basic | ||
154 | case typeHierarchy: | ||
155 | ScopePropagatorStrategy.BasicTypeHierarchy | ||
156 | case relations, | ||
157 | case hints: | ||
158 | throw new IllegalArgumentException( | ||
159 | "Basic scope propagator does not support relational and hint constraints") | ||
160 | default: | ||
161 | throw new IllegalArgumentException("Unknown scope constraints: " + script.propagatedConstraints) | ||
162 | } | ||
163 | case polyhedral: { | ||
164 | val constraints = switch (script.propagatedConstraints) { | ||
165 | case none: | ||
166 | throw new IllegalArgumentException( | ||
167 | "Polyhedral scope propagator needs at least type hierarchy constraints") | ||
168 | case typeHierarchy: | ||
169 | PolyhedralScopePropagatorConstraints.TypeHierarchy | ||
170 | case relations, | ||
171 | case hints: | ||
172 | PolyhedralScopePropagatorConstraints.Relational | ||
173 | default: | ||
174 | throw new IllegalArgumentException("Unknown scope constraints: " + script.propagatedConstraints) | ||
175 | } | ||
176 | val polyhedronSolver = switch (script.polyhedronSolver) { | ||
177 | case Z3Integer: PolyhedralScopePropagatorSolver.Z3Integer | ||
178 | case Z3Real: PolyhedralScopePropagatorSolver.Z3Real | ||
179 | case Cbc: PolyhedralScopePropagatorSolver.Cbc | ||
180 | case Clp: PolyhedralScopePropagatorSolver.Clp | ||
181 | default: throw new IllegalArgumentException("Unknown polyhedron solver: " + script.polyhedronSolver) | ||
182 | } | ||
183 | val updateHeuristic = script.scopeHeuristic != ScopeHeuristic.basic | ||
184 | new ScopePropagatorStrategy.Polyhedral(constraints, polyhedronSolver, updateHeuristic, | ||
185 | SCOPE_PROPAGATOR_TIMEOUT) | ||
186 | } | ||
187 | default: | ||
188 | throw new IllegalArgumentException("Unknown scope propagator: " + script.scopePropagator) | ||
189 | } | ||
190 | config.runtimeLimit = script.timeout | ||
191 | config.documentationLevel = if(script.saveTemporaryFiles) DocumentationLevel.NORMAL else DocumentationLevel.NONE | ||
192 | config.debugConfiguration.partialInterpretatioVisualiser = null | ||
193 | config.searchSpaceConstraints.additionalGlobalConstraints += metamodelLoader.additionalConstraints | ||
194 | |||
195 | val modelLoadingStart = System.nanoTime | ||
196 | val metamodelDescriptor = metamodelLoader.loadMetamodel | ||
197 | val partialModelDescriptor = metamodelLoader.loadPartialModel | ||
198 | val queryDescriptor = metamodelLoader.loadQueries(metamodelDescriptor) | ||
199 | val modelLoadingTime = System.nanoTime - modelLoadingStart | ||
200 | |||
201 | val domain2LogicTransformationStart = System.nanoTime | ||
202 | val Ecore2Logic ecore2Logic = new Ecore2Logic | ||
203 | val Viatra2Logic viatra2Logic = new Viatra2Logic(ecore2Logic) | ||
204 | val InstanceModel2Logic instanceModel2Logic = new InstanceModel2Logic | ||
205 | var modelGeneration = ecore2Logic.transformMetamodel(metamodelDescriptor, new Ecore2LogicConfiguration()) | ||
206 | var problem = modelGeneration.output | ||
207 | problem = instanceModel2Logic.transform( | ||
208 | modelGeneration, | ||
209 | partialModelDescriptor | ||
210 | ).output | ||
211 | problem = viatra2Logic.transformQueries( | ||
212 | queryDescriptor, | ||
213 | modelGeneration, | ||
214 | new Viatra2LogicConfiguration | ||
215 | ).output | ||
216 | initializeScope(config, modelSize, problem, ecore2Logic, modelGeneration.trace) | ||
217 | if (script.propagatedConstraints == ScopeConstraints.hints) { | ||
218 | config.hints = metamodelLoader.getHints(ecore2Logic, modelGeneration.trace) | ||
219 | } | ||
220 | val domain2LogicTransformationTime = System.nanoTime - domain2LogicTransformationStart | ||
221 | |||
222 | if (config.documentationLevel != DocumentationLevel.NONE) { | ||
223 | outputWorkspace.writeModel(problem, "initial.logicproblem") | ||
224 | } | ||
225 | |||
226 | val solver = new ViatraReasoner | ||
227 | val result = solver.solve(problem, config, outputWorkspace) | ||
228 | val statistics = result.statistics | ||
229 | statistics.entries += createIntStatisticEntry => [ | ||
230 | name = "modelLoadingTime" | ||
231 | value = (modelLoadingTime / USEC_TO_MSEC) as int | ||
232 | ] | ||
233 | statistics.entries += createIntStatisticEntry => [ | ||
234 | name = "domain2LogicTransformationTime" | ||
235 | value = (domain2LogicTransformationTime / USEC_TO_MSEC) as int | ||
236 | ] | ||
237 | var EObject modelResult = null | ||
238 | if (result instanceof ModelResult) { | ||
239 | val intepretations = solver.getInterpretations(result) | ||
240 | if (intepretations.size != 1) { | ||
241 | throw new IllegalStateException("Expected 1 interpretation, got " + intepretations.size) | ||
242 | } | ||
243 | var resultTransformationStart = System.nanoTime | ||
244 | val logic2Ecore = new Logic2Ecore(ecore2Logic) | ||
245 | modelResult = logic2Ecore.transformInterpretation(intepretations.head, modelGeneration.trace) | ||
246 | val resultTransformationTime = System.nanoTime - resultTransformationStart | ||
247 | statistics.entries += createIntStatisticEntry => [ | ||
248 | name = "ecore2LogicTransformationTime" | ||
249 | value = (resultTransformationTime / USEC_TO_MSEC) as int | ||
250 | ] | ||
251 | } | ||
252 | |||
253 | new ExperimentResult(result.class.simpleName, statistics, modelResult) | ||
254 | } | ||
255 | |||
	/**
	 * Configures the element-count scopes of {@code config.typeScopes} for a run
	 * targeting {@code modelSize} elements.
	 *
	 * First sets the global min/max new-element bounds, then — depending on
	 * {@code script.scope} — optionally adds per-type bounds derived from the
	 * metamodel loader's type quantiles.
	 *
	 * @param config the reasoner configuration whose type scopes are mutated in place
	 * @param modelSize desired total model size; negative means "unbounded"
	 * @param problem the logic problem, used to enumerate types and known elements
	 * @param eClassMapper maps EClasses to logic types (also used for the scope class list)
	 * @param trace the Ecore2Logic trace required by the mapper
	 */
	private def initializeScope(ViatraReasonerConfiguration config, int modelSize, LogicProblem problem,
		EClassMapper eClassMapper, Ecore2Logic_Trace trace) {
		// Also registers literal values from the partial model into config.typeScopes.
		val knownElements = initializeKnownElements(problem, config.typeScopes)
		if (modelSize < 0) {
			// Negative size means no target: allow any number of new elements.
			config.typeScopes.minNewElements = 0
			config.typeScopes.maxNewElements = TypeScopes.Unlimited
		} else {
			// Elements already present in the partial model count toward modelSize
			// (deduplicated across types via toSet).
			val numberOfKnownElements = knownElements.values.flatten.toSet.size
			val newElementCount = modelSize - numberOfKnownElements
			config.typeScopes.minNewElements = newElementCount
			// MODEL_SIZE_GAP gives the solver some slack above the exact target.
			config.typeScopes.maxNewElements = newElementCount + MODEL_SIZE_GAP
		}
		switch (script.scope) {
			case none:
				return
			case quantiles: {
				// Per-type bounds: quantiles are fractions of the total model size,
				// keyed by EClass name.
				val quantiles = metamodelLoader.typeQuantiles
				for (eClassInScope : eClassMapper.allClassesInScope(trace)) {
					val quantile = quantiles.get(eClassInScope.name)
					if (quantile !== null) {
						val type = eClassMapper.TypeofEClass(trace, eClassInScope)
						val knownInstances = knownElements.get(type)
						val currentCount = if(knownInstances === null) 0 else knownInstances.size
						// Floor the lower bound and ceil the (slack-extended) upper bound
						// so the admissible interval only widens from rounding.
						val lowCount = Math.floor(modelSize * quantile.low) as int
						val highCount = Math.ceil((modelSize + MODEL_SIZE_GAP) * quantile.high) as int
						// NOTE(review): lowCount - currentCount can be negative when the
						// partial model already exceeds the quantile; presumably the type
						// scopes treat a negative minimum as zero — confirm in TypeScopes.
						config.typeScopes.minNewElementsByType.put(type, lowCount - currentCount)
						config.typeScopes.maxNewElementsByType.put(type, highCount - currentCount)
					}
				}
			}
			default:
				throw new IllegalArgumentException("Unknown scope: " + script.scope)
		}
	}
290 | |||
291 | /* | ||
292 | * Copied from hu.bme.mit.inf.dslreasoner.application.execution.ScopeLoader.initialiseknownElements(LogicProblem, TypeScopes) | ||
293 | */ | ||
294 | private static def initializeKnownElements(LogicProblem p, TypeScopes s) { | ||
295 | val Map<Type, Set<DefinedElement>> res = new HashMap | ||
296 | |||
297 | // 1. fill map with every types | ||
298 | for (t : p.types) { | ||
299 | res.put(t, new HashSet) | ||
300 | } | ||
301 | |||
302 | // 2. fill map with every objects | ||
303 | for (definedType : p.types.filter(TypeDefinition)) { | ||
304 | val supertypes = CollectionsUtil.<Type>transitiveClosureStar(definedType)[supertypes] | ||
305 | for (supertype : supertypes) { | ||
306 | for (element : definedType.elements) { | ||
307 | res.get(supertype).add(element) | ||
308 | } | ||
309 | } | ||
310 | } | ||
311 | val partialModelContents = p.annotations.filter(PartialModelRelation2Assertion).map[target].toList.map [ | ||
312 | eAllContents.toIterable | ||
313 | ].flatten.toList | ||
314 | s.knownIntegers += partialModelContents.filter(IntLiteral).map[it.value] | ||
315 | s.knownReals += partialModelContents.filter(RealLiteral).map[it.value] | ||
316 | s.knownStrings += partialModelContents.filter(StringLiteral).map[it.value] | ||
317 | |||
318 | res | ||
319 | } | ||
320 | |||
321 | public static def void main(String[] args) { | ||
322 | if (args.length != 1) { | ||
323 | System.err.println("Missing measurement script name.") | ||
324 | System.exit(-1) | ||
325 | } | ||
326 | EMFPatternLanguageStandaloneSetup.doSetup | ||
327 | ViatraQueryEngineOptions.setSystemDefaultBackends(ReteBackendFactory.INSTANCE, ReteBackendFactory.INSTANCE, | ||
328 | ReteBackendFactory.INSTANCE) | ||
329 | Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put("*", new XMIResourceFactoryImpl) | ||
330 | val config = readConfig(args.get(0)) | ||
331 | val runnner = new MeasurementScriptRunner(config) | ||
332 | runnner.run() | ||
333 | } | ||
334 | |||
335 | static def readConfig(String scriptPath) { | ||
336 | val gson = new Gson | ||
337 | val reader = new FileReader(scriptPath) | ||
338 | try { | ||
339 | gson.fromJson(reader, MeasurementScript) | ||
340 | } finally { | ||
341 | reader.close | ||
342 | } | ||
343 | } | ||
344 | |||
	/**
	 * Immutable outcome of a single measurement run.
	 */
	@Data
	private static class ExperimentResult {
		// Simple class name of the solver's result object (e.g. "ModelResult"),
		// identifying the kind of outcome.
		String resultName
		// Solver statistics, extended with the measured loading/transformation times.
		Statistics statistics
		// The back-transformed EMF instance model; null when the solver did not
		// produce a ModelResult.
		EObject model
	}
351 | } | ||