diff --git a/pom.xml b/pom.xml
index b47e1deb..6f54967a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,181 +1,88 @@
-
- 4.0.0
- de.dhbwstuttgart
- JavaTXcompiler
- jar
+ 4.0.0
+ de.dhbwstuttgart
+ JavaTXcompiler
+ jar
+ 0.1
+ JavaTXcompiler
+ http://maven.apache.org
+
+
+ junit
+ junit
+ 4.0
+ test
+
+
+ org.antlr
+ antlr4
+ 4.7
+
+
+ commons-io
+ commons-io
+ 2.6
+
+
+com.google.guava
+ guava
+ 22.0
+
+
+ org.reflections
+ reflections
+ 0.9.11
+
+
+ org.ow2.asm
+ asm-all
+ [4.0.0,)
+
+
+
- 0.1
- JavaTXcompiler
- http://maven.apache.org
-
-
- junit
- junit
- 4.0
- test
-
-
- org.antlr
- antlr4
- 4.7
-
-
- commons-io
- commons-io
- 2.6
-
-
- com.google.guava
- guava
- 22.0
-
-
- org.reflections
- reflections
- 0.9.11
-
-
-
- org.ow2.asm
- asm
- 7.0
-
-
-
-
-
-
- target
- target/classes
- ${project.artifactId}-${project.version}
- target/test-classes
-
-
- org.antlr
- antlr4-maven-plugin
- 4.7
-
-
- antlr
-
- antlr4
-
-
- src/main/antlr4/java8
- ${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/parser/antlr
-
- -package
- de.dhbwstuttgart.parser.antlr
-
-
-
-
- aspParser
-
- antlr4
-
-
- src/main/antlr4/sat
- ${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/sat/asp/parser/antlr
-
- -package
- de.dhbwstuttgart.sat.asp.parser.antlr
-
-
-
-
-
-
-
- maven-assembly-plugin
-
-
- package
-
- single
-
-
-
-
-
- jar-with-dependencies
-
-
-
-
- org.reficio
- p2-maven-plugin
- 1.1.2-SNAPSHOT
-
-
- default-cli
-
-
-
-
-
- de.dhbwstuttgart:JavaTXcompiler:0.1
-
-
- org.reflections:reflections:0.9.11
-
-
- com.google.guava:guava:22.0
-
-
- javax.annotation:javax.annotation-api:1.3.1
-
-
- org.glassfish:javax.annotation:3.1.1
-
-
-
-
-
-
-
-
-
-
-
- reficio
- http://repo.reficio.org/maven/
-
-
-
-
- maven-repository
- file:///${project.basedir}/target
-
-
-
- 1.8
- 1.8
- 0.23.0
-
-
-
- maven-repository
- MyCo Internal Repository
- file:///${project.basedir}/maven-repository/
-
-
+
+ target
+ target/classes
+ ${artifactId}-${version}
+ target/test-classes
+ src/
+ test/
+
+
+ org.antlr
+ antlr4-maven-plugin
+ 4.7
+
+
+ antlr
+
+ antlr4
+
+
+ src/de/dhbwstuttgart/parser/antlr/
+ src/de/dhbwstuttgart/parser/antlr/
+
+ -package
+ de.dhbwstuttgart.parser.antlr
+
+
+
+
+
+
+
+
+ 1.8
+ 1.8
+
diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
new file mode 100644
index 00000000..1ce9c857
--- /dev/null
+++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -0,0 +1,929 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.RecursiveTask;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
+import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
+import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.SuperType;
+import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
+import de.dhbwstuttgart.typeinference.unify.model.Unifier;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+
+import com.google.common.collect.Ordering;
+
+
+/**
+ * Implementation of the type unification algorithm
+ * @author Florian Steurer
+ */
+public class TypeUnifyTask extends RecursiveTask>> {
+
+ private static final long serialVersionUID = 1L;
+ private static int i = 0;
+ private boolean printtag = false;
+
+ public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
+ FileWriter logFile;
+
+ /**
+ * The implementation of setOps that will be used during the unification
+ */
+ protected ISetOperations setOps = new GuavaSetOperations();
+
+ /**
+ * The implementation of the standard unify that will be used during the unification
+ */
+ protected IUnify stdUnify = new MartelliMontanariUnify();
+
+ /**
+ * The implementation of the rules that will be used during the unification.
+ */
+ protected IRuleSet rules;
+
+ protected Set eq;
+
+ protected IFiniteClosure fc;
+
+ protected Ordering> oup;
+
+ protected boolean parallel;
+
+ public TypeUnifyTask() {
+ rules = new RuleSet();
+ }
+
+ public TypeUnifyTask(Set eq, IFiniteClosure fc, boolean parallel, FileWriter logFile) {
+ this.eq = eq;
+ this.fc = fc;
+ this.oup = new OrderingUnifyPair(fc);
+ this.parallel = parallel;
+ this.logFile = logFile;
+ rules = new RuleSet(logFile);
+ }
+
+ @Override
+ protected Set> compute() {
+ return unify(eq, fc, parallel);
+ }
+
+ /**
+ * Computes all principal type unifiers for a set of constraints.
+ * @param eq The set of constraints
+ * @param fc The finite closure
+ * @return The set of all principal type unifiers
+ */
+ protected Set> unify(Set eq, IFiniteClosure fc, boolean parallel) {
+ /*
+ * Step 1: Repeated application of reduce, adapt, erase, swap
+ */
+ writeLog("Unifikation: " + eq.toString());
+ eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
+ Set eq0 = applyTypeUnificationRules(eq, fc);
+
+ /*
+ * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
+ */
+ Set eq1s = new HashSet<>();
+ Set eq2s = new HashSet<>();
+ splitEq(eq0, eq1s, eq2s);
+
+ /*
+ * Step 4: Create possible typings
+ *
+ * "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
+ * mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
+ */
+
+ // There are up to 10 toplevel set. 8 of 10 are the result of the
+ // cartesian product of the sets created by pattern matching.
+ List>> topLevelSets = new ArrayList<>();
+
+ //System.out.println(eq2s);
+
+ if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
+ Set> wrap = new HashSet<>();
+ wrap.add(eq1s);
+ topLevelSets.add(wrap); // Add Eq1'
+ }
+
+ // Add the set of [a =. Theta | (a=. Theta) in Eq2']
+ Set bufferSet = eq2s.stream()
+ .filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
+ .collect(Collectors.toSet());
+
+ if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
+ Set> wrap = new HashSet<>();
+ wrap.add(bufferSet);
+ topLevelSets.add(wrap);
+ eq2s.removeAll(bufferSet);
+ }
+
+ // Sets that originate from pair pattern matching
+ // Sets of the "second level"
+ Set undefinedPairs = new HashSet<>();
+ if (printtag) System.out.println("eq2s " + eq2s);
+ //writeLog("BufferSet: " + bufferSet.toString()+"\n");
+ Set>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
+ //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
+ //nicht ausgewertet Faculty Beispiel im 1. Schritt
+ //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
+ //Typen getestet werden.
+ if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
+ // If pairs occured that did not match one of the cartesian product cases,
+ // those pairs are contradictory and the unification is impossible.
+ if(!undefinedPairs.isEmpty()) {
+ writeLog("UndefinedPairs; " + undefinedPairs);
+ Set> error = new HashSet<>();
+ error.add(undefinedPairs);
+ return error;
+ }
+
+ /* Up to here, no cartesian products are calculated.
+ * filters for pairs and sets can be applied here */
+
+ // Alternative: Sub cartesian products of the second level (pattern matched) sets
+ // "the big (x)"
+ /* for(Set>> secondLevelSet : secondLevelSets) {
+ //System.out.println("secondLevelSet "+secondLevelSet.size());
+ List>> secondLevelSetList = new ArrayList<>(secondLevelSet);
+ Set>> cartResult = setOps.cartesianProduct(secondLevelSetList);
+ //System.out.println("CardResult: "+cartResult.size());
+ // Flatten and add to top level sets
+ Set> flat = new HashSet<>();
+ int j = 0;
+ for(List> s : cartResult) {
+ j++;
+ //System.out.println("s from CardResult: "+cartResult.size() + " " + j);
+ Set flat1 = new HashSet<>();
+ for(Set s1 : s)
+ flat1.addAll(s1);
+ flat.add(flat1);
+ }
+ //topLevelSets.add(flat);
+ }
+ */
+
+ //Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
+ for(Set>> secondLevelSet : secondLevelSets) {
+ for (Set> secondlevelelem : secondLevelSet) {
+ topLevelSets.add(secondlevelelem);
+ }
+ }
+ //System.out.println(topLevelSets);
+ //System.out.println();
+
+
+ //Aufruf von computeCartesianRecursive ANFANG
+ return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
+
+ }
+
+
+ Set> unify2(Set> setToFlatten, Set eq, IFiniteClosure fc, boolean parallel) {
+ //Aufruf von computeCartesianRecursive ENDE
+
+ //keine Ahnung woher das kommt
+ //Set> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
+
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ // Cartesian product over all (up to 10) top level sets
+ //Set>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
+ // .stream().map(x -> new HashSet<>(x))
+ // .collect(Collectors.toCollection(HashSet::new));
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+
+ Set> eqPrimePrimeSet = new HashSet<>();
+
+ Set forks = new HashSet<>();
+
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ //for(Set> setToFlatten : eqPrimeSet) {
+ // Flatten the cartesian product
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+ Set eqPrime = new HashSet<>();
+ setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
+
+ /*
+ * Step 5: Substitution
+ */
+ //System.out.println("vor Subst: " + eqPrime);
+ Optional> eqPrimePrime = rules.subst(eqPrime);
+
+ /*
+ * Step 6 a) Restart (fork) for pairs where subst was applied
+ */
+ if(parallel) {
+ if (eqPrime.equals(eq)) //PL 2017-09-29 auskommentiert und durch
+ //(!eqPrimePrime.isPresent()) //PL 2071-09-29 dies ersetzt
+ //Begruendung: Wenn in der Substitution keine Veraenderung
+ //(!eqPrimePrime.isPresent()) erfolgt ist, ist das Ergebnis erzielt.
+ eqPrimePrimeSet.add(eqPrime);
+ else if(eqPrimePrime.isPresent()) {
+ //System.out.println("nextStep: " + eqPrimePrime.get());
+ TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile);
+ forks.add(fork);
+ fork.fork();
+ }
+ else {
+ //System.out.println("nextStep: " + eqPrime);
+ TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile);
+ forks.add(fork);
+ fork.fork();
+ }
+ }
+ else { // sequentiell (Step 6b is included)
+ if (printtag) System.out.println("nextStep: " + eqPrimePrime);
+ if (eqPrime.equals(eq)) { //PL 2017-09-29 auskommentiert und durch
+ //(!eqPrimePrime.isPresent()) //PL 2071-09-29 dies ersetzt
+ //Begruendung: Wenn in der Substitution keine Veraenderung
+ //(!eqPrimePrime.isPresent()) erfolgt ist, ist das Ergebnis erzielt.
+ try {
+ if (isSolvedForm(eqPrime)) {
+ logFile.write(eqPrime.toString()+"\n");
+ logFile.flush();
+ }
+ }
+ catch (IOException e) { }
+ eqPrimePrimeSet.add(eqPrime);
+ }
+ else if(eqPrimePrime.isPresent()) {
+ Set> unifyres = unify(eqPrimePrime.get(), fc, false);
+
+ eqPrimePrimeSet.addAll(unifyres);
+ }
+ else {
+ Set> unifyres = unify(eqPrime, fc, false);
+
+
+ eqPrimePrimeSet.addAll(unifyres);
+ }
+ }
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ //}
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+
+ /*
+ * Step 6 b) Build the union over everything.
+ */
+
+ if(parallel)
+ for(TypeUnifyTask fork : forks)
+ eqPrimePrimeSet.addAll(fork.join());
+
+ /*
+ * Step 7: Filter empty sets;
+ */
+ eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
+ if (!eqPrimePrimeSet.isEmpty())
+ writeLog("Result " + eqPrimePrimeSet.toString());
+ return eqPrimePrimeSet;
+ }
+
+
+
+ Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, IFiniteClosure fc, boolean parallel) {
+ ArrayList>> remainingSets = new ArrayList<>(topLevelSets);
+ Set> nextSet = remainingSets.remove(0);
+ ArrayList> nextSetasList = new ArrayList<>(nextSet);
+ Set> result = new HashSet<>();
+ int i = 0;
+ byte variance = nextSetasList.iterator().next().iterator().next().getVariance();
+ Set a_next = null;
+ if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
+ System.out.print("");
+ if (nextSetasList.size()>1) {
+ if (variance == 1) {
+ a_next = oup.max(nextSetasList.iterator());
+ }
+ else if (variance == -1) {
+ a_next = oup.min(nextSetasList.iterator());
+ }
+ else if (variance == 0) {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ while (nextSetasList.size() != 0) {
+ Set a = a_next;
+ //writeLog("nextSet: " + nextSetasList.toString()+ "\n");
+ nextSetasList.remove(a);
+ if (nextSetasList.size() > 0) {
+ if (nextSetasList.size()>1) {
+ if (variance == 1) {
+ a_next = oup.max(nextSetasList.iterator());
+ }
+ else if (variance == -1) {
+ a_next = oup.min(nextSetasList.iterator());
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ //PL 2018-03-01
+ //TODO: 1. Maximum und Minimum unterscheiden
+ //TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
+ //for(Set a : newSet) {
+ i++;
+ Set> elems = new HashSet>(fstElems);
+ elems.add(a);
+ if (remainingSets.isEmpty()) {
+ result.addAll(unify2(elems, eq, fc, parallel));
+ System.out.println("");
+ }
+ else {
+ result.addAll(computeCartesianRecursive(elems, remainingSets, eq, fc, parallel));
+ }
+ if (!result.isEmpty()) {
+ if (variance == 1) {
+ if (a.iterator().next().getLhsType().getName().equals("WL"))
+ System.out.print("");
+ if (a.equals(a_next) ||
+ (oup.compare(a, a_next) == 1)) {
+ System.out.print("");
+ break;
+ }
+ else {
+ System.out.print("");
+ }
+ }
+ else { if (variance == -1) {
+ if (a.iterator().next().getLhsType().getName().equals("A"))
+ System.out.print("");
+ if (a.equals(a_next) || (oup.compare(a, a_next) == -1)) {
+ System.out.print("");
+ break;
+ }
+ else {
+ System.out.print("");
+ }
+ }
+ else if (variance == 0) {
+ break;
+ }
+ }
+ }
+ }
+ return result;
+ }
+
+ protected boolean isUndefinedPairSet(Set> s) {
+ boolean res = true;
+ if (s.size() ==1) {
+ s.iterator().next().stream().forEach(x -> { res = res && x.isUndefinedPair(); return; });
+ return res;
+ }
+
+ }
+ /**
+ * Checks whether a set of pairs is in solved form.
+ * @param eqPrimePrime The set of pair
+ * @return True if in solved form, false otherwise.
+ */
+ protected boolean isSolvedForm(Set eqPrimePrime) {
+ for(UnifyPair pair : eqPrimePrime) {
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ if(!(lhsType instanceof PlaceholderType))
+ return false;
+
+ // If operator is not equals, both sides must be placeholders
+ if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Repeatedly applies type unification rules to a set of equations.
+ * This is step one of the unification algorithm.
+ * @return The set of pairs that results from repeated application of the inference rules.
+ */
+ public Set applyTypeUnificationRules(Set eq, IFiniteClosure fc) {
+
+ /*
+ * Rule Application Strategy:
+ *
+ * 1. Swap all pairs and erase all erasable pairs
+ * 2. Apply all possible rules to a single pair, then move it to the result set.
+ * Iterating over pairs first, then iterating over rules prevents the application
+ * of rules to a "finished" pair over and over.
+ * 2.1 Apply all rules repeatedly except for erase rules. If
+ * the application of a rule creates new pairs, check immediately
+ * against the erase rules.
+ */
+
+
+ LinkedHashSet targetSet = new LinkedHashSet();
+ LinkedList eqQueue = new LinkedList<>();
+
+ /*
+ * Swap all pairs and erase all erasable pairs
+ */
+ eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
+
+ /*
+ * Apply rules until the queue is empty
+ */
+ while(!eqQueue.isEmpty()) {
+ UnifyPair pair = eqQueue.pollFirst();
+
+ // ReduceUp, ReduceLow, ReduceUpLow
+ Optional opt = rules.reduceUpLow(pair);
+ opt = opt.isPresent() ? opt : rules.reduceLow(pair);
+ opt = opt.isPresent() ? opt : rules.reduceUp(pair);
+ opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
+ opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
+ opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
+ opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
+ //PL 2018-03-06 auskommentiert muesste falsch sein vgl. JAVA_BSP/Wildcard6.java
+ //opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
+ //opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
+ //opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
+
+ // Reduce TPH
+ opt = opt.isPresent() ? opt : rules.reduceTph(pair);
+
+ // One of the rules has been applied
+ if(opt.isPresent()) {
+ swapAddOrErase(opt.get(), fc, eqQueue);
+ continue;
+ }
+
+ // Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
+ //try {
+ // logFile.write("PAIR1 " + pair + "\n");
+ // logFile.flush();
+ //}
+ //catch (IOException e) { }
+
+ Optional> optSet = rules.reduce1(pair, fc);
+ optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
+ optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
+ optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
+ optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
+
+ // ReduceTphExt, ReduceTphSup
+ optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
+ optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
+
+
+ // FunN Rules
+ optSet = optSet.isPresent() ? optSet : rules.reduceFunN(pair);
+ optSet = optSet.isPresent() ? optSet : rules.greaterFunN(pair);
+ optSet = optSet.isPresent() ? optSet : rules.smallerFunN(pair);
+
+ // One of the rules has been applied
+ if(optSet.isPresent()) {
+ optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
+ continue;
+ }
+
+ // Adapt, AdaptExt, AdaptSup
+ //try {
+ // logFile.write("PAIR2 " + pair + "\n");
+ // logFile.flush();
+ //}
+ //catch (IOException e) { }
+ opt = rules.adapt(pair, fc);
+ opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
+ opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
+
+ // One of the rules has been applied
+ if(opt.isPresent()) {
+ swapAddOrErase(opt.get(), fc, eqQueue);
+ continue;
+ }
+
+ // None of the rules has been applied
+ targetSet.add(pair);
+ }
+
+ return targetSet;
+ }
+
+ /**
+ * Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
+ * If an erase rule applies, the pair is not added (erased).
+ * @param pair The pair to swap and add or erase.
+ * @param collection The collection to which the pairs are added.
+ */
+ protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection collection) {
+ Optional opt = rules.swap(pair);
+ UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
+
+ if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
+ return;
+
+ collection.add(pair2);
+ }
+
+ /**
+ * Splits the equation eq into a set eq1s where both terms are type variables,
+ * and a set eq2s where one of both terms is not a type variable.
+ * @param eq Set of pairs to be splitted.
+ * @param eq1s Subset of eq where both terms are type variables.
+ * @param eq2s eq/eq1s.
+ */
+ protected void splitEq(Set eq, Set eq1s, Set eq2s) {
+ for(UnifyPair pair : eq)
+ if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
+ eq1s.add(pair);
+ else
+ eq2s.add(pair);
+ }
+
+ /**
+ * Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
+ * @param undefined All pairs that did not match one of the 8 cases are added to this set.
+ * @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
+ * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
+ * (as in case 1 where sigma is added to the innermost set).
+ */
+ protected Set>>> calculatePairSets(Set eq2s, IFiniteClosure fc, Set undefined) {
+ List>>> result = new ArrayList<>(8);
+
+ // Init all 8 cases
+ for(int i = 0; i < 8; i++)
+ result.add(new HashSet<>());
+ Boolean first = true;
+ for(UnifyPair pair : eq2s) {
+ PairOperator pairOp = pair.getPairOp();
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ // Case 1: (a <. Theta')
+ if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType) {
+ //System.out.println(pair);
+ if (first) { //writeLog(pair.toString()+"\n");
+ Set> x1 = unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), (byte)1, fc);
+ //System.out.println(x1);
+ result.get(0).add(x1);
+ }
+ else {
+ Set s1 = new HashSet<>();
+ s1.add(pair);
+ Set> s2 = new HashSet<>();
+ s2.add(s1);
+ result.get(0).add(s2);
+ }
+
+ }
+ // Case 2: (a <.? ? ext Theta')
+ else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
+ if (first) { //writeLog(pair.toString()+"\n");
+ result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), (byte)0, fc));
+ }
+ else {
+ Set s1 = new HashSet<>();
+ s1.add(pair);
+ Set> s2 = new HashSet<>();
+ s2.add(s1);
+ result.get(1).add(s2);
+ }
+
+ // Case 3: (a <.? ? sup Theta')
+ else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
+ if (first) { //writeLog(pair.toString()+"\n");
+ result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, (byte)0, fc));
+ }
+ else {
+ Set s1 = new HashSet<>();
+ s1.add(pair);
+ Set> s2 = new HashSet<>();
+ s2.add(s1);
+ result.get(2).add(s2);
+ }
+
+ // Case 4 was replaced by an inference rule
+ // Case 4: (a <.? Theta')
+ //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
+ // result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
+
+ // Case 5: (Theta <. a)
+ else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
+ if (first) { //writeLog(pair.toString()+"\n");
+ if (rhsType.getName().equals("A"))
+ System.out.println();
+ result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, (byte)-1, fc));
+ }
+ else {
+ Set s1 = new HashSet<>();
+ s1.add(pair);
+ Set> s2 = new HashSet<>();
+ s2.add(s1);
+ result.get(4).add(s2);
+ }
+
+ // Case 6 was replaced by an inference rule.
+ // Case 6: (? ext Theta <.? a)
+ //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
+ // result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
+
+ // Case 7 was replaced by an inference rule
+ // Case 7: (? sup Theta <.? a)
+ //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
+ // result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
+
+ // Case 8: (Theta <.? a)
+ else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
+ if (first) { //writeLog(pair.toString()+"\n");
+ result.get(7).add(
+ unifyCase8(lhsType, (PlaceholderType) rhsType, (byte)0, fc));
+ }
+ else {
+ Set s1 = new HashSet<>();
+ s1.add(pair);
+ Set> s2 = new HashSet<>();
+ s2.add(s1);
+ result.get(7).add(s2);
+ }
+ // Case unknown: If a pair fits no other case, then the type unification has failed.
+ // Through application of the rules, every pair should have one of the above forms.
+ // Pairs that do not have one of the aboves form are contradictory.
+ else {
+ // If a pair is not defined, the unificiation will fail, so the loop can be stopped here.
+ undefined.add(pair);
+ break;
+ }
+ first = false;
+ }
+
+ // Filter empty sets or sets that only contain an empty set.
+ return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
+ .filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
+ }
+
+ /**
+ * Cartesian product Case 1: (a <. Theta')
+ */
+ protected Set> unifyCase1(PlaceholderType a, UnifyType thetaPrime, byte variance, IFiniteClosure fc) {
+ Set> result = new HashSet<>();
+
+ boolean allGen = thetaPrime.getTypeParams().size() > 0;
+ for(UnifyType t : thetaPrime.getTypeParams())
+ if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
+ allGen = false;
+ break;
+ }
+
+ Set cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector, java.util.Vector>, ????java.util.Vector???]
+
+ //PL 18-02-06 entfernt, kommt durch unify wieder rein
+ //cs.add(thetaPrime);
+ //PL 18-02-06 entfernt
+
+ for(UnifyType c : cs) {
+ //PL 18-02-05 getChildren durch smaller ersetzt in getChildren werden die Varianlen nicht ersetzt.
+ Set thetaQs = fc.smaller(c).stream().collect(Collectors.toCollection(HashSet::new));
+ //Set thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
+ //thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
+ //PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
+ //da children offensichtlich ein echtes kleiner und kein kleinergleich ist
+
+ //PL 18-02-06: eingefuegt, thetaQs der Form V> <. V'> werden entfernt
+ thetaQs = thetaQs.stream().filter(ut -> ut.getTypeParams().arePlaceholders()).collect(Collectors.toCollection(HashSet::new));
+ //PL 18-02-06: eingefuegt
+
+ Set thetaQPrimes = new HashSet<>();
+ TypeParams cParams = c.getTypeParams();
+ if(cParams.size() == 0)
+ thetaQPrimes.add(c);
+ else {
+ ArrayList> candidateParams = new ArrayList<>();
+
+ for(UnifyType param : cParams)
+ candidateParams.add(fc.grArg(param));
+
+ for(TypeParams tp : permuteParams(candidateParams))
+ thetaQPrimes.add(c.setTypeParams(tp));
+ }
+
+ for(UnifyType tqp : thetaQPrimes) {
+ //System.out.println(tqp.toString());
+ //i++;
+ //System.out.println(i);
+ //if (i == 62)
+ // System.out.println(tqp.toString());
+ Optional opt = stdUnify.unify(tqp, thetaPrime);
+ if (!opt.isPresent())
+ continue;
+
+ Unifier unifier = opt.get();
+ unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
+ Set substitutionSet = new HashSet<>();
+ for (Entry sigma : unifier)
+ substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
+
+ //List freshTphs = new ArrayList<>(); PL 18-02-06 in die For-Schleife verschoben
+ for (UnifyType tq : thetaQs) {
+ Set smaller = fc.smaller(unifier.apply(tq));
+ for(UnifyType theta : smaller) {
+ List freshTphs = new ArrayList<>();
+ Set resultPrime = new HashSet<>();
+
+ for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
+ if(freshTphs.size()-1 < i)
+ freshTphs.add(PlaceholderType.freshPlaceholder());
+ resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
+ }
+
+ if(allGen)
+ resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
+ else
+ resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
+ resultPrime.addAll(substitutionSet);
+ //writeLog("Substitution: " + substitutionSet.toString());
+ resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
+ result.add(resultPrime);
+ //writeLog("Result: " + resultPrime.toString());
+ //writeLog("MAX: " + oup.max(resultPrime.iterator()).toString());
+ }
+ }
+
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Cartesian Product Case 2: (a <.? ? ext Theta')
+ */
+ private Set> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, byte variance, IFiniteClosure fc) {
+ Set> result = new HashSet<>();
+
+ UnifyType aPrime = PlaceholderType.freshPlaceholder();
+ UnifyType extAPrime = new ExtendsType(aPrime);
+ UnifyType thetaPrime = extThetaPrime.getExtendedType();
+ Set resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
+ result.add(resultPrime);
+
+ resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
+ resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
+ resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
+ result.add(resultPrime);
+ //writeLog("Result: " + resultPrime.toString());
+ return result;
+ }
+
+ /**
+ * Cartesian Product Case 3: (a <.? ? sup Theta')
+ */
+ private Set> unifyCase3(PlaceholderType a, SuperType subThetaPrime, byte variance, IFiniteClosure fc) {
+ Set> result = new HashSet<>();
+
+ UnifyType aPrime = PlaceholderType.freshPlaceholder();
+ UnifyType supAPrime = new SuperType(aPrime);
+ UnifyType thetaPrime = subThetaPrime.getSuperedType();
+ Set resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+
+ resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
+ resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
+ resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+
+ return result;
+ }
+
+ /**
+ * Cartesian Product Case 5: (Theta <. a)
+ */
+ private Set> unifyCase5(UnifyType theta, PlaceholderType a, byte variance, IFiniteClosure fc) {
+ Set> result = new HashSet<>();
+
+ boolean allGen = theta.getTypeParams().size() > 0;
+ for(UnifyType t : theta.getTypeParams())
+ if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
+ allGen = false;
+ break;
+ }
+
+ for(UnifyType thetaS : fc.greater(theta)) {
+ Set resultPrime = new HashSet<>();
+
+ UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
+ for(int i = 0; !allGen && i < freshTphs.length; i++) {
+ freshTphs[i] = PlaceholderType.freshPlaceholder();
+ resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
+ }
+
+ if(allGen)
+ resultPrime.add(new UnifyPair(a, thetaS, PairOperator.EQUALSDOT));
+ else
+ resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
+ resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+ }
+
+ return result;
+ }
+
+ /**
+ * Cartesian Product Case 8: (Theta <.? a)
+ */
+ private Set> unifyCase8(UnifyType theta, PlaceholderType a, byte variance, IFiniteClosure fc) {
+ Set> result = new HashSet<>();
+ //for(UnifyType thetaS : fc.grArg(theta)) {
+ Set resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+
+ UnifyType freshTph = PlaceholderType.freshPlaceholder();
+ resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
+ resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+
+ resultPrime = new HashSet<>();
+ resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
+ resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
+ resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
+ result.add(resultPrime);
+ //writeLog(resultPrime.toString());
+ //}
+
+ return result;
+ }
+
+ /**
+ * Takes a set of candidates for each position and computes all possible permutations.
+ * @param candidates The length of the list determines the number of type params. Each set
+ * contains the candidates for the corresponding position.
+ */
+ protected Set permuteParams(ArrayList> candidates) {
+ Set result = new HashSet<>();
+ permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
+ return result;
+ }
+
+ /**
+ * Takes a set of candidates for each position and computes all possible permutations.
+ * @param candidates The length of the list determines the number of type params. Each set
+ * contains the candidates for the corresponding position.
+ * @param idx Idx for the current permutatiton.
+ * @param result Set of all permutations found so far
+ * @param current The permutation of type params that is currently explored
+ */
+ private void permuteParams(ArrayList> candidates, int idx, Set result, UnifyType[] current) {
+ if(candidates.size() == idx) {
+ result.add(new TypeParams(Arrays.copyOf(current, current.length)));
+ return;
+ }
+
+ Set localCandidates = candidates.get(idx);
+
+ for(UnifyType t : localCandidates) {
+ current[idx] = t;
+ permuteParams(candidates, idx+1, result, current);
+ }
+ }
+
+ void writeLog(String str) {
+ try {
+ logFile.write(str+"\n");
+ logFile.flush();
+
+ }
+ catch (IOException e) { }
+ }
+}
diff --git a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
new file mode 100644
index 00000000..1c5cde8a
--- /dev/null
+++ b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
@@ -0,0 +1,117 @@
+package de.dhbwstuttgart.typeinference.unify.model;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+
+/**
+ * A pair which contains two types and an operator, e.g. (Integer <. a).
+ * The two types and the operator are immutable; the hash code is cached
+ * at construction time.
+ * @author Florian Steurer
+ */
+public class UnifyPair {
+
+ /**
+ * The type on the left hand side of the pair.
+ */
+ private final UnifyType lhs;
+
+ /**
+ * The type on the right hand side of the pair.
+ */
+ private final UnifyType rhs;
+
+ /**
+ * The operator that determines the relation between the left and right hand side type.
+ * Final because the cached hashCode incorporates it; mutating it after
+ * construction would break the equals/hashCode contract.
+ */
+ private final PairOperator pairOp;
+
+ /**
+ * Variance flag used during unification (presumably -1/0/1 — TODO confirm against callers).
+ * Not part of equals/hashCode.
+ */
+ private byte variance = 0;
+
+ /**
+ * Marks pairs that could not be unified; never set within this class.
+ */
+ private boolean undefinedPair = false;
+
+ /**
+ * Cached hash code, computed once in the constructor from lhs, rhs and pairOp.
+ */
+ private final int hashCode;
+
+ /**
+ * Creates a new instance of the pair.
+ * @param lhs The type on the left hand side of the pair.
+ * @param rhs The type on the right hand side of the pair.
+ * @param op The operator that determines the relation between the left and right hand side type.
+ */
+ public UnifyPair(UnifyType lhs, UnifyType rhs, PairOperator op) {
+ this.lhs = lhs;
+ this.rhs = rhs;
+ pairOp = op;
+
+ // Caching hashcode
+ hashCode = 17 + 31 * lhs.hashCode() + 31 * rhs.hashCode() + 31 * pairOp.hashCode();
+ }
+
+ /**
+ * Returns the type on the left hand side of the pair.
+ */
+ public UnifyType getLhsType() {
+ return lhs;
+ }
+
+ /**
+ * Returns the type on the right hand side of the pair.
+ */
+ public UnifyType getRhsType() {
+ return rhs;
+ }
+
+ /**
+ * Returns the operator that determines the relation between the left and right hand side type.
+ */
+ public PairOperator getPairOp() {
+ return pairOp;
+ }
+
+ public byte getVariance() {
+ return variance;
+ }
+
+ public void setVariance(byte v) {
+ variance = v;
+ }
+
+ public boolean isUndefinedPair() {
+ return undefinedPair;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if(!(obj instanceof UnifyPair))
+ return false;
+
+ // Cheap rejection via the cached hash before comparing the types.
+ if(obj.hashCode() != this.hashCode())
+ return false;
+
+ UnifyPair other = (UnifyPair) obj;
+
+ return other.getPairOp() == pairOp
+ && other.getLhsType().equals(lhs)
+ && other.getRhsType().equals(rhs);
+ }
+
+ @Override
+ public int hashCode() {
+ return hashCode;
+ }
+
+ @Override
+ public String toString() {
+ return "(" + lhs + " " + pairOp + " " + rhs + ")";
+ }
+
+ /*
+ public List<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+ ArrayList<PlaceholderType> ret = new ArrayList<>();
+ ret.addAll(lhs.getInvolvedPlaceholderTypes());
+ ret.addAll(rhs.getInvolvedPlaceholderTypes());
+ return ret;
+ }
+ */
+}
+
+