diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
new file mode 100644
index 00000000..53ab6493
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
@@ -0,0 +1,120 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.Vector;
+
+import com.google.common.collect.Ordering;
+
+public class PartialOrderSet<E, F extends Ordering<E>> implements Set<E> {
+
+    HashSet<E> hs = new HashSet<>();
+    Vector<E> ve = new Vector<>();
+    F ordering;
+
+    PartialOrderSet(F ordering) {
+        this.ordering= ordering;
+    }
+
+    PartialOrderSet(F ordering, Set<? extends E> s) {
+        this.ordering= ordering;
+        this.addAll(s);
+    }
+
+    @Override
+    public int size() {
+        return ve.size();
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return ve.isEmpty();
+    }
+
+    @Override
+    public boolean contains(Object o) {
+        return hs.contains(o);
+    }
+
+    @Override
+    public Iterator<E> iterator() {
+        return ve.iterator();
+    }
+
+    @Override
+    public Object[] toArray() {
+        return ve.toArray();
+    }
+
+    @Override
+    public <T> T[] toArray(T[] a) {
+        return ve.toArray(a);
+    }
+
+    public ArrayList<E> toArrayList() {
+        return new ArrayList<>(ve);
+    }
+
+    @Override
+    public boolean add(E e) {
+        if (this.contains(e)) {
+            return false;
+        }
+        hs.add(e);
+        for(int i = 0; i< ve.size(); i++) {
+            if (ordering.compare(e, ve.elementAt(i)) == -1) {
+                ve.insertElementAt(e, i);
+                return true;
+            }
+        }
+        ve.addElement(e);
+        return true;
+    }
+
+    @Override
+    public boolean remove(Object o) {
+        hs.remove(o);
+        return ve.remove(o);
+    }
+
+    @Override
+    public boolean containsAll(Collection<?> c) {
+        return hs.containsAll(c);
+    }
+
+    @Override
+    public boolean addAll(Collection<? extends E> c) {
+        Boolean ret = false;
+        Iterator<? extends E> cit = c.iterator();
+        while(cit.hasNext()) {
+            Boolean retnew = this.add(cit.next());
+            ret = ret || retnew;
+        }
+        return ret;
+    }
+
+    @Override
+    public boolean retainAll(Collection<?> c) {
+        hs.retainAll(c);
+        return ve.retainAll(c);
+    }
+
+    @Override
+    public boolean removeAll(Collection<?> c) {
+        hs.removeAll(c);
+        return ve.removeAll(c);
+    }
+
+    @Override
+    public void clear() {
+        hs.clear();
+        ve.clear();
+    }
+
+    @Override
+    public String toString() {
+        return ve.toString();
+    }
+}
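The following standalone sketch (not part of the patch) illustrates how the new PartialOrderSet behaves: add() walks the backing Vector and inserts the element before the first entry that the supplied Guava Ordering ranks larger, so iteration follows the order induced by the comparator, while the HashSet gives constant-time duplicate checks. StringLengthOrdering is a hypothetical stand-in for OrderingUnifyPair, used only for illustration; the demo sits in the same package because the constructors are package-private.

package de.dhbwstuttgart.typeinference.unify;

import java.util.Arrays;

import com.google.common.collect.Ordering;

public class PartialOrderSetDemo {

    // Hypothetical ordering standing in for OrderingUnifyPair: shorter strings first.
    static class StringLengthOrdering extends Ordering<String> {
        @Override
        public int compare(String left, String right) {
            // Return exactly -1/0/1, because PartialOrderSet.add tests compare(...) == -1.
            if (left.length() < right.length()) return -1;
            return left.length() == right.length() ? 0 : 1;
        }
    }

    public static void main(String[] args) {
        PartialOrderSet<String, StringLengthOrdering> set =
                new PartialOrderSet<>(new StringLengthOrdering());
        set.addAll(Arrays.asList("lambda", "fc", "unify", "fc")); // the duplicate "fc" is rejected
        System.out.println(set); // prints [fc, unify, lambda]: positions chosen by the ordering
    }
}

Note that only the position of a newly added element is determined by the ordering; elements already in the Vector are never reshuffled, which is sufficient for the partial orders used in the unification code.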
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
index d1fc2afe..8b43a204 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -1,20 +1,25 @@
 package de.dhbwstuttgart.typeinference.unify;
 
 import java.io.FileWriter;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
+import java.util.stream.Collectors;
 
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 public class TypeUnify {
     public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
         ForkJoinPool pool = new ForkJoinPool();
         pool.invoke(unifyTask);
         Set<Set<UnifyPair>> res = unifyTask.join();
@@ -22,14 +27,18 @@ public class TypeUnify {
     }
 
     public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
         ForkJoinPool pool = new ForkJoinPool();
         pool.invoke(unifyTask);
         return ret;
     }
 
     public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
         ForkJoinPool pool = new ForkJoinPool();
         pool.invoke(unifyTask);
         Set<Set<UnifyPair>> res = unifyTask.join();
@@ -45,7 +54,9 @@ public class TypeUnify {
      */
     public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
         Set<Set<UnifyPair>> res = unifyTask.compute();
         return res;
     }
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
index 6916f6dc..a556546b 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -9,13 +9,14 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 public class TypeUnify2Task extends TypeUnifyTask {
 
     Set<Set<UnifyPair>> setToFlatten;
 
-    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
+    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
         super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
         this.setToFlatten = setToFlatten;
     }
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index eb81e200..dba50be1 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -90,7 +90,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
     protected Set<UnifyPair> eq; //und-constraints
 
-    protected List<Set<Constraint<UnifyPair>>> oderConstraintsField;
+    protected List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsField;
 
     protected IFiniteClosure fc;
 
@@ -128,7 +128,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     }
     */
 
-    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons2) {
+    public TypeUnifyTask(Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons2) {
         synchronized (this) {
             this.eq = eq;
             //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -203,7 +203,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         oderConstraintsField.stream()
             .filter(x -> x.size()==1)
             .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
-        ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
+        ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingOderconstraints = oderConstraintsField.stream()
             .filter(x -> x.size()>1)
             .collect(Collectors.toCollection(ArrayList::new));
         Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
@@ -477,7 +477,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * @param fc The finite closure
      * @return The set of all principal type unifiers
      */
-    protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+    protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
         //Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
         //        ).collect(Collectors.toCollection(HashSet::new));
         //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -531,12 +531,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         // There are up to 10 toplevel set. 8 of 10 are the result of the
         // cartesian product of the sets created by pattern matching.
-        List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets = new ArrayList<>();
 
         //System.out.println(eq2s);
 
         if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-            Set<Set<UnifyPair>> wrap = new HashSet<>();
+            PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
             wrap.add(eq1s);
             topLevelSets.add(wrap); // Add Eq1'
         }
@@ -547,7 +547,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 .collect(Collectors.toSet());
 
         if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-            Set<Set<UnifyPair>> wrap = new HashSet<>();
+            PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
             wrap.add(bufferSet);
             topLevelSets.add(wrap);
             eq2s.removeAll(bufferSet);
@@ -558,8 +558,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         Set<UnifyPair> undefinedPairs = new HashSet<>();
         if (printtag) System.out.println("eq2s " + eq2s);
         //writeLog("BufferSet: " + bufferSet.toString()+"\n");
-        List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
-        Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
+        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
+        Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
         //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
         //nicht ausgewertet Faculty Beispiel im 1. Schritt
         //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
@@ -606,8 +606,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
          */
 
         //Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
-        for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
-            for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
+        for(Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> secondLevelSet : secondLevelSets) {
+            for (PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> secondlevelelem : secondLevelSet) {
                 topLevelSets.add(secondlevelelem);
             }
         }
@@ -622,7 +622,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     }
 
 
-    Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+    Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
         //Aufruf von computeCartesianRecursive ENDE
 
         //keine Ahnung woher das kommt
@@ -747,22 +747,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
 
 
-    Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+    Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
         //ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
         fstElems.addAll(topLevelSets.stream()
                 .filter(x -> x.size()==1)
                 .map(y -> y.stream().findFirst().get())
                 .collect(Collectors.toCollection(HashSet::new)));
-        ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
+        ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingSets = topLevelSets.stream()
                 .filter(x -> x.size()>1)
                 .collect(Collectors.toCollection(ArrayList::new));
         if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
             Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
             return result;
         }
-        Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> nextSet = remainingSets.remove(0);
         writeLog("nextSet: " + nextSet.toString());
-        List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
+        List<Set<UnifyPair>> nextSetasList = nextSet.toArrayList();
         try {
             //List<Set<UnifyPair>>
             //nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
@@ -819,7 +819,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             //List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
             //List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
             if (variance == 1) {
-                a = oup.max(nextSetasList.iterator());
+                //a = oup.max(nextSetasList.iterator());
+                a = nextSetasList.get(nextSetasList.size()-1);
                 nextSetasList.remove(a);
                 nextSetasListRest = new ArrayList<>(nextSetasList);
                 Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
@@ -849,7 +850,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 }
             }
             else if (variance == -1) {
-                a = oup.min(nextSetasList.iterator());
+                //a = oup.min(nextSetasList.iterator());
+                a = nextSetasList.get(0);
                 nextSetasList.remove(a);
                 nextSetasListRest = new ArrayList<>(nextSetasList);
                 Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
@@ -922,7 +924,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 Set<TypeUnify2Task> forks = new HashSet<>();
                 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
                 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-                List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+                List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
@@ -942,7 +944,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     }
                     Set<UnifyPair> newEq = new HashSet<>(eq);
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-                    List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+                    List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
                     TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
                     forks.add(fork);
@@ -973,7 +975,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 Set<TypeUnify2Task> forks = new HashSet<>();
                 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
                 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-                List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+                List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
@@ -993,7 +995,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     }
                     Set<UnifyPair> newEq = new HashSet<>(eq);
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-                    List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+                    List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
                     TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
                     forks.add(fork);
@@ -1025,7 +1027,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 Set<TypeUnify2Task> forks = new HashSet<>();
                 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
                 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-                List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+                List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
@@ -1043,7 +1045,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     //nextSetasList.remove(nSaL);
                     Set<UnifyPair> newEq = new HashSet<>(eq);
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-                    List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+                    List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
                     TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
                     forks.add(fork);
@@ -1665,9 +1667,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
      * (as in case 1 where sigma is added to the innermost set).
      */
-    protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Constraint<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Constraint<UnifyPair>>> oderConstraintsOutput) {
+    protected Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> calculatePairSets(Set<UnifyPair> eq2s, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput) {
         oderConstraintsOutput.addAll(oderConstraintsInput);
-        List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(9);
+        List<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> result = new ArrayList<>(9);
 
         // Init all 8 cases + 9. Case: oderConstraints
         for(int i = 0; i < 9; i++)
@@ -1686,7 +1688,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             }
         }
         if (eq2sAsList.isEmpty()) {
-            List<Set<Constraint<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
+            List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
                     .filter(x -> x.stream()
                             .filter(y -> y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
@@ -1696,7 +1698,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                                     ).findFirst().isPresent()
                             ).findFirst().isPresent()).collect(Collectors.toList());
             if (!oderConstraintsVariance.isEmpty()) {
-                Set<Constraint<UnifyPair>> ret = oderConstraintsVariance.get(0);
+                PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsVariance.get(0);
                 oderConstraintsOutput.remove(ret);
                 //Set<UnifyPair> retFlat = new HashSet<>();
                 //ret.stream().forEach(x -> retFlat.addAll(x));
@@ -1710,7 +1712,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
         if (eq2sAsList.isEmpty() && first) {//Alle eq2s sind empty und alle oderConstraints mit Variance != 0 sind bearbeitet
             if (!oderConstraintsOutput.isEmpty()) {
-                Set<Constraint<UnifyPair>> ret = oderConstraintsOutput.remove(0);
+                PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsOutput.remove(0);
                 //if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
                 //    System.out.println("M");
                 //Set<UnifyPair> retFlat = new HashSet<>();
@@ -1736,7 +1738,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
                         System.out.println("AR");
                     }
-                    Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase1(pair, fc);
                     if (pairOp == PairOperator.SMALLERNEQDOT) {
                         Set<UnifyPair> remElem = new HashSet<>();
                         remElem.add(new UnifyPair(pair.getLhsType(), pair.getRhsType(), PairOperator.EQUALSDOT));
@@ -1757,7 +1759,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else {
                     Set<UnifyPair> s1 = new HashSet<>();
                     s1.add(pair);
-                    Set<Set<UnifyPair>> s2 = new HashSet<>();
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
                     s2.add(s1);
                     result.get(0).add(s2);
                 }
@@ -1766,7 +1768,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             // Case 2: (a <.? ? ext Theta')
             else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
                 if (first) { //writeLog(pair.toString()+"\n");
-                    Set<Set<UnifyPair>> x1 = unifyCase2(pair, fc);
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase2(pair, fc);
                     result.get(1).add(x1);
                     if (x1.isEmpty()) {
                         undefined.add(pair); //Theta ist nicht im FC
@@ -1775,7 +1777,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else {
                     Set<UnifyPair> s1 = new HashSet<>();
                     s1.add(pair);
-                    Set<Set<UnifyPair>> s2 = new HashSet<>();
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
                     s2.add(s1);
                     result.get(1).add(s2);
                 }
@@ -1783,7 +1785,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             // Case 3: (a <.? ? sup Theta')
             else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
                 if (first) { //writeLog(pair.toString()+"\n");
-                    Set<Set<UnifyPair>> x1 = unifyCase3(pair, fc);
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase3(pair, fc);
                     result.get(2).add(x1);
                     if (x1.isEmpty()) {
                         undefined.add(pair); //Theta ist nicht im FC
@@ -1792,7 +1794,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else {
                     Set<UnifyPair> s1 = new HashSet<>();
                     s1.add(pair);
-                    Set<Set<UnifyPair>> s2 = new HashSet<>();
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
                     s2.add(s1);
                     result.get(2).add(s2);
                 }
@@ -1805,7 +1807,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             // Case 5: (Theta <. a)
             else if ((pairOp == PairOperator.SMALLERDOT) && rhsType instanceof PlaceholderType)
                 if (first) { //writeLog(pair.toString()+"\n");
-                    Set<Set<UnifyPair>> x1 = unifyCase5(pair, fc);
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase5(pair, fc);
                     result.get(4).add(x1);
                     if (x1.isEmpty()) {
                         undefined.add(pair); //Theta ist nicht im FC
@@ -1814,7 +1816,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else {
                     Set<UnifyPair> s1 = new HashSet<>();
                     s1.add(pair);
-                    Set<Set<UnifyPair>> s2 = new HashSet<>();
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
                     s2.add(s1);
                     result.get(4).add(s2);
                 }
@@ -1832,7 +1834,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             // Case 8: (Theta <.? a)
             else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
                 if (first) { //writeLog(pair.toString()+"\n");
-                    Set<Set<UnifyPair>> x1 = unifyCase8(pair, fc);
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase8(pair, fc);
                     result.get(7).add(x1);
                     if (x1.isEmpty()) {
                         undefined.add(pair); //Theta ist nicht im FC
@@ -1841,7 +1843,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else {
                     Set<UnifyPair> s1 = new HashSet<>();
                     s1.add(pair);
-                    Set<Set<UnifyPair>> s2 = new HashSet<>();
+                    PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
                     s2.add(s1);
                     result.get(7).add(s2);
                 }
@@ -1865,12 +1867,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     /**
      * Cartesian product Case 1: (a <. Theta')
      */
-    protected Set<Set<UnifyPair>> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
+    protected PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
         PlaceholderType a = (PlaceholderType)pair.getLhsType();
         UnifyType thetaPrime = pair.getRhsType();
         byte variance = pair.getVariance();
-        Set<Set<UnifyPair>> result = new HashSet<>();
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>(new OrderingUnifyPair(fc));
 
         boolean allGen = thetaPrime.getTypeParams().size() > 0;
         for(UnifyType t : thetaPrime.getTypeParams())
@@ -2009,11 +2011,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     /**
      * Cartesian Product Case 2: (a <.? ? ext Theta')
      */
-    private Set<Set<UnifyPair>> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
+    private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
         PlaceholderType a = (PlaceholderType) pair.getLhsType();
         ExtendsType extThetaPrime = (ExtendsType) pair.getRhsType();
         byte variance = pair.getVariance();
-        Set<Set<UnifyPair>> result = new HashSet<>();
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 
         UnifyType aPrime = PlaceholderType.freshPlaceholder();
         ((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2036,12 +2038,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     /**
      * Cartesian Product Case 3: (a <.? ? sup Theta')
      */
-    private Set<Set<UnifyPair>> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
+    private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
         PlaceholderType a = (PlaceholderType) pair.getLhsType();
         a.reversVariance();
         SuperType subThetaPrime = (SuperType) pair.getRhsType();
         byte variance = pair.getVariance();
-        Set<Set<UnifyPair>> result = new HashSet<>();
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 
         UnifyType aPrime = PlaceholderType.freshPlaceholder();
         ((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2066,11 +2068,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     /**
      * Cartesian Product Case 5: (Theta <. a)
      */
-    private Set<Set<UnifyPair>> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
+    private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
         UnifyType theta = pair.getLhsType();
         PlaceholderType a = (PlaceholderType) pair.getRhsType();
         byte variance = pair.getVariance();
-        Set<Set<UnifyPair>> result = new HashSet<>();
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 
         boolean allGen = theta.getTypeParams().size() > 0;
         for(UnifyType t : theta.getTypeParams())
@@ -2145,11 +2147,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     /**
      * Cartesian Product Case 8: (Theta <.? a)
      */
-    private Set<Set<UnifyPair>> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
+    private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
         UnifyType theta = pair.getLhsType();
         PlaceholderType a = (PlaceholderType) pair.getRhsType();
         byte variance = pair.getVariance();
-        Set<Set<UnifyPair>> result = new HashSet<>();
+        PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
         //for(UnifyType thetaS : fc.grArg(theta)) {
         Set<UnifyPair> resultPrime = new HashSet<>();
         resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT, pair.getSubstitution(), pair));