forked from JavaTX/JavaCompilerCore
Compare commits: targetByte...unify-Part (5 commits)

Commits (SHA1):
- d33335a6d9
- 25b1d3e62d
- 85a15cb256
- 0c03c2fbc8
- c8daa665f6
Changed paths:
- src/main/java/de/dhbwstuttgart/typeinference/unify
- src/test/resources/bytecode/javFiles
src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java (new file)
@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import com.google.common.collect.Ordering;

public class PartialOrderSet<E, F extends Ordering<E>> implements Set<E> {

    //HashSet<E> hs = new HashSet<>();
    List<E> ve = new LinkedList<>();//new Vector<>();
    F ordering;

    PartialOrderSet(F ordering) {
        this.ordering= ordering;
    }

    PartialOrderSet(F ordering, Set<E> s) {
        this.ordering= ordering;
        this.addAll(s);
    }

    PartialOrderSet(PartialOrderSet<E, F> pos) {
        this.ordering= pos.ordering;
        this.ve = new LinkedList<>(pos.ve);
    }

    @Override
    public int size() {
        return ve.size();
    }

    @Override
    public boolean isEmpty() {
        return ve.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return ve.contains(o);
    }

    @Override
    public Iterator<E> iterator() {
        return ve.iterator();
    }

    @Override
    public Object[] toArray() {
        return ve.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        return ve.toArray(a);
    }

    public ArrayList<E> toArrayList() {
        return new ArrayList<>(ve);
    }

    public List<E> toList() {
        return new LinkedList<>(ve);
    }

    @Override
    public boolean add(E e) {
        if (this.contains(e)) {
            return false;
        }
        //hs.add(e);
        for(int i = 0; i< ve.size(); i++) {
            if (ordering.compare(e, ve.get(i)) == -1) {
                ve.add(i,e);
                return true;
            }
        }
        ve.add(e);
        return true;
    }

    @Override
    public boolean remove(Object o) {
        //hs.remove(o);
        return ve.remove(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return ve.containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends E> c) {
        Boolean ret = false;
        Iterator<? extends E> cit = c.iterator();
        while(cit.hasNext()) {
            Boolean retnew = this.add(cit.next());
            ret = ret || retnew;
        }
        return ret;
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        //hs.retainAll(c);
        return ve.retainAll(c);
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        //hs.removeAll(c);
        return ve.removeAll(c);
    }

    @Override
    public void clear() {
        //hs.clear();
        ve.clear();
    }

    @Override
    public String toString() {
        return ve.toString();
    }
}
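A minimal usage sketch of the new class (editor's illustration, not part of this changeset; the class name PartialOrderSetDemo is made up, and it has to live in the same package because the constructors are package-private). It shows the invariant the rest of the diff relies on: add() inserts each element before the first existing element that compares as greater, so toList() hands the elements back in ascending order of the supplied Guava Ordering, and duplicates are rejected via contains().

```java
package de.dhbwstuttgart.typeinference.unify;

import com.google.common.collect.Ordering;

// Hypothetical demo class (not in the commit); it must sit in this package because
// the PartialOrderSet constructors are package-private.
public class PartialOrderSetDemo {
    public static void main(String[] args) {
        // Ordering.natural() on Integer returns exactly -1/0/1, which matches the
        // "== -1" comparison used in PartialOrderSet.add().
        Ordering<Integer> ord = Ordering.natural();
        PartialOrderSet<Integer, Ordering<Integer>> set = new PartialOrderSet<>(ord);
        set.add(3);
        set.add(1);
        set.add(2);
        set.add(1);                          // duplicate: contains() rejects it, add() returns false
        System.out.println(set.toList());    // [1, 2, 3] -- ascending w.r.t. ord
    }
}
```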
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -1,20 +1,25 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.FileWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify {
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
@@ -22,14 +27,18 @@ public class TypeUnify {
    }

    public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        return ret;
    }

    public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
@@ -45,7 +54,9 @@ public class TypeUnify {
    */

    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
        List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
                oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
        Set<Set<UnifyPair>> res = unifyTask.compute();
        return res;
    }
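All four methods above repeat the same wrapping of the or-constraints before handing them to TypeUnifyTask. A hedged refactoring sketch (hypothetical helper, not in the commit) that would factor the conversion into one place inside TypeUnify:

```java
// Hypothetical helper for TypeUnify (not in the commit): wraps each or-constraint set
// into a PartialOrderSet ordered by OrderingUnifyPair(fc), exactly as unify, unifyAsync,
// unifyParallel and unifyOderConstraints do inline above. Assumes TypeUnify's existing
// imports (List, Set, Collectors, OrderingUnifyPair, UnifyPair, IFiniteClosure).
private static List<PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>> toPartialOrderSets(
        List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc) {
    return oderConstraints.stream()
            .map(x -> new PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>(new OrderingUnifyPair(fc), x))
            .collect(Collectors.toList());
}
```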
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -9,13 +9,14 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify2Task extends TypeUnifyTask {

    Set<Set<UnifyPair>> setToFlatten;

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -95,7 +95,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

protected Set<UnifyPair> eq; //und-constraints

protected List<Set<Set<UnifyPair>>> oderConstraintsField;
protected List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsField;

protected IFiniteClosure fc;

@@ -133,7 +133,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
*/

public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons2) {
public TypeUnifyTask(Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons2) {
synchronized (this) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -209,7 +209,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
oderConstraintsField.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
@@ -228,7 +228,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
*/

/*
public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
@@ -274,7 +274,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// nextSetasList = al;
//}
//Set<UnifyPair> a = nextSetasListIt.next();
/*if (nextSetasList.size()>1) {zu loeschen
//
/ *if (nextSetasList.size()>1) {zu loeschen
if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (variance == 1) {
@@ -290,7 +291,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
a_next = nextSetasList.iterator().next();
}
*/
* /
if (!nextSetasList.iterator().hasNext())
System.out.print("");
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
@@ -299,11 +300,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a = null;
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
//a = oup.max(nextSetasList.iterator());
a = nextSetasList.get(nextSetasList.size()-1);
nextSetasList.remove(a);
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
//a = oup.min(nextSetasList.iterator());
a = nextSetasList.get(0);
nextSetasList.remove(a);
}
else if (variance == 0) {
@@ -311,7 +314,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
//nextSetasList.remove(a);
/* zu loeschen
/ * zu loeschen
if (nextSetasList.size() > 0) {
if (nextSetasList.size()>1) {
if (variance == 1) {
@@ -328,7 +331,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
a_next = nextSetasList.iterator().next();
}
}
*/
* /
//PL 2018-03-01
//TODO: 1. Maximum und Minimum unterscheiden
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
@@ -372,7 +375,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {



/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
/ * auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG * /
if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
@@ -407,9 +410,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//break;
}}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
/ * auskommentiert um alle Max und min Betrachtung auszuschalten ENDE * /

/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
/ * PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
if (isUndefinedPairSetSet(res)) {
int nofstred= 0;
Set<UnifyPair> abhSubst = res.stream()
@@ -462,7 +465,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
}
*/
* /
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
@@ -475,7 +478,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
} // End of while (nextSetasList.size() > 0)
return result;
}

*/

/**
* Computes all principal type unifiers for a set of constraints.
@@ -483,7 +486,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -537,12 +540,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

// There are up to 10 toplevel set. 8 of 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets = new ArrayList<>();

//System.out.println(eq2s);

if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
@@ -553,7 +556,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.collect(Collectors.toSet());

if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
@@ -564,14 +567,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
List<Set<Set<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
//nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden.
writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
writeLog(nOfUnify.toString() + "secondLevelSets:" +secondLevelSets.toString());
// If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty()) {
@@ -612,8 +615,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/

//Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
for(Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> secondLevelSet : secondLevelSets) {
for (PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> secondlevelelem : secondLevelSet) {
topLevelSets.add(secondlevelelem);
}
}
@@ -628,7 +631,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}


Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Aufruf von computeCartesianRecursive ENDE

//keine Ahnung woher das kommt
@@ -752,22 +755,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {



Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingSets = topLevelSets.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> nextSet = remainingSets.remove(0);
writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
List<Set<UnifyPair>> nextSetasList = nextSet.toList();
try {
//List<Set<UnifyPair>>
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
@@ -820,14 +823,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a_last = a;
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
List<Set<UnifyPair>> nextSetasListRest = new LinkedList<>();
//List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
//List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
//a = oup.max(nextSetasList.iterator());
a = nextSetasList.get(nextSetasList.size()-1);
nextSetasList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
nextSetasListRest = new LinkedList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new LinkedList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
@@ -838,9 +842,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Alle maximale Elemente in nextSetasListRest bestimmen
List<Set<UnifyPair>> nextSetasListRestTest;
do {
nextSetasListRestTest = new ArrayList<Set<UnifyPair>>(nextSetasListRest);
nextSetasListRestTest = new LinkedList<Set<UnifyPair>>(nextSetasListRest);
if (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> max = oup.max(nextSetasListRest.iterator());
//Set<UnifyPair> max = oup.max(nextSetasListRest.iterator());
Set<UnifyPair> max = nextSetasListRest.get(nextSetasListRest.size()-1);
Iterator<Set<UnifyPair>> nextSetasListItRest2 = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest2.hasNext()) {
Set<UnifyPair> a_nextRest = nextSetasListItRest2.next();
@@ -854,9 +859,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
//a = oup.min(nextSetasList.iterator());
a = nextSetasList.get(0);
nextSetasList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetasList);
nextSetasListRest = new LinkedList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
@@ -869,9 +875,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

List<Set<UnifyPair>> nextSetasListRestTest;
do {
nextSetasListRestTest = new ArrayList<Set<UnifyPair>>(nextSetasListRest);
nextSetasListRestTest = new LinkedList<Set<UnifyPair>>(nextSetasListRest);
if (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> min = oup.min(nextSetasListRest.iterator());
//Set<UnifyPair> min = oup.min(nextSetasListRest.iterator());
Set<UnifyPair> min = nextSetasListRest.get(0);
Iterator<Set<UnifyPair>> nextSetasListItRest2 = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest2.hasNext()) {
Set<UnifyPair> a_nextRest = nextSetasListItRest2.next();
@@ -885,7 +892,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
else if (variance == 2) {
a = nextSetasList.remove(0);
nextSetasListRest = new ArrayList<>(nextSetasList);
nextSetasListRest = new LinkedList<>(nextSetasList);
}
else if (variance == 0) {
a = nextSetasList.remove(0);
@@ -928,7 +935,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElemsOrig.add(a);

/* FORK ANFANG */
@@ -948,7 +957,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
@@ -984,7 +995,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElemsOrig.add(a);

/* FORK ANFANG */
@@ -1004,7 +1017,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
@@ -1041,7 +1056,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<TypeUnifyTask> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElemsOrig.add(a);

/* FORK ANFANG */
@@ -1059,7 +1076,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//nextSetasList.remove(nSaL);
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
// oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
@@ -1693,9 +1712,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Set<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Set<UnifyPair>>> oderConstraintsOutput) {
protected Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> calculatePairSets(Set<UnifyPair> eq2s, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput) {
oderConstraintsOutput.addAll(oderConstraintsInput);
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(9);
List<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> result = new ArrayList<>(9);

// Init all 8 cases + 9. Case: oderConstraints
for(int i = 0; i < 9; i++)
@@ -1714,7 +1733,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
if (eq2sAsList.isEmpty()) {
List<Set<Set<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
.filter(x -> x.stream()
.filter(y ->
y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
@@ -1724,7 +1743,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
).findFirst().isPresent()
).findFirst().isPresent()).collect(Collectors.toList());
if (!oderConstraintsVariance.isEmpty()) {
Set<Set<UnifyPair>> ret = oderConstraintsVariance.get(0);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsVariance.get(0);
oderConstraintsOutput.remove(ret);
//Set<UnifyPair> retFlat = new HashSet<>();
//ret.stream().forEach(x -> retFlat.addAll(x));
@@ -1738,7 +1757,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

if (eq2sAsList.isEmpty() && first) {//Alle eq2s sind empty und alle oderConstraints mit Variance != 0 sind bearbeitet
if (!oderConstraintsOutput.isEmpty()) {
Set<Set<UnifyPair>> ret = oderConstraintsOutput.remove(0);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsOutput.remove(0);
//if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
// System.out.println("M");
//Set<UnifyPair> retFlat = new HashSet<>();
@@ -1764,7 +1783,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
System.out.println("AR");
}
Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase1(pair, fc);
if (pairOp == PairOperator.SMALLERNEQDOT) {
Set<UnifyPair> remElem = new HashSet<>();
remElem.add(new UnifyPair(pair.getLhsType(), pair.getRhsType(), PairOperator.EQUALSDOT));
@@ -1785,7 +1804,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
s2.add(s1);
result.get(0).add(s2);
}
@@ -1794,7 +1813,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = unifyCase2(pair, fc);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase2(pair, fc);
result.get(1).add(x1);
if (x1.isEmpty()) {
undefined.add(pair); //Theta ist nicht im FC
@@ -1803,7 +1822,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
s2.add(s1);
result.get(1).add(s2);
}
@@ -1811,7 +1830,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = unifyCase3(pair, fc);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase3(pair, fc);
result.get(2).add(x1);
if (x1.isEmpty()) {
undefined.add(pair); //Theta ist nicht im FC
@@ -1820,7 +1839,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
s2.add(s1);
result.get(2).add(s2);
}
@@ -1833,7 +1852,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// Case 5: (Theta <. a)
else if ((pairOp == PairOperator.SMALLERDOT) && rhsType instanceof PlaceholderType)
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = unifyCase5(pair, fc);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase5(pair, fc);
result.get(4).add(x1);
if (x1.isEmpty()) {
undefined.add(pair); //Theta ist nicht im FC
@@ -1842,7 +1861,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
s2.add(s1);
result.get(4).add(s2);
}
@@ -1860,7 +1879,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = unifyCase8(pair, fc);
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase8(pair, fc);
result.get(7).add(x1);
if (x1.isEmpty()) {
undefined.add(pair); //Theta ist nicht im FC
@@ -1869,7 +1888,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
s2.add(s1);
result.get(7).add(s2);
}
@@ -1893,12 +1912,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
protected PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
PlaceholderType a = (PlaceholderType)pair.getLhsType();
UnifyType thetaPrime = pair.getRhsType();
byte variance = pair.getVariance();

Set<Set<UnifyPair>> result = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>(new OrderingUnifyPair(fc));

boolean allGen = thetaPrime.getTypeParams().size() > 0;
for(UnifyType t : thetaPrime.getTypeParams())
@@ -2037,11 +2056,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
private Set<Set<UnifyPair>> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
PlaceholderType a = (PlaceholderType) pair.getLhsType();
ExtendsType extThetaPrime = (ExtendsType) pair.getRhsType();
byte variance = pair.getVariance();
Set<Set<UnifyPair>> result = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));

UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2064,12 +2083,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
private Set<Set<UnifyPair>> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
PlaceholderType a = (PlaceholderType) pair.getLhsType();
a.reversVariance();
SuperType subThetaPrime = (SuperType) pair.getRhsType();
byte variance = pair.getVariance();
Set<Set<UnifyPair>> result = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));

UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2094,11 +2113,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 5: (Theta <. a)
*/
private Set<Set<UnifyPair>> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
UnifyType theta = pair.getLhsType();
PlaceholderType a = (PlaceholderType) pair.getRhsType();
byte variance = pair.getVariance();
Set<Set<UnifyPair>> result = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));

boolean allGen = theta.getTypeParams().size() > 0;
for(UnifyType t : theta.getTypeParams())
@@ -2173,11 +2192,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
private Set<Set<UnifyPair>> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
UnifyType theta = pair.getLhsType();
PlaceholderType a = (PlaceholderType) pair.getRhsType();
byte variance = pair.getVariance();
Set<Set<UnifyPair>> result = new HashSet<>();
PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
@@ -2238,8 +2257,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}

void writeLog(String str) {
synchronized ( this ) {
synchronized void writeLog(String str) {
if (log) {
try {
logFile.write("Thread no.:" + thNo + "\n");
@@ -2252,7 +2270,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
}
}
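The recurring edit in this file swaps the oup.max(...)/oup.min(...) searches for positional access into the list obtained from the PartialOrderSet. A small sketch of the invariant this exploits (editor's illustration, not in the commit; the class and method names are made up):

```java
package de.dhbwstuttgart.typeinference.unify;

import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

// Hypothetical illustration class, not part of the commit.
class PartialOrderSetExtremes {
    // toList() is ascending w.r.t. the OrderingUnifyPair the set was built with,
    // so the least element sits at index 0 and the greatest at the last index.
    // This is what lets the diff replace oup.min(...)/oup.max(...) with get(0)
    // and get(size()-1) on nextSetasList.
    static Set<UnifyPair> max(PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair> nextSet) {
        List<Set<UnifyPair>> nextSetasList = nextSet.toList();
        return nextSetasList.isEmpty() ? null : nextSetasList.get(nextSetasList.size() - 1);
    }

    static Set<UnifyPair> min(PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair> nextSet) {
        List<Set<UnifyPair>> nextSetasList = nextSet.toList();
        return nextSetasList.isEmpty() ? null : nextSetasList.get(0);
    }
}
```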
@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
//import java.lang.Float;
import java.lang.Float;
//import java.lang.Byte;
//import java.lang.Boolean;

src/test/resources/bytecode/javFiles/MatrixOP.jav
@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
import java.lang.Byte;
//import java.lang.Byte;
import java.lang.Boolean;

public class MatrixOP extends Vector<Vector<Integer>> {