Compare commits


5 Commits

Author SHA1 Message Date
Martin Plümicke
d33335a6d9 modified: ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Reverted the construction of newOderConstraints

	modified:   ../../resources/bytecode/javFiles/MatrixOP.jav
2019-02-14 09:38:26 +01:00
Martin Plümicke
25b1d3e62d modified: ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Replaced ArrayList with LinkedList
Commented out computeCartesianRecursiveOderConstraints
2019-02-14 08:53:32 +01:00
Martin Plümicke
85a15cb256 modified: ../../../../main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
modified:   ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Implemented PartialOrderSet as a LinkedList

	modified:   ../../bytecode/javFiles/Matrix.jav
2019-02-13 18:34:12 +01:00
Martin Plümicke
0c03c2fbc8 Merge branch 'unify-test' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into unify-PartialOrderSet
Conflicts:
	src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
	modified:   ../../resources/bytecode/javFiles/Matrix.jav
	modified:   ../../resources/bytecode/javFiles/MatrixOP.jav
2019-02-11 14:41:28 +01:00
Martin Plümicke
c8daa665f6 new file: src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Implemented PartialOrderSet and integrated it into TypeUnify, TypeUnify2Task, and TypeUnifyTask
2019-02-02 20:26:26 +01:00
6 changed files with 243 additions and 82 deletions

src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java (new file)

@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import com.google.common.collect.Ordering;

public class PartialOrderSet<E, F extends Ordering<E>> implements Set<E> {

	//HashSet<E> hs = new HashSet<>();
	List<E> ve = new LinkedList<>();//new Vector<>();
	F ordering;

	PartialOrderSet(F ordering) {
		this.ordering= ordering;
	}

	PartialOrderSet(F ordering, Set<E> s) {
		this.ordering= ordering;
		this.addAll(s);
	}

	PartialOrderSet(PartialOrderSet<E, F> pos) {
		this.ordering= pos.ordering;
		this.ve = new LinkedList<>(pos.ve);
	}

	@Override
	public int size() {
		return ve.size();
	}

	@Override
	public boolean isEmpty() {
		return ve.isEmpty();
	}

	@Override
	public boolean contains(Object o) {
		return ve.contains(o);
	}

	@Override
	public Iterator<E> iterator() {
		return ve.iterator();
	}

	@Override
	public Object[] toArray() {
		return ve.toArray();
	}

	@Override
	public <T> T[] toArray(T[] a) {
		return ve.toArray(a);
	}

	public ArrayList<E> toArrayList() {
		return new ArrayList<>(ve);
	}

	public List<E> toList() {
		return new LinkedList<>(ve);
	}

	@Override
	public boolean add(E e) {
		if (this.contains(e)) {
			return false;
		}
		//hs.add(e);
		for(int i = 0; i< ve.size(); i++) {
			if (ordering.compare(e, ve.get(i)) == -1) {
				ve.add(i,e);
				return true;
			}
		}
		ve.add(e);
		return true;
	}

	@Override
	public boolean remove(Object o) {
		//hs.remove(o);
		return ve.remove(o);
	}

	@Override
	public boolean containsAll(Collection<?> c) {
		return ve.containsAll(c);
	}

	@Override
	public boolean addAll(Collection<? extends E> c) {
		Boolean ret = false;
		Iterator<? extends E> cit = c.iterator();
		while(cit.hasNext()) {
			Boolean retnew = this.add(cit.next());
			ret = ret || retnew;
		}
		return ret;
	}

	@Override
	public boolean retainAll(Collection<?> c) {
		//hs.retainAll(c);
		return ve.retainAll(c);
	}

	@Override
	public boolean removeAll(Collection<?> c) {
		//hs.removeAll(c);
		return ve.removeAll(c);
	}

	@Override
	public void clear() {
		//hs.clear();
		ve.clear();
	}

	@Override
	public String toString() {
		return ve.toString();
	}
}
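The class above keeps its elements in a LinkedList that is kept sorted on insertion: add() walks the list and inserts the new element in front of the first element it compares smaller than (ordering.compare(...) == -1), so the least element always sits at index 0 and the greatest at the end. A minimal usage sketch, not part of the commit: it uses Guava's Ordering.natural() as a stand-in for OrderingUnifyPair and assumes it is placed in the same package, since the constructors are package-private. Note that the == -1 check relies on the comparator returning exactly -1/0/1, which Integer's natural ordering does.

package de.dhbwstuttgart.typeinference.unify;

import com.google.common.collect.Ordering;

public class PartialOrderSetDemo {
	public static void main(String[] args) {
		// Natural ordering on Integer returns exactly -1/0/1, matching add()'s check.
		Ordering<Integer> ord = Ordering.natural();
		PartialOrderSet<Integer, Ordering<Integer>> pos = new PartialOrderSet<>(ord);
		pos.add(3);
		pos.add(1);
		pos.add(2);
		pos.add(2);                        // duplicate: contains() is true, add() returns false
		System.out.println(pos);           // [1, 2, 3] -- the backing list stays sorted
		System.out.println(pos.toList());  // defensive LinkedList copy of the elements
	}
}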

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java

@@ -1,20 +1,25 @@
 package de.dhbwstuttgart.typeinference.unify;
 import java.io.FileWriter;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
+import java.util.stream.Collectors;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 public class TypeUnify {
 public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
 ForkJoinPool pool = new ForkJoinPool();
 pool.invoke(unifyTask);
 Set<Set<UnifyPair>> res = unifyTask.join();
@@ -22,14 +27,18 @@ public class TypeUnify {
 }
 public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
 ForkJoinPool pool = new ForkJoinPool();
 pool.invoke(unifyTask);
 return ret;
 }
 public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, cons);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
 ForkJoinPool pool = new ForkJoinPool();
 pool.invoke(unifyTask);
 Set<Set<UnifyPair>> res = unifyTask.join();
@@ -45,7 +54,9 @@ public class TypeUnify {
 */
 public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
+oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
 Set<Set<UnifyPair>> res = unifyTask.compute();
 return res;
 }
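In each of the four entry points above the commit wraps every oder-constraint set into a PartialOrderSet ordered by OrderingUnifyPair(fc) before handing it to TypeUnifyTask. The same stream pipeline is repeated four times; a hypothetical private helper inside TypeUnify (not part of this commit) could factor it out:

	// Hypothetical helper, not in the commit: one place for the conversion repeated above.
	private static List<PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>> toPartialOrderSets(
			List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc) {
		return oderConstraints.stream()
				.map(x -> new PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>(new OrderingUnifyPair(fc), x))
				.collect(Collectors.toList());
	}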

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

@@ -9,13 +9,14 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 public class TypeUnify2Task extends TypeUnifyTask {
 Set<Set<UnifyPair>> setToFlatten;
-public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
+public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
 super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
 this.setToFlatten = setToFlatten;
 this.nextSetElement = nextSetElement;

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -95,7 +95,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 protected Set<UnifyPair> eq; //und-constraints
-protected List<Set<Set<UnifyPair>>> oderConstraintsField;
+protected List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsField;
 protected IFiniteClosure fc;
@@ -133,7 +133,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 */
-public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons2) {
+public TypeUnifyTask(Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons2) {
 synchronized (this) {
 this.eq = eq;
 //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -209,7 +209,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 oderConstraintsField.stream()
 .filter(x -> x.size()==1)
 .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
-ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
+ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingOderconstraints = oderConstraintsField.stream()
 .filter(x -> x.size()>1)
 .collect(Collectors.toCollection(ArrayList::new));
 Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
@@ -228,7 +228,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 */
+/*
 public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 //ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
 fstElems.addAll(topLevelSets.stream()
@@ -274,7 +274,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // nextSetasList = al;
 //}
 //Set<UnifyPair> a = nextSetasListIt.next();
-/*if (nextSetasList.size()>1) {zu loeschen
+//
+/ *if (nextSetasList.size()>1) {zu loeschen
 if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
 System.out.print("");
 if (variance == 1) {
@@ -290,7 +291,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 a_next = nextSetasList.iterator().next();
 }
-*/
+* /
 if (!nextSetasList.iterator().hasNext())
 System.out.print("");
 if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
@@ -299,11 +300,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
 Set<UnifyPair> a = null;
 if (variance == 1) {
-a = oup.max(nextSetasList.iterator());
+//a = oup.max(nextSetasList.iterator());
+a = nextSetasList.get(nextSetasList.size()-1);
 nextSetasList.remove(a);
 }
 else if (variance == -1) {
-a = oup.min(nextSetasList.iterator());
+//a = oup.min(nextSetasList.iterator());
+a = nextSetasList.get(0);
 nextSetasList.remove(a);
 }
 else if (variance == 0) {
@@ -311,7 +314,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 //writeLog("nextSet: " + nextSetasList.toString()+ "\n");
 //nextSetasList.remove(a);
-/* zu loeschen
+/ * zu loeschen
 if (nextSetasList.size() > 0) {
 if (nextSetasList.size()>1) {
 if (variance == 1) {
@@ -328,7 +331,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 a_next = nextSetasList.iterator().next();
 }
 }
-*/
+* /
 //PL 2018-03-01
 //TODO: 1. Maximum und Minimum unterscheiden
 //TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
@@ -372,7 +375,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
-/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
+/ * auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG * /
 if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
 if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
 System.out.print("");
@@ -407,9 +410,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //break;
 }}
 }
-/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
+/ * auskommentiert um alle Max und min Betrachtung auszuschalten ENDE * /
-/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
+/ * PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
 if (isUndefinedPairSetSet(res)) {
 int nofstred= 0;
 Set<UnifyPair> abhSubst = res.stream()
@@ -462,7 +465,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 writeLog("Number of Backtracking: " + noBacktracking);
 System.out.println("");
 }
-*/
+* /
 //if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
 // return result;
 //}
@@ -475,7 +478,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 } // End of while (nextSetasList.size() > 0)
 return result;
 }
+*/
 /**
 * Computes all principal type unifiers for a set of constraints.
@@ -483,7 +486,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 * @param fc The finite closure
 * @return The set of all principal type unifiers
 */
-protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 //Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
 // ).collect(Collectors.toCollection(HashSet::new));
 //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -537,12 +540,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // There are up to 10 toplevel set. 8 of 10 are the result of the
 // cartesian product of the sets created by pattern matching.
-List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets = new ArrayList<>();
 //System.out.println(eq2s);
 if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-Set<Set<UnifyPair>> wrap = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 wrap.add(eq1s);
 topLevelSets.add(wrap); // Add Eq1'
 }
@@ -553,7 +556,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 .collect(Collectors.toSet());
 if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-Set<Set<UnifyPair>> wrap = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> wrap = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 wrap.add(bufferSet);
 topLevelSets.add(wrap);
 eq2s.removeAll(bufferSet);
@@ -564,14 +567,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<UnifyPair> undefinedPairs = new HashSet<>();
 if (printtag) System.out.println("eq2s " + eq2s);
 //writeLog("BufferSet: " + bufferSet.toString()+"\n");
-List<Set<Set<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
-Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
+Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
 //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
 //nicht ausgewertet Faculty Beispiel im 1. Schritt
 //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
 //Typen getestet werden.
 writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
-if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
+writeLog(nOfUnify.toString() + "secondLevelSets:" +secondLevelSets.toString());
 // If pairs occured that did not match one of the cartesian product cases,
 // those pairs are contradictory and the unification is impossible.
 if(!undefinedPairs.isEmpty()) {
@@ -612,8 +615,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 */
 //Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
-for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
+for(Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> secondLevelSet : secondLevelSets) {
-for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
+for (PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> secondlevelelem : secondLevelSet) {
 topLevelSets.add(secondlevelelem);
 }
 }
@@ -628,7 +631,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
-Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 //Aufruf von computeCartesianRecursive ENDE
 //keine Ahnung woher das kommt
@@ -752,22 +755,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
-Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> topLevelSets, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 //ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
 fstElems.addAll(topLevelSets.stream()
 .filter(x -> x.size()==1)
 .map(y -> y.stream().findFirst().get())
 .collect(Collectors.toCollection(HashSet::new)));
-ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
+ArrayList<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> remainingSets = topLevelSets.stream()
 .filter(x -> x.size()>1)
 .collect(Collectors.toCollection(ArrayList::new));
 if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
 Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
 return result;
 }
-Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> nextSet = remainingSets.remove(0);
 writeLog("nextSet: " + nextSet.toString());
-List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
+List<Set<UnifyPair>> nextSetasList = nextSet.toList();
 try {
 //List<Set<UnifyPair>>
 //nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
@@ -820,14 +823,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<UnifyPair> a = null;
 while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
 Set<UnifyPair> a_last = a;
-List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
+List<Set<UnifyPair>> nextSetasListRest = new LinkedList<>();
 //List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
 //List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
 if (variance == 1) {
-a = oup.max(nextSetasList.iterator());
+//a = oup.max(nextSetasList.iterator());
+a = nextSetasList.get(nextSetasList.size()-1);
 nextSetasList.remove(a);
-nextSetasListRest = new ArrayList<>(nextSetasList);
+nextSetasListRest = new LinkedList<>(nextSetasList);
-Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
+Iterator<Set<UnifyPair>> nextSetasListItRest = new LinkedList<Set<UnifyPair>>(nextSetasListRest).iterator();
 while (nextSetasListItRest.hasNext()) {
 Set<UnifyPair> a_next = nextSetasListItRest.next();
 if (//a.equals(a_next) ||
@@ -838,9 +842,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //Alle maximale Elemente in nextSetasListRest bestimmen
 List<Set<UnifyPair>> nextSetasListRestTest;
 do {
-nextSetasListRestTest = new ArrayList<Set<UnifyPair>>(nextSetasListRest);
+nextSetasListRestTest = new LinkedList<Set<UnifyPair>>(nextSetasListRest);
 if (!nextSetasListRest.isEmpty()) {
-Set<UnifyPair> max = oup.max(nextSetasListRest.iterator());
+//Set<UnifyPair> max = oup.max(nextSetasListRest.iterator());
+Set<UnifyPair> max = nextSetasListRest.get(nextSetasListRest.size()-1);
 Iterator<Set<UnifyPair>> nextSetasListItRest2 = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
 while (nextSetasListItRest2.hasNext()) {
 Set<UnifyPair> a_nextRest = nextSetasListItRest2.next();
@@ -854,9 +859,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 else if (variance == -1) {
-a = oup.min(nextSetasList.iterator());
+//a = oup.min(nextSetasList.iterator());
+a = nextSetasList.get(0);
 nextSetasList.remove(a);
-nextSetasListRest = new ArrayList<>(nextSetasList);
+nextSetasListRest = new LinkedList<>(nextSetasList);
 Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
 while (nextSetasListItRest.hasNext()) {
 Set<UnifyPair> a_next = nextSetasListItRest.next();
@@ -869,9 +875,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 List<Set<UnifyPair>> nextSetasListRestTest;
 do {
-nextSetasListRestTest = new ArrayList<Set<UnifyPair>>(nextSetasListRest);
+nextSetasListRestTest = new LinkedList<Set<UnifyPair>>(nextSetasListRest);
 if (!nextSetasListRest.isEmpty()) {
-Set<UnifyPair> min = oup.min(nextSetasListRest.iterator());
+//Set<UnifyPair> min = oup.min(nextSetasListRest.iterator());
+Set<UnifyPair> min = nextSetasListRest.get(0);
 Iterator<Set<UnifyPair>> nextSetasListItRest2 = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
 while (nextSetasListItRest2.hasNext()) {
 Set<UnifyPair> a_nextRest = nextSetasListItRest2.next();
@@ -885,7 +892,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 else if (variance == 2) {
 a = nextSetasList.remove(0);
-nextSetasListRest = new ArrayList<>(nextSetasList);
+nextSetasListRest = new LinkedList<>(nextSetasList);
 }
 else if (variance == 0) {
 a = nextSetasList.remove(0);
@@ -928,7 +935,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<TypeUnify2Task> forks = new HashSet<>();
 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElemsOrig.add(a);
 /* FORK ANFANG */
@@ -948,7 +957,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 Set<UnifyPair> newEq = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElems.add(nSaL);
 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
 forks.add(fork);
@@ -984,7 +995,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<TypeUnify2Task> forks = new HashSet<>();
 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElemsOrig.add(a);
 /* FORK ANFANG */
@@ -1004,7 +1017,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 Set<UnifyPair> newEq = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElems.add(nSaL);
 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
 forks.add(fork);
@@ -1041,7 +1056,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<TypeUnifyTask> forks = new HashSet<>();
 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElemsOrig.add(a);
 /* FORK ANFANG */
@@ -1059,7 +1076,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //nextSetasList.remove(nSaL);
 Set<UnifyPair> newEq = new HashSet<>(eq);
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
+//List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints =
+//	oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(x)).collect(Collectors.toList());
 newElems.add(nSaL);
 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
 forks.add(fork);
@@ -1693,9 +1712,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
 * (as in case 1 where sigma is added to the innermost set).
 */
-protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Set<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Set<UnifyPair>>> oderConstraintsOutput) {
+protected Set<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> calculatePairSets(Set<UnifyPair> eq2s, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsOutput) {
 oderConstraintsOutput.addAll(oderConstraintsInput);
-List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(9);
+List<Set<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>>> result = new ArrayList<>(9);
 // Init all 8 cases + 9. Case: oderConstraints
 for(int i = 0; i < 9; i++)
@@ -1714,7 +1733,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 }
 if (eq2sAsList.isEmpty()) {
-List<Set<Set<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
+List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
 .filter(x -> x.stream()
 .filter(y ->
 y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
@@ -1724,7 +1743,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 ).findFirst().isPresent()
 ).findFirst().isPresent()).collect(Collectors.toList());
 if (!oderConstraintsVariance.isEmpty()) {
-Set<Set<UnifyPair>> ret = oderConstraintsVariance.get(0);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsVariance.get(0);
 oderConstraintsOutput.remove(ret);
 //Set<UnifyPair> retFlat = new HashSet<>();
 //ret.stream().forEach(x -> retFlat.addAll(x));
@@ -1738,7 +1757,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 if (eq2sAsList.isEmpty() && first) {//Alle eq2s sind empty und alle oderConstraints mit Variance != 0 sind bearbeitet
 if (!oderConstraintsOutput.isEmpty()) {
-Set<Set<UnifyPair>> ret = oderConstraintsOutput.remove(0);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> ret = oderConstraintsOutput.remove(0);
 //if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
 // System.out.println("M");
 //Set<UnifyPair> retFlat = new HashSet<>();
@@ -1764,7 +1783,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
 System.out.println("AR");
 }
-Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase1(pair, fc);
 if (pairOp == PairOperator.SMALLERNEQDOT) {
 Set<UnifyPair> remElem = new HashSet<>();
 remElem.add(new UnifyPair(pair.getLhsType(), pair.getRhsType(), PairOperator.EQUALSDOT));
@@ -1785,7 +1804,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 Set<UnifyPair> s1 = new HashSet<>();
 s1.add(pair);
-Set<Set<UnifyPair>> s2 = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 s2.add(s1);
 result.get(0).add(s2);
 }
@@ -1794,7 +1813,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // Case 2: (a <.? ? ext Theta')
 else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
 if (first) { //writeLog(pair.toString()+"\n");
-Set<Set<UnifyPair>> x1 = unifyCase2(pair, fc);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase2(pair, fc);
 result.get(1).add(x1);
 if (x1.isEmpty()) {
 undefined.add(pair); //Theta ist nicht im FC
@@ -1803,7 +1822,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 Set<UnifyPair> s1 = new HashSet<>();
 s1.add(pair);
-Set<Set<UnifyPair>> s2 = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 s2.add(s1);
 result.get(1).add(s2);
 }
@@ -1811,7 +1830,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // Case 3: (a <.? ? sup Theta')
 else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
 if (first) { //writeLog(pair.toString()+"\n");
-Set<Set<UnifyPair>> x1 = unifyCase3(pair, fc);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase3(pair, fc);
 result.get(2).add(x1);
 if (x1.isEmpty()) {
 undefined.add(pair); //Theta ist nicht im FC
@@ -1820,7 +1839,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 Set<UnifyPair> s1 = new HashSet<>();
 s1.add(pair);
-Set<Set<UnifyPair>> s2 = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 s2.add(s1);
 result.get(2).add(s2);
 }
@@ -1833,7 +1852,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // Case 5: (Theta <. a)
 else if ((pairOp == PairOperator.SMALLERDOT) && rhsType instanceof PlaceholderType)
 if (first) { //writeLog(pair.toString()+"\n");
-Set<Set<UnifyPair>> x1 = unifyCase5(pair, fc);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase5(pair, fc);
 result.get(4).add(x1);
 if (x1.isEmpty()) {
 undefined.add(pair); //Theta ist nicht im FC
@@ -1842,7 +1861,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 Set<UnifyPair> s1 = new HashSet<>();
 s1.add(pair);
-Set<Set<UnifyPair>> s2 = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 s2.add(s1);
 result.get(4).add(s2);
 }
@@ -1860,7 +1879,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // Case 8: (Theta <.? a)
 else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
 if (first) { //writeLog(pair.toString()+"\n");
-Set<Set<UnifyPair>> x1 = unifyCase8(pair, fc);
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> x1 = unifyCase8(pair, fc);
 result.get(7).add(x1);
 if (x1.isEmpty()) {
 undefined.add(pair); //Theta ist nicht im FC
@@ -1869,7 +1888,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 else {
 Set<UnifyPair> s1 = new HashSet<>();
 s1.add(pair);
-Set<Set<UnifyPair>> s2 = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> s2 = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 s2.add(s1);
 result.get(7).add(s2);
 }
@@ -1893,12 +1912,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 /**
 * Cartesian product Case 1: (a <. Theta')
 */
-protected Set<Set<UnifyPair>> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
+protected PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase1(UnifyPair pair, IFiniteClosure fc) {
 PlaceholderType a = (PlaceholderType)pair.getLhsType();
 UnifyType thetaPrime = pair.getRhsType();
 byte variance = pair.getVariance();
-Set<Set<UnifyPair>> result = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>, OrderingUnifyPair>(new OrderingUnifyPair(fc));
 boolean allGen = thetaPrime.getTypeParams().size() > 0;
 for(UnifyType t : thetaPrime.getTypeParams())
@@ -2037,11 +2056,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 /**
 * Cartesian Product Case 2: (a <.? ? ext Theta')
 */
-private Set<Set<UnifyPair>> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
+private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase2(UnifyPair pair, IFiniteClosure fc) {
 PlaceholderType a = (PlaceholderType) pair.getLhsType();
 ExtendsType extThetaPrime = (ExtendsType) pair.getRhsType();
 byte variance = pair.getVariance();
-Set<Set<UnifyPair>> result = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 UnifyType aPrime = PlaceholderType.freshPlaceholder();
 ((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2064,12 +2083,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 /**
 * Cartesian Product Case 3: (a <.? ? sup Theta')
 */
-private Set<Set<UnifyPair>> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
+private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
 PlaceholderType a = (PlaceholderType) pair.getLhsType();
 a.reversVariance();
 SuperType subThetaPrime = (SuperType) pair.getRhsType();
 byte variance = pair.getVariance();
-Set<Set<UnifyPair>> result = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 UnifyType aPrime = PlaceholderType.freshPlaceholder();
 ((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
@@ -2094,11 +2113,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 /**
 * Cartesian Product Case 5: (Theta <. a)
 */
-private Set<Set<UnifyPair>> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
+private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase5(UnifyPair pair, IFiniteClosure fc) {
 UnifyType theta = pair.getLhsType();
 PlaceholderType a = (PlaceholderType) pair.getRhsType();
 byte variance = pair.getVariance();
-Set<Set<UnifyPair>> result = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 boolean allGen = theta.getTypeParams().size() > 0;
 for(UnifyType t : theta.getTypeParams())
@@ -2173,11 +2192,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 /**
 * Cartesian Product Case 8: (Theta <.? a)
 */
-private Set<Set<UnifyPair>> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
+private PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> unifyCase8(UnifyPair pair, IFiniteClosure fc) {
 UnifyType theta = pair.getLhsType();
 PlaceholderType a = (PlaceholderType) pair.getRhsType();
 byte variance = pair.getVariance();
-Set<Set<UnifyPair>> result = new HashSet<>();
+PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair> result = new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc));
 //for(UnifyType thetaS : fc.grArg(theta)) {
 Set<UnifyPair> resultPrime = new HashSet<>();
 resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
@@ -2238,8 +2257,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 }
-void writeLog(String str) {
+synchronized void writeLog(String str) {
-synchronized ( this ) {
 if (log) {
 try {
 logFile.write("Thread no.:" + thNo + "\n");
@@ -2252,7 +2270,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 catch (IOException e) {
 System.err.println("kein LogFile");
 }
 }
-}
 }
 }
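The recurring change in this file is that nextSetasList is now obtained via nextSet.toList() from a PartialOrderSet, so it already arrives sorted by the same OrderingUnifyPair that oup.max/oup.min used. The linear extremum scans therefore reduce to picking the last or first list element. An illustrative sketch of the replaced access pattern, reusing the identifiers from the diff above (it assumes nextSetasList preserves the ascending PartialOrderSet order):

// Before: scan an unsorted copy for an extremum.
// Set<UnifyPair> a = oup.max(nextSetasList.iterator());   // variance == 1
// Set<UnifyPair> a = oup.min(nextSetasList.iterator());   // variance == -1

// After: nextSetasList = nextSet.toList() keeps the PartialOrderSet order,
// so the extrema sit at the ends of the list.
Set<UnifyPair> max = nextSetasList.get(nextSetasList.size() - 1);
Set<UnifyPair> min = nextSetasList.get(0);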

Matrix.jav

@@ -1,6 +1,6 @@
 import java.util.Vector;
 import java.lang.Integer;
-//import java.lang.Float;
+import java.lang.Float;
 //import java.lang.Byte;
 //import java.lang.Boolean;

MatrixOP.jav

@@ -1,6 +1,6 @@
 import java.util.Vector;
 import java.lang.Integer;
-import java.lang.Byte;
+//import java.lang.Byte;
 import java.lang.Boolean;
 public class MatrixOP extends Vector<Vector<Integer>> {