commit 6a87f443e4
parent 6ce9f4415e
Author: Martin Plümicke
Date:   2018-10-19 13:08:37 +02:00

4 changed files with 343 additions and 64 deletions

	modified:   ../../src/de/dhbwstuttgart/core/JavaTXCompiler.java
	modified:   ../../src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
	modified:   ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
	modified:   ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

../../src/de/dhbwstuttgart/core/JavaTXCompiler.java

@@ -239,14 +239,7 @@ public class JavaTXCompiler {
logFile.write(ASTTypePrinter.print(sf));
}
logFile.flush();
Set<List<Constraint<UnifyPair>>> cardProd = unifyCons.cartesianProduct();
for (List<Constraint<UnifyPair>> xCons : cardProd ){
Set<UnifyPair> xConsSet = new HashSet<>();
for (Constraint<UnifyPair> constraint : xCons) {
xConsSet.addAll(constraint);
}
//.collect(Collectors.toCollection(ArrayList::new))))
System.out.println(xConsSet);
Set<String> paraTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().map(y -> y.getParameterList().getFormalparalist()
.stream().filter(z -> z.getType() instanceof TypePlaceholder)
.map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
@@ -300,13 +293,14 @@ public class JavaTXCompiler {
}
return y; } );
varianceInheritanceConstrainSet(unifyCons);
Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
results.addAll(result);
}
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
@@ -328,7 +322,8 @@ public class JavaTXCompiler {
}
/**
* Inherits all variances
* For pairs (a <. Theta) or (Theta <. a), if a has a variance != 0,
* that variance is inherited by all type variables in Theta.
* @param eq The set of constraints
*/
private void varianceInheritance(Set<UnifyPair> eq) {
@@ -359,7 +354,8 @@ public class JavaTXCompiler {
}
/**
* Inherits all variances
* For pairs (a <. Theta) or (Theta <. a), if a has a variance != 0,
* that variance is inherited by all type variables in Theta.
* @param eq The set of constraints
*/
private void varianceInheritanceConstrainSet(ConstraintSet<UnifyPair> cons) {
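
A minimal, self-contained sketch of the variance-inheritance rule documented above, using hypothetical stand-in types (TypeVar, SubtypePair) rather than the project's PlaceholderType/UnifyPair API: a pair (a <. Theta) whose placeholder a has a nonzero variance pushes that variance onto every type variable occurring in Theta, repeating until a fixed point is reached; the (Theta <. a) direction works symmetrically and is omitted for brevity.

import java.util.*;

// Hedged sketch only: simplified stand-ins, not the compiler's actual types.
final class TypeVar {
    final String name;
    int variance;                        // -1, 0 or +1, like PlaceholderType's variance
    TypeVar(String name, int variance) { this.name = name; this.variance = variance; }
}

final class SubtypePair {                // models a pair (a <. Theta)
    final TypeVar lhs;                   // the placeholder a
    final List<TypeVar> rhsVars;         // the type variables occurring in Theta
    SubtypePair(TypeVar lhs, List<TypeVar> rhsVars) { this.lhs = lhs; this.rhsVars = rhsVars; }
}

public class VarianceInheritanceSketch {
    // Pushes every nonzero variance onto the still-unset variables of the other side,
    // iterating until nothing changes any more.
    static void inheritVariance(Collection<SubtypePair> pairs) {
        boolean changed = true;
        while (changed) {
            changed = false;
            for (SubtypePair p : pairs) {
                if (p.lhs.variance == 0) continue;
                for (TypeVar v : p.rhsVars) {
                    if (v.variance == 0) { v.variance = p.lhs.variance; changed = true; }
                }
            }
        }
    }

    public static void main(String[] args) {
        TypeVar a = new TypeVar("a", 1), b = new TypeVar("b", 0), c = new TypeVar("c", 0);
        List<SubtypePair> pairs = List.of(
                new SubtypePair(a, List.of(b)),   // a <. List<b>
                new SubtypePair(b, List.of(c)));  // b <. List<c>
        inheritVariance(pairs);
        System.out.println(b.variance + " " + c.variance);   // prints: 1 1
    }
}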

../../src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java

@@ -70,4 +70,12 @@ public class ConstraintSet<A> {
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
}
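
The two new accessors expose the conjunctive and the disjunctive part of a ConstraintSet separately: getUndConstraints() returns the pairs every solution must satisfy, getOderConstraints() returns a list of alternative constraint groups of which exactly one alternative has to be chosen per entry. A self-contained sketch of that structure, with plain strings standing in for UnifyPair (illustration only, not the project's code):

import java.util.*;

public class ConstraintSplitSketch {
    public static void main(String[] args) {
        // und-constraints: must all hold in every solution
        Set<String> und = Set.of("a <. Integer", "b =. a");

        // oder-constraints: per entry exactly one alternative is picked;
        // each alternative is itself a set of pairs (here just one pair each)
        List<List<Set<String>>> oder = List.of(
                List.of(Set.of("c =. Integer"), Set.of("c =. Double")),
                List.of(Set.of("d =. String")));

        // Upper bound on the combinations a solver has to consider:
        long combinations = oder.stream().mapToLong(List::size).reduce(1L, (x, y) -> x * y);
        System.out.println(und.size() + " und-constraints, " + combinations + " oder-combinations");
    }
}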

../../src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java

@@ -1,9 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -22,4 +24,10 @@ public class TypeUnify {
return res;
}
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;
}
}
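
unifyOderConstraints drives the new TypeUnifyTask synchronously by calling compute() directly (parallel is passed as false). Below is a minimal sketch of the two ways such a RecursiveTask can be driven, with a trivial stand-in task instead of the real TypeUnifyTask:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class DriveTaskSketch {
    // Trivial stand-in; only the driving pattern matters here.
    static class DemoTask extends RecursiveTask<Integer> {
        @Override
        protected Integer compute() { return 42; }
    }

    public static void main(String[] args) {
        // 1) Direct call: runs on the current thread, as unifyOderConstraints does above.
        Integer sequential = new DemoTask().compute();

        // 2) Pool-driven: how a parallel variant could be submitted instead.
        Integer pooled = ForkJoinPool.commonPool().invoke(new DemoTask());

        System.out.println(sequential + " " + pooled);   // 42 42
    }
}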

../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -18,6 +18,7 @@ import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
@@ -71,7 +72,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
protected IRuleSet rules;
protected Set<UnifyPair> eq;
protected Set<UnifyPair> eq; //und-constraints
protected List<Set<Set<UnifyPair>>> oderConstraints;
protected IFiniteClosure fc;
@@ -101,6 +104,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
rules = new RuleSet(logFile);
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraints = oderConstraints.stream().map(x -> {
Set<Set<UnifyPair>> ret = new HashSet<>();
for (Constraint<UnifyPair> y : x) {
ret.add(new HashSet<>(y));
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
}
/**
* Inherits all variances
@@ -141,6 +163,251 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else return res;
}
protected Set<Set<UnifyPair>> computeNew() {
Set<Set<UnifyPair>> fstElems = new HashSet<>();
fstElems.add(eq);
Set<Set<UnifyPair>> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel);
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//all elements are singletons
Set<UnifyPair> eq = new HashSet<>();
fstElems.stream().forEach(x -> eq.addAll(x));
Set<Set<UnifyPair>> result = unify(eq, fc, parallel);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
try {
//List<Set<UnifyPair>>
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
}
catch (java.lang.IllegalArgumentException e) {
System.out.print("");
}
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
.reduce((a,b)-> {if (a==b) return a; else return 0; }))
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
//if (variance == 1 && nextSetasList.size() > 1) {
// List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
// for (int ii = 0; ii < nextSetasList.size();ii++) {
// al.add(0,nextSetasList.get(ii));
// }
// nextSetasList = al;
//}
//Set<UnifyPair> a = nextSetasListIt.next();
/*if (nextSetasList.size()>1) { to be deleted
if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else if (variance == 0) {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
*/
if (!nextSetasList.iterator().hasNext())
System.out.print("");
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
writeLog("nextSetasList: " + nextSetasList.toString());
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a = null;
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == 0) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
//nextSetasList.remove(a);
/* to be deleted
if (nextSetasList.size() > 0) {
if (nextSetasList.size()>1) {
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
}
*/
//PL 2018-03-01
//TODO: 1. distinguish maximum and minimum
//TODO: 2. extend compare to all elements that are not of the form X =. ty
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
elems.add(a);
if (remainingSets.isEmpty()) {
writeLog("Vor unify Aufruf: " + eq.toString());
Set<UnifyPair> eq = new HashSet<>();
fstElems.stream().forEach(x -> eq.addAll(x));
Set<Set<UnifyPair>> res = unify(eq, fc, parallel);
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, delete all error cases
result = res;
}
else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//add all error cases and all correct results, respectively
result.addAll(res);
}
//else {
//if correct results are present and error cases come in addition, ignore the error cases
// if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
// result = result;
// }
//}
}
/* commented out to switch off all max and min handling BEGIN */
if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
if (variance == 1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasList.remove(a_next);
}
else {
System.out.println("");
}
}
}
else { if (variance == -1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasList.remove(a_next);
}
else {
System.out.println("");
}
}
}
else if (variance == 0) {
break;
}}
}
/* commented out to switch off all max and min handling END */
if (isUndefinedPairSetSet(res)) {
int nofstred= 0;
Set<UnifyPair> abhSubst = res.stream()
.map(b ->
b.stream()
.map(x -> x.getAllSubstitutions())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get();
Set<UnifyPair> b = a;//effectively final a
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
//.filter(y -> abhSubst.contains(y))
.collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
int len = nextSetasList.size();
Set<UnifyPair> undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten all undef results
Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
.map(x -> { Set<UnifyPair> su = x.getAllSubstitutions(); //all substitutions that were used
su.add(x.getGroundBasePair()); // original pair
su.removeAll(durchschnitt); //remove all currently changed pairs
return new Pair<>(su, x.getGroundBasePair());})
.collect(Collectors.toCollection(HashSet::new));
if (res.size() > 1) {
System.out.println();
}
nextSetasList = nextSetasList.stream().filter(x -> {
//Boolean ret = false;
//for (PlaceholderType var : vars) {
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
//}
return (!x.containsAll(durchschnitt));
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //commented out for testing purposes in order to determine nofstred PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new));
nofstred = nextSetasList.size();
//NOT YET correct PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a: " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
}
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
//else {
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
}
else {//should not occur any more PL 2018-04-03
result.addAll(computeCartesianRecursive(elems, remainingSets, eq, fc, parallel));
}
return result;
}
}
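
computeCartesianRecursiveOderConstraints folds all one-element alternatives into the fixed part and then expands one multi-alternative oder-constraint per recursion level, so the cartesian product of alternatives is enumerated step by step and can be pruned on the way (variance-based max/min selection, removal of alternatives that share the substitutions of a failed branch). The following is a strongly simplified, self-contained sketch of that recursion scheme over plain strings; it is not the project's code and omits unification, ordering and pruning:

import java.util.*;

public class CartesianRecursionSketch {
    // fixed: alternatives already chosen; remaining: oder-constraints still to expand,
    // each given as a list of alternatives. Returns every complete combination.
    static List<List<String>> enumerate(List<String> fixed, List<List<String>> remaining) {
        if (remaining.isEmpty()) {
            return List.of(new ArrayList<>(fixed));        // base case: one finished combination
        }
        List<String> nextSet = remaining.get(0);           // the real code picks/orders this by variance
        List<List<String>> rest = remaining.subList(1, remaining.size());
        List<List<String>> result = new ArrayList<>();
        for (String alternative : nextSet) {
            List<String> extended = new ArrayList<>(fixed);
            extended.add(alternative);
            result.addAll(enumerate(extended, rest));      // descend with one alternative fixed
        }
        return result;
    }

    public static void main(String[] args) {
        List<List<String>> oder = List.of(
                List.of("a =. Integer", "a =. Double"),
                List.of("b =. String"));
        System.out.println(enumerate(new ArrayList<>(), oder));
        // [[a =. Integer, b =. String], [a =. Double, b =. String]]
    }
}
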
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints