commit 6fcaafe477
parent 1c0fa6a820
Author: Martin Plümicke
Date:   2018-02-28 16:07:02 +01:00

5 changed files with 102 additions and 18 deletions

View File: TypeUnifyTask.java

@@ -25,11 +25,14 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
 import de.dhbwstuttgart.typeinference.unify.model.Unifier;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import com.google.common.collect.Ordering;
 /**
  * Implementation of the type unification algorithm
@@ -63,11 +66,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     protected IFiniteClosure fc;
+    protected Ordering<Set<UnifyPair>> oup;
     protected boolean parallel;
+    public TypeUnifyTask() {
+        rules = new RuleSet();
+    }
     public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
         this.eq = eq;
         this.fc = fc;
+        this.oup = new OrderingUnifyPair(fc);
         this.parallel = parallel;
         try {
             logFile = new FileWriter(new File(rootDirectory+"log"));
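
The new oup field relies on Guava's Ordering, an abstract Comparator whose subclasses implement compare(..) and inherit helpers such as min(Iterator); further down, the task uses exactly that helper to pick the least candidate set. A minimal, self-contained sketch of the pattern (illustrative only, not part of the commit; the element type and class name are made up):

import java.util.Arrays;
import java.util.Iterator;
import com.google.common.collect.Ordering;

// Illustrative only: orders strings by length, the same way OrderingUnifyPair
// orders Set<UnifyPair> values through its compare(..) implementation.
class LengthOrdering extends Ordering<String> {
    @Override
    public int compare(String left, String right) {
        return Integer.compare(left.length(), right.length());
    }
}

class OrderingSketch {
    public static void main(String[] args) {
        Iterator<String> it = Arrays.asList("bb", "a", "ccc").iterator();
        // min(..) returns the element that is least under compare(..): prints "a"
        System.out.println(new LengthOrdering().min(it));
    }
}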
@@ -138,7 +148,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         // Sets of the "second level"
         Set<UnifyPair> undefinedPairs = new HashSet<>();
         if (printtag) System.out.println("eq2s " + eq2s);
-        writeLog(bufferSet.toString()+"\n");
+        writeLog("BufferSet: " + bufferSet.toString()+"\n");
         Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
         //PL 2017-09-20: in calculatePairSets, O .< java.lang.Integer is possibly
         //not evaluated; Faculty example in step 1
@@ -180,13 +190,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         //call of computeCartesianRecursive BEGIN
-        //return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
-    //}
-    //Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
+        return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
+    }
+    Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
         //call of computeCartesianRecursive END
         //no idea where this comes from
@@ -194,9 +204,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         //Must be commented out when computeCartesianRecursive is used BEGIN
         // Cartesian product over all (up to 10) top level sets
-        Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
-            .stream().map(x -> new HashSet<>(x))
-            .collect(Collectors.toCollection(HashSet::new));
+        //Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
+        //    .stream().map(x -> new HashSet<>(x))
+        //    .collect(Collectors.toCollection(HashSet::new));
         //Must be commented out when computeCartesianRecursive is used END
         Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
@@ -204,7 +214,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         Set<TypeUnifyTask> forks = new HashSet<>();
         //Must be commented out when computeCartesianRecursive is used BEGIN
-        for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
+        //for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
             // Flatten the cartesian product
         //Must be commented out when computeCartesianRecursive is used END
             Set<UnifyPair> eqPrime = new HashSet<>();
@@ -266,7 +276,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             }
         }
         //Must be commented out when computeCartesianRecursive is used BEGIN
-        }
+        //}
         //Must be commented out when computeCartesianRecursive is used END
         /*
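
For context on the toggled comments in the three hunks above: the commit stops materializing the full Cartesian product of the top-level sets (the setOps.cartesianProduct pipeline is now commented out) and instead enumerates combinations recursively via computeCartesianRecursive, which can abandon the enumeration early. A rough sketch of that idea under assumed, illustrative names (not the commit's code):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

class CartesianSketch {
    // Fixes one element per level and recurses; returns as soon as the test
    // succeeds, so later combinations are never built (unlike an eager product).
    static <E> List<E> searchRecursive(List<E> chosen, List<List<E>> remaining, Predicate<List<E>> test) {
        if (remaining.isEmpty()) {
            return test.test(chosen) ? chosen : null;
        }
        List<List<E>> rest = remaining.subList(1, remaining.size());
        for (E candidate : remaining.get(0)) {
            List<E> next = new ArrayList<>(chosen);
            next.add(candidate);
            List<E> hit = searchRecursive(next, rest, test);
            if (hit != null) return hit; // early exit instead of exhausting the product
        }
        return null;
    }
}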
@@ -292,17 +302,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         ArrayList<Set<UnifyPair>> newSet = new ArrayList<>(set);
         Set<Set<UnifyPair>> result = null;
         int i = 0;
-        for(Set<UnifyPair> a : newSet) {
+        Set<UnifyPair> a_next = oup.min(newSet.iterator());
+        while (newSet.size() != 0) {
+            Set<UnifyPair> a = a_next;
+            writeLog("NewSet: " + newSet.toString()+ "\n");
+            newSet.remove(a);
+            if (newSet.size() > 0) a_next = oup.min(newSet.iterator());
+        //for(Set<UnifyPair> a : newSet) {
             i++;
             Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
             elems.add(a);
             if (newSets.isEmpty()) {
-                //UNCOMMENT WHEN USING result = unify2(elems, eq, fc, parallel);
+                result = unify2(elems, eq, fc, parallel);
+                System.out.println("");
             }
             else {
                 result = computeCartesianRecursive(elems,newSets, eq, fc, parallel);
             }
-            if (!result.isEmpty()) break;
+            if (!result.isEmpty()) //&& (oup.compare(a, a_next) == -1)) break;
+                if (oup.compare(a, a_next) == -1) break;
         }
         return result;
     }
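
The reworked loop above no longer walks newSet in iteration order: it repeatedly removes the least remaining candidate under oup, recurses on it, and breaks once a non-empty result was obtained for a candidate that is strictly smaller than the next one. A condensed restatement of that control flow, with illustrative names and a solve callback standing in for unify2/computeCartesianRecursive (assumes a non-empty candidate list and a callback that returns a possibly empty set, never null):

import java.util.List;
import java.util.Set;
import java.util.function.Function;
import com.google.common.collect.Ordering;

class MinFirstSketch {
    // Processes candidate sets from least to greatest under ord.
    static <E> Set<E> minFirst(List<Set<E>> candidates, Ordering<Set<E>> ord,
                               Function<Set<E>, Set<E>> solve) {
        Set<E> result = null;
        Set<E> next = ord.min(candidates.iterator());
        while (!candidates.isEmpty()) {
            Set<E> current = next;
            candidates.remove(current);
            if (!candidates.isEmpty()) next = ord.min(candidates.iterator());
            result = solve.apply(current);
            // stop as soon as something was found for a candidate that is
            // strictly smaller than the next remaining one
            if (!result.isEmpty() && ord.compare(current, next) == -1) break;
        }
        return result;
    }
}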
@@ -332,7 +350,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * This is step one of the unification algorithm.
      * @return The set of pairs that results from repeated application of the inference rules.
      */
-    protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
+    public Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
         /*
          * Rule Application Strategy:
@@ -663,9 +681,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 else
                     resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
                 resultPrime.addAll(substitutionSet);
-                writeLog(substitutionSet.toString());
+                writeLog("Substitution: " + substitutionSet.toString());
                 result.add(resultPrime);
-                writeLog(resultPrime.toString());
+                writeLog("Result: " + resultPrime.toString());
+                //writeLog("MAX: " + oup.max(resultPrime.iterator()).toString());
             }
         }
@@ -692,7 +711,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
         resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
         result.add(resultPrime);
-        writeLog(resultPrime.toString());
+        writeLog("Result: " + resultPrime.toString());
         return result;
     }

View File: IFiniteClosure.java

@@ -61,4 +61,6 @@ public interface IFiniteClosure {
     public Set<UnifyType> getAncestors(UnifyType t);
     public Set<UnifyType> getChildren(UnifyType t);
     public Set<UnifyType> getAllTypesByName(String typeName);
+    public int compare(UnifyType rhsType, UnifyType rhsType2);
 }
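
The added method gives the finite closure a three-way comparison on types. Read together with the FiniteClosure implementation below, the contract appears to be the following (a hedged reading, not documented in the commit itself; s and t are hypothetical UnifyType values):

// fc.compare(s, t) == -1  ->  s <. t can be discharged by the unification rules
// fc.compare(s, t) ==  1  ->  t <. s can be discharged instead
// fc.compare(s, t) ==  0  ->  neither direction reduces; s and t are incomparable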

View File: FiniteClosure.java

@@ -8,11 +8,14 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import com.google.common.collect.Ordering;
 //PL 18-02-05 Unifier replaced by Matcher
 //greater still needs to be replaced
 import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
 import de.dhbwstuttgart.typeinference.unify.Match;
+import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
@@ -20,7 +23,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
  * The finite closure for the type unification
  * @author Florian Steurer
  */
-public class FiniteClosure implements IFiniteClosure {
+public class FiniteClosure extends Ordering<UnifyType> implements IFiniteClosure {
     /**
      * A map that maps every type to the node in the inheritance graph that contains that type.
@@ -430,4 +433,24 @@ public class FiniteClosure implements IFiniteClosure {
     public String toString(){
         return this.inheritanceGraph.toString();
     }
+
+    public int compare (UnifyType left, UnifyType right) {
+        UnifyPair up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
+        TypeUnifyTask unifyTask = new TypeUnifyTask();
+        HashSet<UnifyPair> hs = new HashSet<>();
+        hs.add(up);
+        Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
+        long smallerLen = smallerRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType && x.getRhsType() instanceof PlaceholderType)).count();
+        if (smallerLen == 0) return -1;
+        else {
+            up = new UnifyPair(right, left, PairOperator.SMALLERDOT);
+            //TypeUnifyTask unifyTask = new TypeUnifyTask();
+            hs = new HashSet<>();
+            hs.add(up);
+            Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);
+            long greaterLen = greaterRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType && x.getRhsType() instanceof PlaceholderType)).count();
+            if (greaterLen == 0) return 1;
+            else return 0;
+        }
+    }
 }
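
Distilled shape of the decision made in compare above, with the applyTypeUnificationRules call plus the placeholder filter folded into a hypothetical helper; a sketch under those assumptions, not the commit's code:

import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

class CompareSketch {
    // Hypothetical helper: true iff applyTypeUnificationRules leaves no pair whose
    // two sides are not both placeholders, i.e. sub <. sup could be discharged.
    interface Discharges {
        boolean test(UnifyType sub, UnifyType sup);
    }

    static int compare(Discharges d, UnifyType left, UnifyType right) {
        if (d.test(left, right)) return -1; // left <. right holds in the finite closure
        if (d.test(right, left)) return 1;  // only the reversed constraint holds
        return 0;                           // neither direction reduces: incomparable
    }
}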

View File: OrderingUnifyPair.java (new file)

@@ -0,0 +1,39 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap;
import java.util.Optional;
import java.util.Set;
import java.util.function.BinaryOperator;
import java.util.stream.Stream;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {

    protected IFiniteClosure fc;

    public OrderingUnifyPair(IFiniteClosure fc) {
        this.fc = fc;
    }

    public int compare (UnifyPair left, UnifyPair right) {
        if (left == null || right == null)
            System.out.println("Fehler");
        return fc.compare(left.getRhsType(), right.getRhsType());
    }

    public int compare (Set<UnifyPair> left, Set<UnifyPair> right) {
        Stream<UnifyPair> ls = left.stream().filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
        Stream<UnifyPair> rs = right.stream().filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
        BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
        HashMap<UnifyType,UnifyPair> hm = rs.reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
        ls = ls.filter(x -> !(hm.get(x.getLhsType()) == null));
        Optional<Integer> si = ls.map(x -> compare(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } );
        if (!si.isPresent()) return 0;
        else return si.get();
    }
}
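
A minimal usage sketch of the new class, mirroring how TypeUnifyTask consumes it via the oup field and oup.min(newSet.iterator()); the method name and the surrounding class are illustrative, while the imports match the packages used in this commit:

import java.util.List;
import java.util.Set;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

class OrderingUnifyPairUsage {
    // Returns the candidate unifier set that is least under OrderingUnifyPair,
    // just as TypeUnifyTask does with oup.min(newSet.iterator()).
    static Set<UnifyPair> pickLeast(IFiniteClosure fc, List<Set<UnifyPair>> candidates) {
        Ordering<Set<UnifyPair>> oup = new OrderingUnifyPair(fc);
        return oup.min(candidates.iterator());
    }
}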

View File: UnifyPair.java

@@ -4,6 +4,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 /**
  * A pair which contains two types and an operator, e.g. (Integer <. a).
  * @author Florian Steurer