Merge branch 'unify' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into refactoring

JanUlrich 2016-04-28 16:42:16 +02:00
commit 9a51e79f2a
12 changed files with 775 additions and 697 deletions

View File

@@ -127,11 +127,11 @@ class LogHistory extends ArrayList<LogLine>{
@Override
public String toString(){
String ret = "";
StringBuilder ret = new StringBuilder();
for(LogLine l : this){
ret += l.toString() + "\n";
ret.append(l.toString() + "\n");
}
return ret;
return ret.toString();
}
}
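For readability, the method as it reads after this hunk, reconstructed from the added lines above (repeated String concatenation copies the accumulated string on every iteration; the StringBuilder appends in place):

@Override
public String toString(){
	StringBuilder ret = new StringBuilder();
	for(LogLine l : this){
		ret.append(l.toString() + "\n");
	}
	return ret.toString();
}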

View File

@@ -1,640 +1,16 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
/**
* Implementation of the type unification algorithm
* @author Florian Steurer
*/
public class TypeUnify {
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules = new RuleSet();
// Currently this seems to slow things down, probably too many threads;
// add a thread pool and a task queue and a minimum problem size for spawning a new thread
protected boolean parallel = false;
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel sets. 8 of the 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
// If pairs occurred that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty())
return new HashSet<>();
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
// Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
// TODO possible optimization: the loop could be parallelized (does not seem to pay off)
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
for(List<Set<UnifyPair>> s : cartResult) {
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
flat.add(flat1);
}
topLevelSets.add(flat);
}
// Cartesian product over all (up to 10) top level sets
Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);
// Flatten the cartesian product
// TODO parallelization possible (does not seem to pay off)
Set<Set<UnifyPair>> eqPrimeSetFlat = new HashSet<>();
for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
Set<UnifyPair> buffer = new HashSet<>();
setToFlatten.stream().forEach(x -> buffer.addAll(x));
eqPrimeSetFlat.add(buffer);
}
/*
* Step 5: Substitution
*/
Set<Set<UnifyPair>> restartSet = new HashSet<>();
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
if(parallel) {
Set<Set<UnifyPair>> restartSetSync = Collections.synchronizedSet(restartSet);
Set<Set<UnifyPair>> eqPrimePrimeSetSync = Collections.synchronizedSet(eqPrimePrimeSet);
eqPrimeSetFlat.parallelStream().forEach(eqPrime -> {
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
if (eqPrime.equals(eq))
eqPrimePrimeSetSync.add(eqPrime);
else if(eqPrimePrime.isPresent())
restartSetSync.add(eqPrimePrime.get());
else
restartSetSync.add(eqPrime);
});
}
else {
for(Set<UnifyPair> eqPrime : eqPrimeSetFlat) {
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
if (eqPrime.equals(eq))
eqPrimePrimeSet.add(eqPrime);
else if(eqPrimePrime.isPresent())
restartSet.add(eqPrimePrime.get());
else
restartSet.add(eqPrime);
}
}
/*
* Step 6 a) Restart for pairs where subst was applied
* b) Build the union over everything
*/
// TODO parallelization possible (probably worthwhile)
if(parallel) {
Set<Set<UnifyPair>> eqPrimePrimeSetSync = Collections.synchronizedSet(eqPrimePrimeSet);
restartSet.parallelStream().forEach( x -> eqPrimePrimeSetSync.addAll(unify(x, fc)));
}
else {
for(Set<UnifyPair> eqss : restartSet)
eqPrimePrimeSet.addAll(this.unify(eqss, fc));
}
/*
* Step 7: Filter empty sets;
*/
return eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pairs
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : rules.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the equation eq into a set eq1s where both terms are type variables,
* and a set eq2s where at least one of the two terms is not a type variable.
* @param eq Set of pairs to be split.
* @param eq1s Subset of eq where both terms are type variables.
* @param eq2s eq/eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.get(0).add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.get(7).add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the above forms are contradictory.
else
undefined.add(pair);
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
List<UnifyType> freshTphs = new ArrayList<>();
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
for(UnifyType theta : smaller) {
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
UnifyType freshTheta = theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0])));
resultPrime.add(new UnifyPair(a, freshTheta, PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
result.add(resultPrime);
}
}
}
}
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 4: (a <.? Theta')
*/
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.EQUALSDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
result.add(resultPrime);
}
return result;
}
/**
* Cartesian Product Case 6: (? ext Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType extFreshTph = new ExtendsType(freshTph);
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extFreshTph, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(extTheta.getExtendedType(), freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 7: (? sup Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType theta = supTheta.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
//}
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return unifyTask.join();
}
}
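With the old 640-line implementation removed, TypeUnify is now a thin entry point: it wraps the constraint set in a TypeUnifyTask and runs it on a ForkJoinPool. Note that ForkJoinPool.invoke(task) blocks until the task and everything it forked have completed and already returns the result, so the subsequent unifyTask.join() simply reads the same completed value. A minimal sketch of a hypothetical caller (the names UnifyCaller and solve, and the origin of eq and fc, are illustrative assumptions, not part of the repository):

import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

// Hypothetical caller: `eq` and `fc` are assumed to be produced by the
// type inference front end, as elsewhere in this diff.
class UnifyCaller {
    Set<Set<UnifyPair>> solve(Set<UnifyPair> eq, IFiniteClosure fc) {
        // The new TypeUnify spawns a TypeUnifyTask internally and blocks in
        // pool.invoke(...) until the task and all forked subtasks are done.
        return new TypeUnify().unify(eq, fc);
    }
}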

View File

@@ -0,0 +1,629 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
/**
* Implementation of the type unification algorithm
* @author Florian Steurer
*/
public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
private static final long serialVersionUID = 1L;
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules = new RuleSet();
protected Set<UnifyPair> eq;
protected IFiniteClosure fc;
public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc) {
this.eq = eq;
this.fc = fc;
}
@Override
protected Set<Set<UnifyPair>> compute() {
return unify(eq, fc);
}
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel sets. 8 of the 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
// If pairs occurred that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty())
return new HashSet<>();
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
// Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
for(List<Set<UnifyPair>> s : cartResult) {
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
flat.add(flat1);
}
topLevelSets.add(flat);
}
// Cartesian product over all (up to 10) top level sets
Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<TypeUnifyTask> forks = new HashSet<>();
for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
// Flatten the cartesian product
Set<UnifyPair> eqPrime = new HashSet<>();
setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
/*
* Step 5: Substitution
*/
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
if (eqPrime.equals(eq))
eqPrimePrimeSet.add(eqPrime);
else if(eqPrimePrime.isPresent()) {
TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc);
forks.add(fork);
fork.fork();
}
else {
TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc);
forks.add(fork);
fork.fork();
}
}
/*
* Step 6 b) Build the union over everything.
*/
// TODO parallel stream and synchronized set?
for(TypeUnifyTask fork : forks)
eqPrimePrimeSet.addAll(fork.join());
/*
* Step 7: Filter empty sets;
*/
return eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pairs
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : rules.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the equation eq into a set eq1s where both terms are type variables,
* and a set eq2s where at least one of the two terms is not a type variable.
* @param eq Set of pairs to be split.
* @param eq1s Subset of eq where both terms are type variables.
* @param eq2s eq/eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.get(0).add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.get(7).add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the above forms are contradictory.
else
undefined.add(pair);
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
List<UnifyType> freshTphs = new ArrayList<>();
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
for(UnifyType theta : smaller) {
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
UnifyType freshTheta = theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0])));
resultPrime.add(new UnifyPair(a, freshTheta, PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
result.add(resultPrime);
}
}
}
}
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 4: (a <.? Theta')
*/
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.EQUALSDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
result.add(resultPrime);
}
return result;
}
/**
* Cartesian Product Case 6: (? ext Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType extFreshTph = new ExtendsType(freshTph);
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extFreshTph, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(extTheta.getExtendedType(), freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 7: (? sup Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType theta = supTheta.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
//}
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
}
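In the new TypeUnifyTask, Step 6 a) forks one child task per substituted constraint set instead of recursing sequentially, and Step 6 b) joins them all afterwards, so the children can run on other ForkJoinPool workers. A minimal, self-contained sketch of that fork-all-then-join-all shape (the class and the data here are illustrative, not from the repository):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Illustrative only: fork one child per sub-list, then union the child results,
// mirroring the shape of Step 6 a) and 6 b) in TypeUnifyTask.compute().
class UnionWordsTask extends RecursiveTask<Set<String>> {
    private final List<String> words;
    UnionWordsTask(List<String> words) { this.words = words; }

    @Override
    protected Set<String> compute() {
        if (words.size() <= 2)                       // small problem: solve directly
            return new HashSet<>(words);
        int half = words.size() / 2;
        List<UnionWordsTask> forks = new ArrayList<>();
        for (List<String> part : List.of(words.subList(0, half), words.subList(half, words.size()))) {
            UnionWordsTask child = new UnionWordsTask(part);
            forks.add(child);
            child.fork();                            // Step 6 a): start children asynchronously
        }
        Set<String> union = new HashSet<>();
        for (UnionWordsTask fork : forks)
            union.addAll(fork.join());               // Step 6 b): build the union over everything
        return union;
    }
}

Run with new ForkJoinPool().invoke(new UnionWordsTask(words)); forking every child before joining any of them is what lets the subtasks execute concurrently.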

View File

@@ -14,7 +14,7 @@ public final class ExtendsType extends WildcardType {
* @param extendedType The extended type e.g. Integer in "? extends Integer"
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType);
super("? extends " + extendedType.getName(), extendedType);
}
/**
@@ -29,6 +29,9 @@ public final class ExtendsType extends WildcardType {
*/
@Override
public UnifyType setTypeParams(TypeParams newTp) {
UnifyType newType = wildcardedType.setTypeParams(newTp);
if(newType == wildcardedType)
return this; // Reduced the amount of objects created
return new ExtendsType(wildcardedType.setTypeParams(newTp));
}
@@ -44,7 +47,10 @@ public final class ExtendsType extends WildcardType {
@Override
UnifyType apply(Unifier unif) {
return new ExtendsType(wildcardedType.apply(unif));
UnifyType newType = wildcardedType.apply(unif);
if(newType.hashCode() == wildcardedType.hashCode() && newType.equals(wildcardedType))
return this; // Reduced the amount of objects created
return new ExtendsType(newType);
}
@Override
@@ -61,7 +67,12 @@ public final class ExtendsType extends WildcardType {
if(!(obj instanceof ExtendsType))
return false;
if(obj.hashCode() != this.hashCode())
return false;
ExtendsType other = (ExtendsType) obj;
return other.getWildcardedType().equals(wildcardedType);
}

View File

@@ -33,7 +33,6 @@ public class FiniteClosure implements IFiniteClosure {
*/
private Set<UnifyPair> pairs;
//TODO Check: pass types without an edge in the graph as an extra set to the constructor?
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/

View File

@@ -41,6 +41,8 @@ public class FunNType extends UnifyType {
@Override
public UnifyType setTypeParams(TypeParams newTp) {
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this;
return getFunNType(newTp);
}
@@ -58,7 +60,11 @@ public class FunNType extends UnifyType {
UnifyType apply(Unifier unif) {
// TODO this bypasses the validation of the type parameters.
// Wildcard types can be unified into FunNTypes.
return new FunNType(typeParams.apply(unif));
TypeParams newParams = typeParams.apply(unif);
if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this;
return new FunNType(newParams);
}
@Override
@@ -71,8 +77,11 @@ public class FunNType extends UnifyType {
if(!(obj instanceof FunNType))
return false;
FunNType other = (FunNType) obj;
if(obj.hashCode() != this.hashCode())
return false;
FunNType other = (FunNType) obj;
return other.getTypeParams().equals(typeParams);
}

View File

@@ -11,12 +11,19 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
*/
public final class ReferenceType extends UnifyType {
public ReferenceType(String name, UnifyType... typeParams) {
super(name, new TypeParams(typeParams));
/**
* The buffered hashCode
*/
private final int hashCode;
public ReferenceType(String name, UnifyType... params) {
super(name, new TypeParams(params));
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
}
public ReferenceType(String name, TypeParams params) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
}
@Override
@@ -31,17 +38,24 @@ public final class ReferenceType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
return new ReferenceType(typeName, typeParams.apply(unif));
TypeParams newParams = typeParams.apply(unif);
if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this;
return new ReferenceType(typeName, newParams);
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return new ReferenceType(new String(typeName), newTp);
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this; // reduced the amount of objects created
return new ReferenceType(typeName, newTp);
}
@Override
public int hashCode() {
return 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
return hashCode;
}
@Override
@@ -49,6 +63,9 @@ public final class ReferenceType extends UnifyType {
if(!(obj instanceof ReferenceType))
return false;
if(obj.hashCode() != this.hashCode())
return false;
ReferenceType other = (ReferenceType) obj;
if(!other.getName().equals(typeName))

View File

@@ -35,6 +35,9 @@ public final class SuperType extends WildcardType {
*/
@Override
public UnifyType setTypeParams(TypeParams newTp) {
UnifyType newType = wildcardedType.setTypeParams(newTp);
if(newType == wildcardedType)
return this; // Reduced the amount of objects created
return new SuperType(wildcardedType.setTypeParams(newTp));
}
@@ -50,7 +53,10 @@ public final class SuperType extends WildcardType {
@Override
UnifyType apply(Unifier unif) {
return new SuperType(wildcardedType.apply(unif));
UnifyType newType = wildcardedType.apply(unif);
if(newType.hashCode() == wildcardedType.hashCode() && newType.equals(wildcardedType))
return this; // Reduced the amount of objects created
return new SuperType(newType);
}
@Override
@@ -67,6 +73,9 @@ public final class SuperType extends WildcardType {
if(!(obj instanceof SuperType))
return false;
if(obj.hashCode() != this.hashCode())
return false;
SuperType other = (SuperType) obj;
return other.getSuperedType().equals(wildcardedType);
}

View File

@@ -30,7 +30,7 @@ public final class TypeParams implements Iterable<UnifyType>{
typeParams[i] = types.get(i);
// Hashcode calculation is expensive and must be cached.
hashCode = Arrays.hashCode(typeParams);
hashCode = Arrays.deepHashCode(typeParams);
}
/**
@@ -39,9 +39,9 @@ public final class TypeParams implements Iterable<UnifyType>{
*/
public TypeParams(UnifyType... types) {
typeParams = types;
// Hashcode calculation is expensive and must be cached.
hashCode = Arrays.hashCode(typeParams);
hashCode = Arrays.deepHashCode(typeParams);
}
/**
@@ -73,8 +73,22 @@ public final class TypeParams implements Iterable<UnifyType>{
*/
public TypeParams apply(Unifier unif) {
UnifyType[] newParams = new UnifyType[typeParams.length];
for(int i = 0; i < typeParams.length; i++)
newParams[i] = typeParams[i].apply(unif);
// If true, a type was modified and a new TypeParams object has to be created.
// Otherwise it is enough to return this object, since it is immutable.
// This reduced the number of TypeParams instances needed for the lambda14 test from
// 130,000 to 30,000 without a decrease in speed.
boolean isNew = false;
for(int i = 0; i < typeParams.length; i++) {
UnifyType newType = typeParams[i].apply(unif);
newParams[i] = newType;
if(!isNew && (newType.hashCode() != typeParams[i].hashCode() || !newType.equals(typeParams[i])))
isNew = true;
}
if(!isNew)
return this;
return new TypeParams(newParams);
}
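The identity-preserving apply above is an instance of a general copy-on-write rule for immutable values: transform element-wise and return the receiver itself when nothing changed, so that enclosing immutable objects (ReferenceType, ExtendsType, SuperType in the other hunks) can in turn keep their instances. A minimal sketch of the idea on an illustrative immutable type that is not part of the repository:

import java.util.function.UnaryOperator;

// Illustrative immutable pair with a copy-on-write map(): if neither element
// changes, the same instance is returned and no new object is allocated.
final class ImmutablePair<T> {
    final T first, second;
    ImmutablePair(T first, T second) { this.first = first; this.second = second; }

    ImmutablePair<T> map(UnaryOperator<T> f) {
        T newFirst = f.apply(first);
        T newSecond = f.apply(second);
        if (newFirst.equals(first) && newSecond.equals(second))
            return this;                       // unchanged: reuse this instance
        return new ImmutablePair<>(newFirst, newSecond);
    }
}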
@@ -107,8 +121,13 @@ public final class TypeParams implements Iterable<UnifyType>{
* @throws ArrayOutOfBoundsException if i > this.size()-1.
*/
public TypeParams set(UnifyType t, int idx) {
// Reduce the creation of new objects to use less memory.
// Reduced the number of TypeParams instances needed in the lambda14 test from
// 150,000 to 130,000.
if(t.hashCode() == typeParams[idx].hashCode() && t.equals(typeParams[idx]))
return this;
UnifyType[] newparams = Arrays.copyOf(typeParams, typeParams.length);
newparams[idx] = t;
newparams[idx] = t;
return new TypeParams(newparams);
}
@@ -127,6 +146,9 @@ public final class TypeParams implements Iterable<UnifyType>{
if(!(obj instanceof TypeParams))
return false;
if(obj.hashCode() != this.hashCode())
return false;
TypeParams other = (TypeParams) obj;
if(other.size() != this.size())

View File

@@ -63,6 +63,9 @@ public class UnifyPair {
public boolean equals(Object obj) {
if(!(obj instanceof UnifyPair))
return false;
if(obj.hashCode() != this.hashCode())
return false;
UnifyPair other = (UnifyPair) obj;

View File

@@ -47,6 +47,9 @@ public abstract class WildcardType extends UnifyType {
if(!(obj instanceof WildcardType))
return false;
if(obj.hashCode() != this.hashCode())
return false;
WildcardType other = (WildcardType) obj;
return other.getWildcardedType().equals(wildcardedType);
}
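Several of the hunks above add the same two optimizations to the model classes: the hash code is computed once and cached (ReferenceType, TypeParams), and equals() first compares hash codes as a cheap guard before the structural comparison (ExtendsType, SuperType, FunNType, ReferenceType, TypeParams, UnifyPair, WildcardType). The guard is only sound because hashCode() stays consistent with equals(). A minimal sketch of the pattern on an illustrative value class that is not part of the repository:

// Illustrative value class showing the cached-hashCode / equals fast path
// used by the model classes touched in this commit.
final class Name {
    private final String value;
    private final int hashCode;                  // cached once; the class is immutable

    Name(String value) {
        this.value = value;
        this.hashCode = 31 + 17 * value.hashCode();
    }

    @Override
    public int hashCode() { return hashCode; }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Name))
            return false;
        if (obj.hashCode() != this.hashCode)     // cheap guard: different hash => not equal
            return false;
        return ((Name) obj).value.equals(value); // full structural comparison
    }
}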

View File

@@ -17,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import junit.framework.Assert;
public class UnifyTest extends TypeUnify {
public class UnifyTest extends TypeUnify{
/**
* Testing the unification for cases with (n)one pair and without generics.
@@ -792,52 +792,52 @@ public class UnifyTest extends TypeUnify {
}
@Test
public void permuteParamsTest() {
TypeFactory tf = new TypeFactory();
ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
UnifyType p11 = tf.getPlaceholderType("p11");
UnifyType p12 = tf.getExtendsType(tf.getSimpleType("p12"));
UnifyType p13 = tf.getSimpleType("p13");
UnifyType p21 = tf.getPlaceholderType("p21");
UnifyType p22 = tf.getPlaceholderType("p22");
UnifyType p31 = tf.getSimpleType("p31", "T");
Set<UnifyType> p1 = new HashSet<>();
p1.add(p11);
p1.add(p12);
p1.add(p13);
Set<UnifyType> p2 = new HashSet<>();
p2.add(p21);
p2.add(p22);
Set<UnifyType> p3 = new HashSet<>();
p3.add(p31);
candidates.add(p1);
candidates.add(p2);
candidates.add(p3);
/*
* Expected Result:
* {<x, y, z> | x in { p11, p12, p13}, y in { p21, p22 }, z in { p31 }}
*/
Set<TypeParams> expected = Arrays.stream(new TypeParams[] {
new TypeParams(p11, p21, p31),
new TypeParams(p11, p22, p31),
new TypeParams(p12, p21, p31),
new TypeParams(p12, p22, p31),
new TypeParams(p13, p21, p31),
new TypeParams(p13, p22, p31)
}).collect(Collectors.toSet());
Set<TypeParams> actual = permuteParams(candidates);
Assert.assertEquals(expected, actual);
}
// @Test
// public void permuteParamsTest() {
// TypeFactory tf = new TypeFactory();
// ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
//
// UnifyType p11 = tf.getPlaceholderType("p11");
// UnifyType p12 = tf.getExtendsType(tf.getSimpleType("p12"));
// UnifyType p13 = tf.getSimpleType("p13");
// UnifyType p21 = tf.getPlaceholderType("p21");
// UnifyType p22 = tf.getPlaceholderType("p22");
// UnifyType p31 = tf.getSimpleType("p31", "T");
//
// Set<UnifyType> p1 = new HashSet<>();
// p1.add(p11);
// p1.add(p12);
// p1.add(p13);
//
// Set<UnifyType> p2 = new HashSet<>();
// p2.add(p21);
// p2.add(p22);
//
// Set<UnifyType> p3 = new HashSet<>();
// p3.add(p31);
//
// candidates.add(p1);
// candidates.add(p2);
// candidates.add(p3);
//
//
// /*
// * Expected Result:
// * {<x, y, z> | x in { p11, p12, p13}, y in { p21, p22 }, z in { p31 }}
// */
// Set<TypeParams> expected = Arrays.stream(new TypeParams[] {
// new TypeParams(p11, p21, p31),
// new TypeParams(p11, p22, p31),
// new TypeParams(p12, p21, p31),
// new TypeParams(p12, p22, p31),
// new TypeParams(p13, p21, p31),
// new TypeParams(p13, p22, p31)
// }).collect(Collectors.toSet());
//
// Set<TypeParams> actual = permuteParams(candidates);
//
// Assert.assertEquals(expected, actual);
// }
private Set<Set<UnifyPair>> filterGeneratedTPHsMultiple(Set<Set<UnifyPair>> set) {
return set.stream().map(x -> filterGeneratedTPHs(x)).collect(Collectors.toSet());