Parallelization with Fork-Join Pool

Florian Steurer 2016-04-22 16:51:05 +02:00
parent 8fa8fc8758
commit 7d75f18319
3 changed files with 684 additions and 642 deletions
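The change replaces the serial, recursive unify in TypeUnify with a fork-join based implementation: TypeUnify shrinks to a thin facade that wraps the constraint set in a new TypeUnifyTask (a RecursiveTask) and invokes it on a java.util.concurrent.ForkJoinPool, while the former recursive self-calls become forked subtasks. As a rough orientation only, with invented names and toy data instead of the project's types, the delegation pattern looks like this:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Illustrative sketch only; SumTask/Facade are hypothetical stand-ins for
// TypeUnifyTask/TypeUnify and do not perform any unification.
class SumTask extends RecursiveTask<Long> {
    private final long from, to;
    SumTask(long from, long to) { this.from = from; this.to = to; }

    @Override
    protected Long compute() {
        if (to - from <= 1_000) {                  // small subproblem: solve directly
            long sum = 0;
            for (long i = from; i < to; i++) sum += i;
            return sum;
        }
        long mid = (from + to) / 2;
        SumTask left = new SumTask(from, mid);
        left.fork();                               // left half may run on another worker
        long right = new SumTask(mid, to).compute();
        return right + left.join();                // wait for the forked half
    }
}

class Facade {
    // Mirrors the shape of the new 16-line TypeUnify.unify:
    // wrap the input in a task, invoke it on a ForkJoinPool, return the joined result.
    static long sum(long n) {
        SumTask task = new SumTask(0, n);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(task);
        return task.join();
    }

    public static void main(String[] args) {
        System.out.println(Facade.sum(10_000));    // prints 49995000
    }
}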

TypeUnify.java

@@ -1,605 +1,16 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
/**
* Implementation of the type unification algorithm
* @author Florian Steurer
*/
public class TypeUnify {
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules = new RuleSet();
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel sets. 8 of the 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
// If pairs occurred that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty())
return new HashSet<>();
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
// Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
// TODO Possible optimization: this loop could be parallelized (does not seem worthwhile)
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
for(List<Set<UnifyPair>> s : cartResult) {
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
flat.add(flat1);
}
topLevelSets.add(flat);
}
// Cartesian product over all (up to 10) top level sets
Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
// TODO parallelization possible (does not seem worthwhile)
/*
* Parallelization via parallelStream() is slower than the serial execution,
* presumably because too much thread overhead is created.
* The best solution here is probably a fork-join framework.
*/
for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
// Flatten the cartesian product
Set<UnifyPair> eqPrime = new HashSet<>();
setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
/*
* Step 5: Substitution
*/
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
/*
* Step 6 a) Restart for pairs where subst was applied
* b) Build the union over everything
*/
if (eqPrime.equals(eq))
eqPrimePrimeSet.add(eqPrime);
else if(eqPrimePrime.isPresent())
eqPrimePrimeSet.addAll(this.unify(eqPrimePrime.get(), fc));
else
eqPrimePrimeSet.addAll(this.unify(eqPrime, fc));
}
/*
* Step 7: Filter for sets in solved form.
*/
return eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pairs
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : rules.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the set eq into a set eq1s, where both sides of a pair are type variables,
* and a set eq2s containing the pairs where at least one side is not a type variable.
* @param eq Set of pairs to be split.
* @param eq1s Subset of eq where both sides are type variables.
* @param eq2s The remaining pairs, i.e. eq without eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.get(0).add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.get(7).add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the above forms are contradictory.
else
undefined.add(pair);
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
List<UnifyType> freshTphs = new ArrayList<>();
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
for(UnifyType theta : smaller) {
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
UnifyType freshTheta = theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0])));
resultPrime.add(new UnifyPair(a, freshTheta, PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
result.add(resultPrime);
}
}
}
}
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 4: (a <.? Theta')
*/
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.EQUALSDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
result.add(resultPrime);
}
return result;
}
/**
* Cartesian Product Case 6: (? ext Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType extFreshTph = new ExtendsType(freshTph);
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extFreshTph, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(extTheta.getExtendedType(), freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 7: (? sup Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType theta = supTheta.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
//}
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
// New implementation (this commit): unify delegates to a TypeUnifyTask
// executed on a ForkJoinPool and returns the joined result.
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return unifyTask.join();
}
}

TypeUnifyTask.java

@@ -0,0 +1,631 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
/**
* Implementation of the type unification algorithm as a RecursiveTask, so that
* recursive unify calls can be forked onto a ForkJoinPool.
* @author Florian Steurer
*/
public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules = new RuleSet();
protected Set<UnifyPair> eq;
protected IFiniteClosure fc;
public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc) {
this.eq = eq;
this.fc = fc;
}
@Override
protected Set<Set<UnifyPair>> compute() {
return unify(eq, fc);
}
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel sets. 8 of the 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
// If pairs occurred that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty())
return new HashSet<>();
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
// Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
for(List<Set<UnifyPair>> s : cartResult) {
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
flat.add(flat1);
}
topLevelSets.add(flat);
}
// Cartesian product over all (up to 10) top level sets
Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<TypeUnifyTask> forks = new HashSet<>();
for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
// Flatten the cartesian product
Set<UnifyPair> eqPrime = new HashSet<>();
setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
/*
* Step 5: Substitution
*/
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
Set<TypeUnifyTask> restart = new HashSet<>();
if (eqPrime.equals(eq))
eqPrimePrimeSet.add(eqPrime);
else if(eqPrimePrime.isPresent()) {
TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc);
forks.add(fork);
fork.fork();
}
//eqPrimePrimeSet.addAll(this.unify(eqPrimePrime.get(), fc));
else {
TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc);
forks.add(fork);
fork.fork();
// eqPrimePrimeSet.addAll(this.unify(eqPrime, fc));
}
}
/*
* Step 6 b) Build the union over everything.
*/
// TODO Parallel stream and a synchronized set?
for(TypeUnifyTask fork : forks)
eqPrimePrimeSet.addAll(fork.join());
/*
* Step 7: Filter for sets in solved form.
*/
return eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pairs
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : rules.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the set eq into a set eq1s, where both sides of a pair are type variables,
* and a set eq2s containing the pairs where at least one side is not a type variable.
* @param eq Set of pairs to be split.
* @param eq1s Subset of eq where both sides are type variables.
* @param eq2s The remaining pairs, i.e. eq without eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.get(0).add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.get(7).add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the above forms are contradictory.
else
undefined.add(pair);
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
List<UnifyType> freshTphs = new ArrayList<>();
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
for(UnifyType theta : smaller) {
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
UnifyType freshTheta = theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0])));
resultPrime.add(new UnifyPair(a, freshTheta, PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
result.add(resultPrime);
}
}
}
}
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 4: (a <.? Theta')
*/
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.EQUALSDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
result.add(resultPrime);
}
return result;
}
/**
* Cartesian Product Case 6: (? ext Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType extFreshTph = new ExtendsType(freshTph);
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extFreshTph, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(extTheta.getExtendedType(), freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 7: (? sup Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType theta = supTheta.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
//}
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
}
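
The parallel core of TypeUnifyTask sits in step 6 of unify above: every Cartesian-product element that still needs work is wrapped in a new TypeUnifyTask, fork() is called on it, and only afterwards are all forks join()ed and their results unioned. A self-contained sketch of that fork-all/join-all shape, using an invented name and toy integer data rather than UnifyPair sets:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Hypothetical illustration only; FanOutTask is not part of the project.
class FanOutTask extends RecursiveTask<Set<Integer>> {
    private final int value;
    private final int depth;

    FanOutTask(int value, int depth) {
        this.value = value;
        this.depth = depth;
    }

    @Override
    protected Set<Integer> compute() {
        Set<Integer> result = new HashSet<>();
        if (depth == 0) {                          // base case: nothing left to fork
            result.add(value);
            return result;
        }
        List<FanOutTask> forks = new ArrayList<>();
        for (int i = 0; i < 3; i++) {              // fork one task per subproblem
            FanOutTask fork = new FanOutTask(value * 10 + i, depth - 1);
            forks.add(fork);
            fork.fork();
        }
        for (FanOutTask fork : forks)              // build the union over everything
            result.addAll(fork.join());
        return result;
    }

    public static void main(String[] args) {
        Set<Integer> leaves = new ForkJoinPool().invoke(new FanOutTask(1, 2));
        System.out.println(leaves.size() + " results: " + leaves);  // 9 results
    }
}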

UnifyTest.java

@@ -792,52 +792,52 @@ public class UnifyTest extends TypeUnify {
}
@Test
public void permuteParamsTest() {
TypeFactory tf = new TypeFactory();
ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
UnifyType p11 = tf.getPlaceholderType("p11");
UnifyType p12 = tf.getExtendsType(tf.getSimpleType("p12"));
UnifyType p13 = tf.getSimpleType("p13");
UnifyType p21 = tf.getPlaceholderType("p21");
UnifyType p22 = tf.getPlaceholderType("p22");
UnifyType p31 = tf.getSimpleType("p31", "T");
Set<UnifyType> p1 = new HashSet<>();
p1.add(p11);
p1.add(p12);
p1.add(p13);
Set<UnifyType> p2 = new HashSet<>();
p2.add(p21);
p2.add(p22);
Set<UnifyType> p3 = new HashSet<>();
p3.add(p31);
candidates.add(p1);
candidates.add(p2);
candidates.add(p3);
/*
* Expected Result:
* {<x, y, z> | x in { p11, p12, p13}, y in { p21, p22 }, z in { p31 }}
*/
Set<TypeParams> expected = Arrays.stream(new TypeParams[] {
new TypeParams(p11, p21, p31),
new TypeParams(p11, p22, p31),
new TypeParams(p12, p21, p31),
new TypeParams(p12, p22, p31),
new TypeParams(p13, p21, p31),
new TypeParams(p13, p22, p31)
}).collect(Collectors.toSet());
Set<TypeParams> actual = permuteParams(candidates);
Assert.assertEquals(expected, actual);
}
// @Test
// public void permuteParamsTest() {
// TypeFactory tf = new TypeFactory();
// ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
//
// UnifyType p11 = tf.getPlaceholderType("p11");
// UnifyType p12 = tf.getExtendsType(tf.getSimpleType("p12"));
// UnifyType p13 = tf.getSimpleType("p13");
// UnifyType p21 = tf.getPlaceholderType("p21");
// UnifyType p22 = tf.getPlaceholderType("p22");
// UnifyType p31 = tf.getSimpleType("p31", "T");
//
// Set<UnifyType> p1 = new HashSet<>();
// p1.add(p11);
// p1.add(p12);
// p1.add(p13);
//
// Set<UnifyType> p2 = new HashSet<>();
// p2.add(p21);
// p2.add(p22);
//
// Set<UnifyType> p3 = new HashSet<>();
// p3.add(p31);
//
// candidates.add(p1);
// candidates.add(p2);
// candidates.add(p3);
//
//
// /*
// * Expected Result:
// * {<x, y, z> | x in { p11, p12, p13}, y in { p21, p22 }, z in { p31 }}
// */
// Set<TypeParams> expected = Arrays.stream(new TypeParams[] {
// new TypeParams(p11, p21, p31),
// new TypeParams(p11, p22, p31),
// new TypeParams(p12, p21, p31),
// new TypeParams(p12, p22, p31),
// new TypeParams(p13, p21, p31),
// new TypeParams(p13, p22, p31)
// }).collect(Collectors.toSet());
//
// Set<TypeParams> actual = permuteParams(candidates);
//
// Assert.assertEquals(expected, actual);
// }
private Set<Set<UnifyPair>> filterGeneratedTPHsMultiple(Set<Set<UnifyPair>> set) {
return set.stream().map(x -> filterGeneratedTPHs(x)).collect(Collectors.toSet());