Implement first version of Unify2 with simple test
This commit is contained in:
parent
9a0de3f193
commit
3019a0f513
src/main/java/de/dhbwstuttgart/unify2/MartelliMontanariUnify.java (new file)
@@ -0,0 +1,108 @@
package de.dhbwstuttgart.unify2;

import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.*;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Implementation of the Martelli-Montanari unification algorithm.
 * @author Florian Steurer
 */
public class MartelliMontanariUnify {

    /**
     * Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
     * sigma(t1) = sigma(t1'), ..., sigma(tn) = sigma(tn').
     * @param terms The set of terms to be unified
     * @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
     */
    public static Optional<Unifier> unify(UnifyType... terms) {
        return unify(Arrays.stream(terms).collect(Collectors.toSet()));
    }

    public static Optional<Unifier> unify(Set<UnifyType> terms) {
        // Sets with less than 2 terms are trivially unified
        if(terms.size() < 2)
            return Optional.of(Unifier.identity());

        // For the set of terms {t1,...,tn},
        // build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
        ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
        Iterator<UnifyType> iter = terms.iterator();
        UnifyType prev = iter.next();
        while(iter.hasNext()) {
            UnifyType next = iter.next();
            termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
            prev = next;
        }

        // Start with the identity unifier. Substitutions will be added later.
        Unifier mgu = Unifier.identity();

        // Apply rules while possible
        int idx = 0;
        while(idx < termsList.size()) {
            UnifyPair pair = termsList.get(idx);
            UnifyType rhsType = pair.getRhsType();
            UnifyType lhsType = pair.getLhsType();
            TypeParams rhsTypeParams = rhsType.getTypeParams();
            TypeParams lhsTypeParams = lhsType.getTypeParams();

            // REDUCE - Rule
            if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
                Set<UnifyPair> result = new HashSet<>();

                // f<...> = g<...> with f != g are not unifiable
                if(!rhsType.getName().equals(lhsType.getName()))
                    return Optional.empty(); // conflict
                // f<t1,...,tn> = f<s1,...,sm> with n != m are not unifiable
                if(rhsTypeParams.size() != lhsTypeParams.size())
                    return Optional.empty(); // conflict
                // f = g is not unifiable (cannot be f = f because erase rule would have been applied)
                //if(rhsTypeParams.size() == 0)
                    //return Optional.empty();

                // Unpack the arguments
                for(int i = 0; i < rhsTypeParams.size(); i++)
                    result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));

                termsList.remove(idx);
                termsList.addAll(result);
                continue;
            }

            // DELETE - Rule
            if(pair.getRhsType().equals(pair.getLhsType())) {
                termsList.remove(idx);
                continue;
            }

            // SWAP - Rule
            if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
                termsList.remove(idx);
                termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
                continue;
            }

            // OCCURS-CHECK
            if(pair.getLhsType() instanceof PlaceholderType
                    && pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
                return Optional.empty();

            // SUBST - Rule
            if(lhsType instanceof PlaceholderType) {
                mgu.add((PlaceholderType) lhsType, rhsType);
                //PL 2018-04-01: check whether it is correct that no substitutions need to be passed on here.
                termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
                idx = idx+1 == termsList.size() ? 0 : idx+1;
                continue;
            }

            idx++;
        }

        return Optional.of(mgu);
    }
}
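A minimal usage sketch of the new unifier (editor's addition, not part of this commit; it only uses classes that already appear in this diff, and the package location of the model classes is assumed from the wildcard import above):

    package unify;

    import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
    import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
    import de.dhbwstuttgart.typeinference.unify.model.Unifier;
    import de.dhbwstuttgart.unify2.MartelliMontanariUnify;

    import java.util.Optional;

    // Editor's sketch: unifying a placeholder a with "? extends b" should succeed,
    // since only the SUBST rule applies and binds a to ? extends b.
    public class MartelliMontanariUnifySketch {
        public static void main(String[] args) {
            PlaceholderType a = PlaceholderType.freshPlaceholder();
            PlaceholderType b = PlaceholderType.freshPlaceholder();

            Optional<Unifier> mgu = MartelliMontanariUnify.unify(a, new ExtendsType(b));
            assert mgu.isPresent(); // a and ? extends b are unifiable
        }
    }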
@@ -28,7 +28,7 @@ public class RuleSet {
     * This is step one of the unification algorithm.
     * @return The set of pairs that results from repeated application of the inference rules.
     */
    protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
    public static Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {

        /*
         * Rule Application Strategy:
@@ -683,82 +683,6 @@ public class RuleSet {
        return succ ? permutation : new int[0];
    }

    static Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
        return subst(pairs, new ArrayList<>());
    }

    static Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) {
        HashMap<UnifyType, Integer> typeMap = new HashMap<>();

        Stack<UnifyType> occuringTypes = new Stack<>();

        for(UnifyPair pair : pairs) {
            occuringTypes.push(pair.getLhsType());
            occuringTypes.push(pair.getRhsType());
        }

        while(!occuringTypes.isEmpty()) {
            UnifyType t1 = occuringTypes.pop();
            if(!typeMap.containsKey(t1))
                typeMap.put(t1, 0);
            typeMap.put(t1, typeMap.get(t1)+1);

            if(t1 instanceof ExtendsType)
                occuringTypes.push(((ExtendsType) t1).getExtendedType());
            if(t1 instanceof SuperType)
                occuringTypes.push(((SuperType) t1).getSuperedType());
            else
                t1.getTypeParams().forEach(x -> occuringTypes.push(x));
        }

        Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
        ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
        boolean applied = false;

        while(!result1.isEmpty()) {
            UnifyPair pair = result1.poll();
            PlaceholderType lhsType = null;
            UnifyType rhsType;

            if(pair.getPairOp() == PairOperator.EQUALSDOT
                    && pair.getLhsType() instanceof PlaceholderType)
                lhsType = (PlaceholderType) pair.getLhsType();
            rhsType = pair.getRhsType(); //PL added 2017-09-29, instead of !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
            if(lhsType != null
                    //&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL removed 2017-09-29, reason: type variables must be substituted as well.
                    && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
                    && !rhsType.getTypeParams().occurs(lhsType)
                    && !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL added 2018-02-18
            {
                Unifier uni = new Unifier(lhsType, rhsType);
                result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
                result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));

                Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
                        x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
                        ? () -> new Constraint<UnifyPair>(
                                b.isInherited(),
                                b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)))
                        : () -> new Constraint<UnifyPair>(b.isInherited())
                        ));
                oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
                /*
                oderConstraints = oderConstraints.stream().map(
                        a -> a.stream().map(applyUni
                        //b -> b.stream().map(
                        //        x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new) )
                        ).collect(Collectors.toCollection(HashSet::new))
                    ).collect(Collectors.toList(ArrayList::new));
                }
                */
                applied = true;
            }
            result.add(pair);
        }

        return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
    }

    static Optional<UnifyPair> reduceWildcardLow(UnifyPair pair) {
        if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
            return Optional.empty();
@@ -1,375 +1,54 @@
package de.dhbwstuttgart.unify2;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSetBuilder;
import de.dhbwstuttgart.unify2.model.UnifyOderConstraint;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TypeUnify {

    public void unifyOrConstraints(UnifyConstraintSet eq, FiniteClosure fc){

        eq.cartesianProductParallel().map(eqPrime -> unify(eqPrime, fc));
    public static Optional<Set<UnifyPair>> unifyOrConstraints(UnifyConstraintSet eq, FiniteClosure fc){
        return eq.cartesianProductParallel().map(eqPrime -> unify(eqPrime, fc)).filter(Optional::isPresent).map(Optional::get).findAny();
    }

    public Optional<UnifyResultModel> unify(Set<UnifyPair> eq, FiniteClosure fc){
    public static Optional<Set<UnifyPair>> unify(Set<UnifyPair> eq, FiniteClosure fc){
        /*
        TODO: Here one could check whether it makes sense to continue with eq at all.
        There could be an object shared across threads (a field in TypeUnify) that learns impossible clauses.
        */

        //Apply Reduce and Apply rules
        Set<UnifyPair> res = applyTypeUnificationRules(eq, fc);
        Set<UnifyPair> res = RuleSet.applyTypeUnificationRules(eq, fc);
        //Split result

        UnifyConstraintSet constraintSet = step4(res, fc);
        return constraintSet.cartesianProductParallel().map(toSubst -> {
            //TODO: try subst
            toSubst = toSubst; //substitute here
            //if it changed:
            if (true) {
                return unify(toSubst, fc);
            }else{
                //TODO: return the result
                return Optional.<UnifyResultModel>empty();
            }
        }).filter(it -> it.isPresent()).map(Optional::get).findAny();
    }


    /**
     * Repeatedly applies type unification rules to a set of equations.
     * This is step one of the unification algorithm.
     * @return The set of pairs that results from repeated application of the inference rules.
     */
    protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, FiniteClosure fc) {

        /*
         * Rule Application Strategy:
         *
         * 1. Swap all pairs and erase all erasable pairs
         * 2. Apply all possible rules to a single pair, then move it to the result set.
         *    Iterating over pairs first, then iterating over rules prevents the application
         *    of rules to a "finished" pair over and over.
         * 2.1 Apply all rules repeatedly except for erase rules. If
         *     the application of a rule creates new pairs, check immediately
         *     against the erase rules.
         * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
         */


        LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
        LinkedList<UnifyPair> eqQueue = new LinkedList<>();

        /*
         * Swap all pairs and erase all erasable pairs
         */
        eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));

        /*
         * Apply rules until the queue is empty
         */
        while(!eqQueue.isEmpty()) {
            UnifyPair pair = eqQueue.pollFirst();

            // ReduceUp, ReduceLow, ReduceUpLow
            Optional<UnifyPair> opt = rules.reduceUpLow(pair);
            opt = opt.isPresent() ? opt : rules.reduceLow(pair);
            opt = opt.isPresent() ? opt : rules.reduceUp(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
            opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);

            // Reduce TPH
            opt = opt.isPresent() ? opt : rules.reduceTph(pair);

            // One of the rules has been applied
            if(opt.isPresent()) {
                swapAddOrErase(opt.get(), fc, eqQueue);
                continue;
            }

            // Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
            Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
            optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
            optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
            optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);

            // ReduceTphExt, ReduceTphSup
            optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);


            // FunN Rules
            optSet = optSet.isPresent() ? optSet : rules.reduceFunN(pair);
            optSet = optSet.isPresent() ? optSet : rules.greaterFunN(pair);
            optSet = optSet.isPresent() ? optSet : rules.smallerFunN(pair);

            // One of the rules has been applied
            if(optSet.isPresent()) {
                optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
                continue;
            }

            // Adapt, AdaptExt, AdaptSup
            opt = rules.adapt(pair, fc);
            opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
            opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);

            // One of the rules has been applied
            if(opt.isPresent()) {
                swapAddOrErase(opt.get(), fc, eqQueue);
                continue;
            }

            // None of the rules has been applied
            targetSet.add(pair);
        }

        return targetSet;
    }

    /**
     * Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
     * @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
     * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
     * (as in case 1 where sigma is added to the innermost set).
     */
    protected UnifyConstraintSet step4(Set<UnifyPair> eq2s, FiniteClosure fc) {
        Set<UnifyOderConstraint> result = new HashSet<>(8);

        for(UnifyPair pair : eq2s) {
            PairOperator pairOp = pair.getPairOp();
            UnifyType lhsType = pair.getLhsType();
            UnifyType rhsType = pair.getRhsType();

            // Case 1: (a <. Theta')
            if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
                result.add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));

            // Case 2: (a <.? ? ext Theta')
            else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
                result.add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));

            // Case 3: (a <.? ? sup Theta')
            else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
                result.add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));

            // Case 4 was replaced by an inference rule
            // Case 4: (a <.? Theta')
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
            //    result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));

            // Case 5: (Theta <. a)
            else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
                result.add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));

            // Case 6 was replaced by an inference rule.
            // Case 6: (? ext Theta <.? a)
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
            //    result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));

            // Case 7 was replaced by an inference rule
            // Case 7: (? sup Theta <.? a)
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
            //    result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));

            // Case 8: (Theta <.? a)
            else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
                result.add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
            // Case unknown: If a pair fits no other case, then the type unification has failed.
            // Through application of the rules, every pair should have one of the above forms.
            // Pairs that do not have one of the above forms are contradictory.
            else {
                break;
            }
        }

        // Filter empty sets or sets that only contain an empty set.
        //Andi: Why? Should they exist? this should be an error then
        return new UnifyConstraintSet(result);
    }


    /**
     * Cartesian product Case 1: (a <. Theta')
     */
    protected UnifyOderConstraint unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        boolean allGen = thetaPrime.getTypeParams().size() > 0;
        for(UnifyType t : thetaPrime.getTypeParams())
            if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
                allGen = false;
                break;
            }

        Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
        cs.add(thetaPrime);

        for(UnifyType c : cs) {
            Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
            //thetaQs.add(thetaPrime);
            Set<UnifyType> thetaQPrimes = new HashSet<>();
            TypeParams cParams = c.getTypeParams();
            if(cParams.size() == 0)
                thetaQPrimes.add(c);
            else {
                ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();

                for(UnifyType param : cParams)
                    candidateParams.add(fc.grArg(param));

                for(TypeParams tp : permuteParams(candidateParams))
                    thetaQPrimes.add(c.setTypeParams(tp));
            }

            for(UnifyType tqp : thetaQPrimes) {
                Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
                if (!opt.isPresent())
                    continue;

                Unifier unifier = opt.get();
                unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
                Set<UnifyPair> substitutionSet = new HashSet<>();
                for (Entry<PlaceholderType, UnifyType> sigma : unifier)
                    substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));

                List<UnifyType> freshTphs = new ArrayList<>();
                for (UnifyType tq : thetaQs) {
                    Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
                    for(UnifyType theta : smaller) {
                        Set<UnifyPair> resultPrime = new HashSet<>();

                        for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
                            if(freshTphs.size()-1 < i)
                                freshTphs.add(PlaceholderType.freshPlaceholder());
                            resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
                        }

                        if(allGen)
                            resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
                        else
                            resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
                        resultPrime.addAll(substitutionSet);
                        result.add(resultPrime);
                    }
                }

            }
        }

        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 2: (a <.? ? ext Theta')
     */
    private UnifyOderConstraint unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        UnifyType aPrime = PlaceholderType.freshPlaceholder();
        UnifyType extAPrime = new ExtendsType(aPrime);
        UnifyType thetaPrime = extThetaPrime.getExtendedType();
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);
        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 3: (a <.? ? sup Theta')
     */
    private UnifyOderConstraint unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        UnifyType aPrime = PlaceholderType.freshPlaceholder();
        UnifyType supAPrime = new SuperType(aPrime);
        UnifyType thetaPrime = subThetaPrime.getSuperedType();
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 5: (Theta <. a)
     */
    private UnifyOderConstraint unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        boolean allGen = theta.getTypeParams().size() > 0;
        for(UnifyType t : theta.getTypeParams())
            if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
                allGen = false;
                break;
            }

        for(UnifyType thetaS : fc.greater(theta)) {
            Set<UnifyPair> resultPrime = new HashSet<>();

            UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
            for(int i = 0; !allGen && i < freshTphs.length; i++) {
                freshTphs[i] = PlaceholderType.freshPlaceholder();
                resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
            }

            if(allGen)
                resultPrime.add(new UnifyPair(a, thetaS, PairOperator.EQUALSDOT));
        Set<UnifyPair> eq1s = new HashSet<>();
        Set<UnifyPair> eq2s = new HashSet<>();
        for(UnifyPair pair : res) {
            if (pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
                eq1s.add(pair);
            else
                resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
            result.add(resultPrime);
                eq2s.add(pair);
        }

        return new UnifyOderConstraint(result);
        Optional<UnifyConstraintSet> step4Res = Unify.step4(eq1s, eq2s, fc);

        //If step4 yields a result, apply subst and the recursive unify call:
        return step4Res.flatMap(constraintSet ->
            constraintSet.cartesianProductParallel().map(toSubst -> {
                Optional<Set<UnifyPair>> substitutionResult = Unify.subst(toSubst); //substitute here
                //if it changed:
                if (substitutionResult.isPresent()) {
                    return unify(substitutionResult.get(), fc);
                }else{
                    //TODO: return the result
                    return Optional.of(toSubst);
                }
            }).filter(it -> it.isPresent()).map(Optional::get).findAny());
    }

    /**
     * Cartesian Product Case 8: (Theta <.? a)
     */
    private UnifyOderConstraint unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();
        //for(UnifyType thetaS : fc.grArg(theta)) {
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
        result.add(resultPrime);

        UnifyType freshTph = PlaceholderType.freshPlaceholder();
        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
        result.add(resultPrime);
        //}

        return new UnifyOderConstraint(result);
    }

}
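In the hunk above the removed and the added versions of TypeUnify interleave, which makes the new unify body hard to follow. As an editor's sketch (not part of the commit), the split into eq1s and eq2s that the new method performs before calling Unify.step4(eq1s, eq2s, fc) looks like this in isolation:

    package de.dhbwstuttgart.unify2;

    import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Editor's sketch: eq1s collects pairs whose two sides are both type placeholders (TPHs),
    // eq2s collects every other pair; step4 then builds its case sets from eq2s only.
    class TphSplitSketch {
        static List<Set<UnifyPair>> split(Set<UnifyPair> res) {
            Set<UnifyPair> eq1s = new HashSet<>();
            Set<UnifyPair> eq2s = new HashSet<>();
            for (UnifyPair pair : res) {
                if (pair.getLhsType() instanceof PlaceholderType
                        && pair.getRhsType() instanceof PlaceholderType)
                    eq1s.add(pair);
                else
                    eq2s.add(pair);
            }
            return List.of(eq1s, eq2s);
        }
    }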
src/main/java/de/dhbwstuttgart/unify2/Unify.java (new file, 340 lines)
@@ -0,0 +1,340 @@
package de.dhbwstuttgart.unify2;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import de.dhbwstuttgart.unify2.model.UnifyOderConstraint;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

public class Unify {

    /**
     * Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
     * @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
     * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
     * (as in case 1 where sigma is added to the innermost set).
     */
    public static Optional<UnifyConstraintSet> step4(Set<UnifyPair> eq1s, Set<UnifyPair> eq2s, FiniteClosure fc) {
        Set<UnifyOderConstraint> result = new HashSet<>(8);

        for(UnifyPair pair : eq2s) {
            PairOperator pairOp = pair.getPairOp();
            UnifyType lhsType = pair.getLhsType();
            UnifyType rhsType = pair.getRhsType();

            // Case 1: (a <. Theta')
            if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
                result.add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));

            // Case 2: (a <.? ? ext Theta')
            else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
                result.add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));

            // Case 3: (a <.? ? sup Theta')
            else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
                result.add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));

            // Case 4 was replaced by an inference rule
            // Case 4: (a <.? Theta')
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
            //    result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));

            // Case 5: (Theta <. a)
            else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
                result.add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));

            // Case 6 was replaced by an inference rule.
            // Case 6: (? ext Theta <.? a)
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
            //    result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));

            // Case 7 was replaced by an inference rule
            // Case 7: (? sup Theta <.? a)
            //else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
            //    result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));

            // Case 8: (Theta <.? a)
            else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
                result.add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
            // Case unknown: If a pair fits no other case, then the type unification has failed.
            // Through application of the rules, every pair should have one of the above forms.
            // Pairs that do not have one of the above forms are contradictory.
            else {
                return Optional.empty();
            }
        }
        result.add(new UnifyOderConstraint(Set.of(eq1s)));
        // Filter empty sets or sets that only contain an empty set.
        //Andi: Why? Should they exist? this should be an error then
        return Optional.of(new UnifyConstraintSet(result));
    }


    /**
     * Cartesian product Case 1: (a <. Theta')
     */
    static UnifyOderConstraint unifyCase1(PlaceholderType a, UnifyType thetaPrime, FiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        boolean allGen = thetaPrime.getTypeParams().size() > 0;
        for(UnifyType t : thetaPrime.getTypeParams())
            if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
                allGen = false;
                break;
            }

        Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
        cs.add(thetaPrime);

        for(UnifyType c : cs) {
            Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
            //thetaQs.add(thetaPrime);
            Set<UnifyType> thetaQPrimes = new HashSet<>();
            TypeParams cParams = c.getTypeParams();
            if(cParams.size() == 0)
                thetaQPrimes.add(c);
            else {
                ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();

                for(UnifyType param : cParams)
                    candidateParams.add(fc.grArg(param, new HashSet<>()));

                for(TypeParams tp : permuteParams(candidateParams))
                    thetaQPrimes.add(c.setTypeParams(tp));
            }

            for(UnifyType tqp : thetaQPrimes) {
                Optional<Unifier> opt = MartelliMontanariUnify.unify(tqp, thetaPrime);
                if (!opt.isPresent())
                    continue;

                Unifier unifier = opt.get();
                unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
                Set<UnifyPair> substitutionSet = new HashSet<>();
                for (Map.Entry<PlaceholderType, UnifyType> sigma : unifier)
                    substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));

                List<UnifyType> freshTphs = new ArrayList<>();
                for (UnifyType tq : thetaQs) {
                    Set<UnifyType> smaller = fc.smaller(unifier.apply(tq), new HashSet<>());
                    for(UnifyType theta : smaller) {
                        Set<UnifyPair> resultPrime = new HashSet<>();

                        for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
                            if(freshTphs.size()-1 < i)
                                freshTphs.add(PlaceholderType.freshPlaceholder());
                            resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
                        }

                        if(allGen)
                            resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
                        else
                            resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
                        resultPrime.addAll(substitutionSet);
                        result.add(resultPrime);
                    }
                }

            }
        }

        return new UnifyOderConstraint(result);
    }

    /**
     * Takes a set of candidates for each position and computes all possible permutations.
     * @param candidates The length of the list determines the number of type params. Each set
     * contains the candidates for the corresponding position.
     */
    static Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
        Set<TypeParams> result = new HashSet<>();
        permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
        return result;
    }

    /**
     * Takes a set of candidates for each position and computes all possible permutations.
     * @param candidates The length of the list determines the number of type params. Each set
     * contains the candidates for the corresponding position.
     * @param idx Idx for the current permutation.
     * @param result Set of all permutations found so far
     * @param current The permutation of type params that is currently explored
     */
    static void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
        if(candidates.size() == idx) {
            result.add(new TypeParams(Arrays.copyOf(current, current.length)));
            return;
        }

        Set<UnifyType> localCandidates = candidates.get(idx);

        for(UnifyType t : localCandidates) {
            current[idx] = t;
            permuteParams(candidates, idx+1, result, current);
        }
    }

    /**
     * Cartesian Product Case 2: (a <.? ? ext Theta')
     */
    static UnifyOderConstraint unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        UnifyType aPrime = PlaceholderType.freshPlaceholder();
        UnifyType extAPrime = new ExtendsType(aPrime);
        UnifyType thetaPrime = extThetaPrime.getExtendedType();
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);
        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 3: (a <.? ? sup Theta')
     */
    static UnifyOderConstraint unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        UnifyType aPrime = PlaceholderType.freshPlaceholder();
        UnifyType supAPrime = new SuperType(aPrime);
        UnifyType thetaPrime = subThetaPrime.getSuperedType();
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 5: (Theta <. a)
     */
    static UnifyOderConstraint unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();

        boolean allGen = theta.getTypeParams().size() > 0;
        for(UnifyType t : theta.getTypeParams())
            if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
                allGen = false;
                break;
            }

        for(UnifyType thetaS : fc.greater(theta, new HashSet<>())) {
            Set<UnifyPair> resultPrime = new HashSet<>();

            UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
            for(int i = 0; !allGen && i < freshTphs.length; i++) {
                freshTphs[i] = PlaceholderType.freshPlaceholder();
                resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
            }

            if(allGen)
                resultPrime.add(new UnifyPair(a, thetaS, PairOperator.EQUALSDOT));
            else
                resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
            result.add(resultPrime);
        }

        return new UnifyOderConstraint(result);
    }

    /**
     * Cartesian Product Case 8: (Theta <.? a)
     */
    static UnifyOderConstraint unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
        Set<Set<UnifyPair>> result = new HashSet<>();
        //for(UnifyType thetaS : fc.grArg(theta)) {
        Set<UnifyPair> resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
        result.add(resultPrime);

        UnifyType freshTph = PlaceholderType.freshPlaceholder();
        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
        result.add(resultPrime);

        resultPrime = new HashSet<>();
        resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
        resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
        result.add(resultPrime);
        //}

        return new UnifyOderConstraint(result);
    }

    static Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
        HashMap<UnifyType, Integer> typeMap = new HashMap<>();

        Stack<UnifyType> occuringTypes = new Stack<>();

        for(UnifyPair pair : pairs) {
            occuringTypes.push(pair.getLhsType());
            occuringTypes.push(pair.getRhsType());
        }

        while(!occuringTypes.isEmpty()) {
            UnifyType t1 = occuringTypes.pop();
            if(!typeMap.containsKey(t1))
                typeMap.put(t1, 0);
            typeMap.put(t1, typeMap.get(t1)+1);

            if(t1 instanceof ExtendsType)
                occuringTypes.push(((ExtendsType) t1).getExtendedType());
            if(t1 instanceof SuperType)
                occuringTypes.push(((SuperType) t1).getSuperedType());
            else
                t1.getTypeParams().forEach(x -> occuringTypes.push(x));
        }

        Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
        ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
        boolean applied = false;

        while(!result1.isEmpty()) {
            UnifyPair pair = result1.poll();
            PlaceholderType lhsType = null;
            UnifyType rhsType;

            if(pair.getPairOp() == PairOperator.EQUALSDOT
                    && pair.getLhsType() instanceof PlaceholderType)
                lhsType = (PlaceholderType) pair.getLhsType();
            rhsType = pair.getRhsType(); //PL added 2017-09-29, instead of !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
            if(lhsType != null
                    //&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL removed 2017-09-29, reason: type variables must be substituted as well.
                    && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
                    && !rhsType.getTypeParams().occurs(lhsType)
                    && !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL added 2018-02-18
            {
                Unifier uni = new Unifier(lhsType, rhsType);
                result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
                result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));

                Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
                        x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
                        ? () -> new Constraint<UnifyPair>(
                                b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)))
                        : () -> new Constraint<UnifyPair>()
                        ));
                applied = true;
            }
            result.add(pair);
        }

        return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
    }

}
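An editor's sketch of what permuteParams computes (not part of the commit; it assumes only that distinct parameter lists end up as distinct TypeParams objects): with candidate sets {a, b} for position 0 and {c} for position 1, the result contains the two parameter lists (a, c) and (b, c).

    package de.dhbwstuttgart.unify2;

    import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
    import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

    import java.util.ArrayList;
    import java.util.Set;

    // Editor's sketch: permuteParams picks one candidate per type-parameter position.
    class PermuteParamsSketch {
        public static void main(String[] args) {
            PlaceholderType a = PlaceholderType.freshPlaceholder();
            PlaceholderType b = PlaceholderType.freshPlaceholder();
            PlaceholderType c = PlaceholderType.freshPlaceholder();

            ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
            candidates.add(Set.<UnifyType>of(a, b)); // candidates for position 0
            candidates.add(Set.<UnifyType>of(c));    // candidates for position 1

            Set<TypeParams> permutations = Unify.permuteParams(candidates);
            assert permutations.size() == 2; // (a, c) and (b, c)
        }
    }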
src/main/java/de/dhbwstuttgart/unify2/UnifyResult.java (new file, 11 lines)
@@ -0,0 +1,11 @@
package de.dhbwstuttgart.unify2;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.Set;

public class UnifyResult {
    public UnifyResult(Set<UnifyPair> toSubst) {

    }
}
src/test/java/unify/RuleSetTest.java (new file, 4 lines)
@@ -0,0 +1,4 @@
package unify;

public class RuleSetTest {
}
src/test/java/unify/TypeUnifyTest.java (new file, 47 lines)
@@ -0,0 +1,47 @@
package unify;

import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.unify2.TypeUnify;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import de.dhbwstuttgart.unify2.model.UnifyOderConstraint;
import org.junit.Test;

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;

public class TypeUnifyTest {
    @Test
    public void emptyInput(){
        UnifyConstraintSet empty = new UnifyConstraintSet(new HashSet<>());
        TypeUnify.unifyOrConstraints(empty, emptyFC());
    }

    @Test
    public void singleOrConstraintOnlyTPHs(){
        Set<UnifyPair> pairs = Set.of(new UnifyPair(PlaceholderType.freshPlaceholder(), PlaceholderType.freshPlaceholder(), PairOperator.EQUALSDOT));
        UnifyOderConstraint orConstraint = new UnifyOderConstraint(Set.of(pairs));
        UnifyConstraintSet input = new UnifyConstraintSet(Set.of(orConstraint));
        Optional<Set<UnifyPair>> res = TypeUnify.unifyOrConstraints(input, emptyFC());
        assert res.isPresent();
    }

    @Test
    public void unifyTest1(){
        PlaceholderType tph1 = PlaceholderType.freshPlaceholder();
        PlaceholderType tph2 = PlaceholderType.freshPlaceholder();
        UnifyPair p1 = new UnifyPair(tph1, tph2, PairOperator.SMALLERDOT);
        Set<UnifyPair> pairs = Set.of(p1);
        UnifyOderConstraint orConstraint = new UnifyOderConstraint(Set.of(pairs));
        UnifyConstraintSet input = new UnifyConstraintSet(Set.of(orConstraint));
        Optional<Set<UnifyPair>> res = TypeUnify.unifyOrConstraints(input, emptyFC());
        assert res.isPresent();
    }

    private FiniteClosure emptyFC(){
        return new FiniteClosure(new HashSet<>(), null);
    }
}
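One more test sketch in the same style (editor's addition, not part of the commit): it exercises Unify.subst directly and therefore assumes the test class sits in the de.dhbwstuttgart.unify2 package, since subst is package-private; a placeholder that occurs in two pairs and is equated with another type on the left of an EQUALSDOT pair should trigger a substitution.

    package de.dhbwstuttgart.unify2;

    import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
    import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
    import org.junit.Test;

    import java.util.Optional;
    import java.util.Set;

    public class SubstSketchTest {
        @Test
        public void substAppliesWhenPlaceholderOccursTwice() {
            PlaceholderType a = PlaceholderType.freshPlaceholder();
            PlaceholderType b = PlaceholderType.freshPlaceholder();
            PlaceholderType c = PlaceholderType.freshPlaceholder();

            // a occurs in both pairs; a =. b allows subst to replace a by b in the other pair.
            Set<UnifyPair> pairs = Set.of(
                    new UnifyPair(a, b, PairOperator.EQUALSDOT),
                    new UnifyPair(a, c, PairOperator.SMALLERDOT));

            Optional<Set<UnifyPair>> res = Unify.subst(pairs);
            assert res.isPresent();
        }
    }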