comments & refactoring
parent aa692c2f25
commit 044e6fbc3f
@@ -7,10 +7,16 @@ import com.google.common.collect.Sets;
 import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
 
+/**
+ * Implements set operations using google guava.
+ * @author DH10STF
+ *
+ */
 public class GuavaSetOperations implements ISetOperations {
 
     @Override
     public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
+        // Wraps the call to google guava
         return Sets.cartesianProduct(sets);
     }
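The cartesianProduct method above simply delegates to Guava's Sets.cartesianProduct, which returns every list that takes exactly one element from each input set. A minimal, self-contained sketch of what the wrapped call produces (not part of this commit; the demo class name is made up):

    import java.util.List;
    import java.util.Set;
    import com.google.common.collect.Sets;

    public class CartesianProductDemo {
        public static void main(String[] args) {
            Set<Integer> a = Set.of(1, 2);
            Set<Integer> b = Set.of(3, 4);
            // Each resulting list picks one element from a and one from b:
            // [1, 3], [1, 4], [2, 3], [2, 4]
            Set<List<Integer>> product = Sets.cartesianProduct(List.of(a, b));
            product.forEach(System.out::println);
        }
    }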
@@ -23,69 +23,77 @@ public class MartelliMontanariUnify implements IUnify {
 
     @Override
     public Optional<Unifier> unify(Set<UnifyType> terms) {
         // Sets with less than 2 terms are trivially unified
         if(terms.size() < 2)
             return Optional.of(Unifier.Identity());
 
-        ArrayList<UnifyPair> termsQ = new ArrayList<UnifyPair>();
+        // For the set of terms {t1,...,tn},
+        // build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
+        ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
         Iterator<UnifyType> iter = terms.iterator();
         UnifyType prev = iter.next();
         while(iter.hasNext()) {
             UnifyType next = iter.next();
-            termsQ.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
+            termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
             prev = next;
         }
 
+        // Start with the identity unifier. Substitutions will be added later.
         Unifier mgu = Unifier.Identity();
 
+        // Apply rules while possible
         int idx = 0;
-        while(idx < termsQ.size()) {
-            UnifyPair pair = termsQ.get(idx);
+        while(idx < termsList.size()) {
+            UnifyPair pair = termsList.get(idx);
             UnifyType rhsType = pair.getRhsType();
             UnifyType lhsType = pair.getLhsType();
             TypeParams rhsTypeParams = rhsType.getTypeParams();
             TypeParams lhsTypeParams = lhsType.getTypeParams();
 
-            // DELETE
+            // DELETE - Rule
             if(pair.getRhsType().equals(pair.getLhsType())) {
-                termsQ.remove(idx);
+                termsList.remove(idx);
                 continue;
             }
 
-            // REDUCE
+            // REDUCE - Rule
             if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)
                     && (rhsTypeParams.size() != 0 || lhsTypeParams.size() != 0)) {
                 Set<UnifyPair> result = new HashSet<>();
 
                 // f<...> = g<...> with f != g are not unifiable
                 if(!rhsType.getName().equals(lhsType.getName()))
                     return Optional.empty(); // conflict
                 // f<t1,...,tn> = f<s1,...,sm> with n != m are not unifiable
                 if(rhsTypeParams.size() != lhsTypeParams.size())
                     return Optional.empty(); // conflict
 
                 // Unpack the arguments
                 for(int i = 0; i < rhsTypeParams.size(); i++)
                     result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));
 
-                termsQ.addAll(result);
-                idx = idx+1 == termsQ.size() ? 0 : idx+1;
+                termsList.remove(idx);
+                termsList.addAll(result);
                 continue;
             }
 
-            // SWAP
+            // SWAP - Rule
             if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
-                termsQ.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
-                idx = idx+1 == termsQ.size() ? 0 : idx+1;
+                termsList.remove(idx);
+                termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
                 continue;
             }
 
-            // Occurs-Check
+            // OCCURS-CHECK
             if(pair.getLhsType() instanceof PlaceholderType
                     && pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
                 return Optional.empty();
 
-            // SUBST
+            // SUBST - Rule
             if(lhsType instanceof PlaceholderType) {
                 mgu.Add((PlaceholderType) lhsType, rhsType);
-                termsQ = termsQ.stream().map(mgu::apply).collect(Collectors.toCollection(ArrayList::new));
-                idx = idx+1 == termsQ.size() ? 0 : idx+1;
+                termsList = termsList.stream().map(mgu::apply).collect(Collectors.toCollection(ArrayList::new));
+                idx = idx+1 == termsList.size() ? 0 : idx+1;
                 continue;
             }
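The loop above implements the classic Martelli-Montanari rules: DELETE drops trivial equations, REDUCE decomposes two applications of the same constructor into equations between their arguments, SWAP orients a pair so the placeholder ends up on the left, the occurs check rejects cyclic solutions, and SUBST extends the unifier and applies it to the remaining equations. As a hedged illustration of how the five rules interact, here is a self-contained toy version of the same loop; the Term, Pair and unify names below are invented for this sketch and are not part of the project:

    import java.util.*;

    final class UnifySketch {
        // A term is either a variable (isVar, no args) or an application f(t1,...,tn).
        record Term(String name, boolean isVar, List<Term> args) {
            static Term variable(String n) { return new Term(n, true, List.of()); }
            static Term app(String n, Term... a) { return new Term(n, false, List.of(a)); }
            boolean occurs(Term v) {
                return this.equals(v) || args.stream().anyMatch(t -> t.occurs(v));
            }
            Term substitute(Term v, Term r) {
                if (this.equals(v)) return r;
                return new Term(name, isVar, args.stream().map(t -> t.substitute(v, r)).toList());
            }
        }
        record Pair(Term lhs, Term rhs) {}

        // Returns the substitution {variable -> term} or empty on a clash / occurs failure.
        static Optional<Map<Term, Term>> unify(Term s, Term t) {
            List<Pair> work = new ArrayList<>(List.of(new Pair(s, t)));
            Map<Term, Term> mgu = new LinkedHashMap<>();
            while (!work.isEmpty()) {
                Pair p = work.remove(0);
                Term l = p.lhs(), r = p.rhs();
                if (l.equals(r)) continue;                                    // DELETE
                if (!l.isVar() && !r.isVar()) {                               // REDUCE
                    if (!l.name().equals(r.name()) || l.args().size() != r.args().size())
                        return Optional.empty();                              // clash
                    for (int i = 0; i < l.args().size(); i++)
                        work.add(new Pair(l.args().get(i), r.args().get(i)));
                    continue;
                }
                if (!l.isVar() && r.isVar()) { work.add(new Pair(r, l)); continue; } // SWAP
                if (r.occurs(l)) return Optional.empty();                     // occurs check
                // SUBST: l is a variable that does not occur in r
                mgu.put(l, r);
                work.replaceAll(q -> new Pair(q.lhs().substitute(l, r), q.rhs().substitute(l, r)));
                mgu.replaceAll((v, term) -> term.substitute(l, r));
            }
            return Optional.of(mgu);
        }

        public static void main(String[] args) {
            Term a = Term.variable("a"), b = Term.variable("b");
            // unify List<a> with List<Integer>  =>  {a -> Integer}
            System.out.println(unify(Term.app("List", a), Term.app("List", Term.app("Integer"))));
            // occurs check: a = List<a> has no unifier
            System.out.println(unify(a, Term.app("List", a)));
            // chained equations as built by the method above: a = b, b = Integer  =>  {a -> Integer, b -> Integer}
            System.out.println(unify(Term.app("P", a, b), Term.app("P", b, Term.app("Integer"))));
        }
    }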
@@ -25,16 +25,26 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
 import de.dhbwstuttgart.typeinference.unify.model.Unifier;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 
+/**
+ * Implementation of the type inference rules.
+ * @author Florian Steurer
+ *
+ */
 public class RuleSet implements IRuleSet{
 
     protected IFiniteClosure finiteClosure;
 
+    /**
+     * Creates a new instance that uses the specified FC for greater, grArg, etc.
+     * @param fc The FC that is used for greater, grArg, etc.
+     */
     public RuleSet(IFiniteClosure fc) {
         finiteClosure = fc;
     }
 
     @Override
     public Optional<UnifyPair> reduceUp(UnifyPair pair) {
+        // Check if reduce up is applicable
         if(pair.getPairOp() != PairOperator.SMALLERDOT)
             return Optional.empty();
 
@@ -46,11 +56,13 @@ public class RuleSet implements IRuleSet{
         if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
             return Optional.empty();
 
+        // Rule is applicable, unpack the SuperType
         return Optional.of(new UnifyPair(lhsType, ((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT));
     }
 
     @Override
     public Optional<UnifyPair> reduceLow(UnifyPair pair) {
+        // Check if rule is applicable
         if(pair.getPairOp() != PairOperator.SMALLERDOT)
             return Optional.empty();
 
@@ -62,11 +74,13 @@ public class RuleSet implements IRuleSet{
         if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
             return Optional.empty();
 
+        // Rule is applicable, unpack the ExtendsType
         return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), rhsType, PairOperator.SMALLERDOT));
     }
 
     @Override
     public Optional<UnifyPair> reduceUpLow(UnifyPair pair) {
+        // Check if rule is applicable
         if(pair.getPairOp() != PairOperator.SMALLERDOT)
             return Optional.empty();
 
@@ -78,6 +92,7 @@ public class RuleSet implements IRuleSet{
         if(!(rhsType instanceof SuperType))
             return Optional.empty();
 
+        // Rule is applicable, unpack both sides
         return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(),((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT));
     }
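Taken together, the three rules strip wildcard bounds from a SMALLERDOT pair: reduceUp rewrites (T <. ? super S) to (T <. S), reduceLow rewrites (? extends T <. S) to (T <. S), and reduceUpLow handles both sides at once, with Optional.empty signalling that a rule is not applicable. A hedged sketch of that shape on invented stand-in types (Ref, Extends, Super and Pair below do not exist in the project):

    import java.util.Optional;

    final class ReduceRulesSketch {
        sealed interface Type permits Ref, Extends, Super {}
        record Ref(String name) implements Type {}      // an ordinary reference type
        record Extends(Type bound) implements Type {}   // ? extends bound
        record Super(Type bound) implements Type {}     // ? super bound
        record Pair(Type lhs, Type rhs) {}              // read as: lhs <. rhs

        // reduce-up:   (lhs <. ? super S)  =>  (lhs <. S)
        static Optional<Pair> reduceUp(Pair p) {
            return p.rhs() instanceof Super s ? Optional.of(new Pair(p.lhs(), s.bound())) : Optional.empty();
        }
        // reduce-low:  (? extends T <. rhs)  =>  (T <. rhs)
        static Optional<Pair> reduceLow(Pair p) {
            return p.lhs() instanceof Extends e ? Optional.of(new Pair(e.bound(), p.rhs())) : Optional.empty();
        }
        // reduce-up-low: (? extends T <. ? super S)  =>  (T <. S)
        static Optional<Pair> reduceUpLow(Pair p) {
            return reduceLow(p).flatMap(ReduceRulesSketch::reduceUp);
        }

        public static void main(String[] args) {
            Pair p = new Pair(new Extends(new Ref("Integer")), new Super(new Ref("Number")));
            System.out.println(reduceUpLow(p)); // reduces to the pair Integer <. Number
            System.out.println(reduceUp(new Pair(new Ref("Integer"), new Ref("Number")))); // empty: not applicable
        }
    }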
@@ -32,6 +32,8 @@ import de.dhbwstuttgart.typeinference.unify.model.Unifier;
  */
 public class Unify {
 
+    protected ISetOperations setOps = new GuavaSetOperations();
+
     public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
         /*
          * Step 1: Repeated application of reduce, adapt, erase, swap
@@ -89,8 +91,6 @@ public class Unify {
         /* Up to here, no cartesian products are calculated.
          * filters for pairs and sets can be applied here */
 
-        ISetOperations setOps = new GuavaSetOperations();
-
         // Sub cartesian products of the second level (pattern matched) sets
         for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
             List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
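Hoisting the GuavaSetOperations instance into the setOps field (previous hunk) lets the cartesian-product steps below share one ISetOperations instance instead of creating a local one here. For readers unfamiliar with the "second level" products, the following sketch (not project code; class and variable names are made up) shows what a cartesian product over a list of sets of alternatives yields: each result combines exactly one alternative from every input set.

    import java.util.List;
    import java.util.Set;
    import com.google.common.collect.Sets;

    final class SecondLevelProductSketch {
        public static void main(String[] args) {
            // Two "second level" sets, each offering alternative inner sets (strings stand in for pair sets).
            Set<Set<String>> alternativesA = Set.of(Set.of("a1"), Set.of("a2"));
            Set<Set<String>> alternativesB = Set.of(Set.of("b1"));
            // Every resulting list combines one alternative from A with one from B:
            // [{a1}, {b1}] and [{a2}, {b1}]
            Set<List<Set<String>>> combinations =
                    Sets.cartesianProduct(List.of(alternativesA, alternativesB));
            combinations.forEach(System.out::println);
        }
    }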
@@ -18,7 +18,7 @@ public class FiniteClosure implements IFiniteClosure {
     private HashMap<UnifyType, Node<UnifyType>> inheritanceGraph;
     private HashMap<String, HashSet<Node<UnifyType>>> strInheritanceGraph;
     private Set<UnifyPair> pairs;
-    private Set<UnifyType> basicTypes;
+    //private Set<UnifyType> basicTypes;
     //TODO pass these in via the constructor to handle types that have no extends relation. (So that the FC also knows these types)
     //(Alternative: always insert the extends-to-Object relation)
 
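The TODO sketches two options for types that have no extends relation: pass them into the constructor, or always insert an extends-to-Object relation so the finite closure knows them. A rough sketch of the second option on invented stand-in types (the real FiniteClosure constructor is not shown in this diff, so Type, ExtendsRelation and withObjectEdges below are purely illustrative):

    import java.util.HashSet;
    import java.util.Set;

    final class FcBootstrapSketch {
        record Type(String name) {}
        record ExtendsRelation(Type subtype, Type supertype) {}

        // Give every known type without a declared supertype an extends-Object edge,
        // so a finite closure built from the result "knows" those types as well.
        static Set<ExtendsRelation> withObjectEdges(Set<Type> knownTypes, Set<ExtendsRelation> relations) {
            Type object = new Type("java.lang.Object");
            Set<ExtendsRelation> result = new HashSet<>(relations);
            Set<Type> hasSuper = new HashSet<>();
            for (ExtendsRelation r : relations) hasSuper.add(r.subtype());
            for (Type t : knownTypes)
                if (!hasSuper.contains(t) && !t.equals(object))
                    result.add(new ExtendsRelation(t, object));
            return result;
        }

        public static void main(String[] args) {
            Set<Type> known = Set.of(new Type("Integer"), new Type("Number"), new Type("String"));
            Set<ExtendsRelation> rels = Set.of(new ExtendsRelation(new Type("Integer"), new Type("Number")));
            // Number and String gain an extends-Object edge; Integer already has a supertype.
            System.out.println(withObjectEdges(known, rels));
        }
    }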