Merge branch 'unify' into refactoring

JanUlrich 2016-04-14 14:03:15 +02:00
commit 3cd7dba316
28 changed files with 275 additions and 170 deletions

View File

@ -49,7 +49,7 @@ import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import org.apache.commons.bcel6.generic.*;
import org.apache.commons.bcel6.classfile.*;

View File

@ -53,7 +53,7 @@ import de.dhbwstuttgart.typeinference.assumptions.ParameterAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@ -263,7 +263,7 @@ public class SourceFile
//Filter out impossible ConstraintsSets via unification
Function<Menge<UnifyPair>,Menge<Menge<UnifyPair>>> unifier = (pairs)->{
Menge<Menge<UnifyPair>> retValue = new Menge<>();
Set<Set<UnifyPair>> unifiedPairs = new Unify().unify(pairs, finiteClosure);
Set<Set<UnifyPair>> unifiedPairs = new TypeUnify().unify(pairs, finiteClosure);
return retValue;};
//oderConstraints.filterWrongConstraints(unifier);
@ -360,7 +360,7 @@ public class SourceFile
});
*/
typinferenzLog.debug("\nUnifiziere Constraints:\n"+constraints, Section.TYPEINFERENCE);
Set<Set<UnifyPair>> unifyResult = new Unify().unify(constraints, finiteClosure);
Set<Set<UnifyPair>> unifyResult = new TypeUnify().unify(constraints, finiteClosure);
Menge<Menge<Pair>> convertedResult = unifyResult.parallelStream().<Menge<Pair>>map((Set<UnifyPair> resultSet)->{
Menge<Pair> innerConvert = resultSet.stream().map((UnifyPair mp)->UnifyTypeFactory.convert(mp))
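For orientation, a minimal sketch of how the renamed entry point is called after this change. The helper class below is illustrative only and not part of the commit; TypeUnify, UnifyPair and FiniteClosure are the classes touched in this diff, and the constraint set is whatever the type inference has collected:

    import java.util.Set;

    import de.dhbwstuttgart.typeinference.unify.TypeUnify;
    import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

    // Illustrative helper: run the renamed unifier on a prepared constraint set.
    class TypeUnifyCaller {
        static Set<Set<UnifyPair>> solveConstraints(Set<UnifyPair> constraints, FiniteClosure finiteClosure) {
            // Same call shape as in SourceFile above, just isolated.
            return new TypeUnify().unify(constraints, finiteClosure);
        }
    }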

View File

@ -17,7 +17,7 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -13,7 +13,7 @@ import de.dhbwstuttgart.syntaxtree.statement.Expr;
import de.dhbwstuttgart.syntaxtree.statement.Null;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -20,7 +20,7 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -15,7 +15,7 @@ import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
// ino.class.MulOp.24231.declaration
public abstract class MulOp extends Operator

View File

@ -12,7 +12,7 @@ import de.dhbwstuttgart.syntaxtree.statement.Expr;
import de.dhbwstuttgart.syntaxtree.statement.Null;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
// ino.class.NotEqualOp.24241.declaration
public class NotEqualOp extends RelOp

View File

@ -22,7 +22,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
public abstract class Operator extends SyntaxTreeNode

View File

@ -16,7 +16,7 @@ import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
// ino.class.RelOp.24299.declaration
public abstract class RelOp extends Operator
// ino.end

View File

@ -36,7 +36,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -12,7 +12,7 @@ import de.dhbwstuttgart.core.MyCompiler;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
// ino.class.ExprStmt.25265.declaration
public abstract class ExprStmt extends Statement

View File

@ -36,7 +36,7 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -47,7 +47,7 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -31,7 +31,7 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.FieldAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -28,7 +28,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -28,7 +28,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -28,7 +28,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -33,7 +33,7 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -28,7 +28,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -28,7 +28,7 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -32,7 +32,7 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;

View File

@ -1,6 +1,6 @@
package de.dhbwstuttgart.typeinference;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

View File

@ -57,8 +57,7 @@ public class MartelliMontanariUnify implements IUnify {
}
// REDUCE - Rule
if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)
&& (rhsTypeParams.size() != 0 || lhsTypeParams.size() != 0)) {
if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
Set<UnifyPair> result = new HashSet<>();
// f<...> = g<...> with f != g are not unifiable
@ -67,7 +66,10 @@ public class MartelliMontanariUnify implements IUnify {
// f<t1,...,tn> = f<s1,...,sm> are not unifiable
if(rhsTypeParams.size() != lhsTypeParams.size())
return Optional.empty(); // conflict
// f = g is not unifiable (cannot be f = f because the erase rule would have been applied)
//if(rhsTypeParams.size() == 0)
//return Optional.empty();
// Unpack the arguments
for(int i = 0; i < rhsTypeParams.size(); i++)
result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));
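The widened REDUCE rule above now also fires for parameterless types, so f = g with f != g is reported as a conflict by the same code path. A standalone illustration of the decomposition step, using plain strings instead of the project's UnifyType hierarchy (a sketch under that simplification, not the commit's code):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Optional;

    // Illustration only: reduce f<t1,...,tn> =. g<s1,...,sm> into argument equations ti =. si.
    class ReduceSketch {
        static Optional<List<String[]>> reduce(String f, List<String> fArgs, String g, List<String> gArgs) {
            if (!f.equals(g))
                return Optional.empty();        // f<...> =. g<...> with f != g: conflict
            if (fArgs.size() != gArgs.size())
                return Optional.empty();        // same name, different arity: conflict
            List<String[]> argEquations = new ArrayList<>();
            for (int i = 0; i < fArgs.size(); i++)
                argEquations.add(new String[] { fArgs.get(i), gArgs.get(i) });
            return Optional.of(argEquations);   // empty list for f = f without params (erased earlier in the real code)
        }
    }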

View File

@ -30,17 +30,7 @@ import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
* @author Florian Steurer
*
*/
public class RuleSet implements IRuleSet{
protected IFiniteClosure finiteClosure;
/**
* Creates a new instance that uses the specified FC for greater, grArg, etc.
* @param fc The FC that is used for greater, grArg, etc.
*/
public RuleSet(IFiniteClosure fc) {
finiteClosure = fc;
}
public class RuleSet implements IRuleSet{
@Override
public Optional<UnifyPair> reduceUp(UnifyPair pair) {
@ -97,7 +87,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair) {
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
@ -119,7 +109,7 @@ public class RuleSet implements IRuleSet{
if(x.getTypeParams().empty() || extY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = finiteClosure.getLeftHandedType(sTypeX.getName()).orElse(null);
UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
@ -127,7 +117,7 @@ public class RuleSet implements IRuleSet{
if(x instanceof ExtendsType)
xFromFc = new ExtendsType(xFromFc);
UnifyType extYFromFc = finiteClosure.grArg(xFromFc).stream().filter(t -> t.getName().equals(extY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
UnifyType extYFromFc = fc.grArg(xFromFc).stream().filter(t -> t.getName().equals(extY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(extYFromFc == null || extYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
@ -149,7 +139,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair) {
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
@ -171,7 +161,7 @@ public class RuleSet implements IRuleSet{
if(x.getTypeParams().empty() || supY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = finiteClosure.getLeftHandedType(sTypeX.getName()).orElse(null);
UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
@ -179,7 +169,7 @@ public class RuleSet implements IRuleSet{
if(x instanceof SuperType)
xFromFc = new SuperType(xFromFc);
UnifyType supYFromFc = finiteClosure.grArg(xFromFc).stream().filter(t -> t.getName().equals(supY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
UnifyType supYFromFc = fc.grArg(xFromFc).stream().filter(t -> t.getName().equals(supY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(supYFromFc == null || supYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
@ -231,7 +221,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair) {
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
@ -249,12 +239,12 @@ public class RuleSet implements IRuleSet{
if(lhsSType.getTypeParams().empty() || lhsSType.getTypeParams().size() != rhsSType.getTypeParams().size())
return Optional.empty();
UnifyType cFromFc = finiteClosure.getLeftHandedType(c.getName()).orElse(null);
UnifyType cFromFc = fc.getLeftHandedType(c.getName()).orElse(null);
if(cFromFc == null || !cFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
UnifyType dFromFc = finiteClosure.getAncestors(cFromFc).stream().filter(x -> x.getName().equals(d.getName())).findAny().orElse(null);
UnifyType dFromFc = fc.getAncestors(cFromFc).stream().filter(x -> x.getName().equals(d.getName())).findAny().orElse(null);
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
@ -330,7 +320,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public boolean erase1(UnifyPair pair) {
public boolean erase1(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return false;
@ -342,18 +332,18 @@ public class RuleSet implements IRuleSet{
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return false;
return finiteClosure.greater(lhsType).contains(rhsType);
return fc.greater(lhsType).contains(rhsType);
}
@Override
public boolean erase2(UnifyPair pair) {
public boolean erase2(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return false;
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
return finiteClosure.grArg(lhsType).contains(rhsType);
return fc.grArg(lhsType).contains(rhsType);
}
@Override
@ -379,7 +369,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<UnifyPair> adapt(UnifyPair pair) {
public Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
@ -398,7 +388,7 @@ public class RuleSet implements IRuleSet{
return Optional.empty();
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(typeD.getName());
Optional<UnifyType> opt = fc.getLeftHandedType(typeD.getName());
if(!opt.isPresent())
return Optional.empty();
@ -406,7 +396,7 @@ public class RuleSet implements IRuleSet{
UnifyType typeDgen = opt.get();
// Actually greater+ because the types are ensured to have different names
Set<UnifyType> greater = finiteClosure.getAncestors(typeDgen);
Set<UnifyType> greater = fc.getAncestors(typeDgen);
opt = greater.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
@ -425,7 +415,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<UnifyPair> adaptExt(UnifyPair pair) {
public Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
@ -442,16 +432,16 @@ public class RuleSet implements IRuleSet{
UnifyType typeDgen;
if(typeD instanceof ReferenceType)
typeDgen = finiteClosure.getLeftHandedType(typeD.getName()).orElse(null);
typeDgen = fc.getLeftHandedType(typeD.getName()).orElse(null);
else {
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(((ExtendsType) typeD).getExtendedType().getName());
Optional<UnifyType> opt = fc.getLeftHandedType(((ExtendsType) typeD).getExtendedType().getName());
typeDgen = opt.isPresent() ? new ExtendsType(opt.get()) : null;
}
if(typeDgen == null)
return Optional.empty();
Set<UnifyType> grArg = finiteClosure.grArg(typeDgen);
Set<UnifyType> grArg = fc.grArg(typeDgen);
Optional<UnifyType> opt = grArg.stream().filter(x -> x.getName().equals(typeExtDs.getName())).findAny();
@ -471,7 +461,7 @@ public class RuleSet implements IRuleSet{
}
@Override
public Optional<UnifyPair> adaptSup(UnifyPair pair) {
public Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
@ -487,7 +477,7 @@ public class RuleSet implements IRuleSet{
return Optional.empty();
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(((SuperType) typeSupD).getSuperedType().getName());
Optional<UnifyType> opt = fc.getLeftHandedType(((SuperType) typeSupD).getSuperedType().getName());
if(!opt.isPresent())
return Optional.empty();
@ -497,7 +487,7 @@ public class RuleSet implements IRuleSet{
// Use of smArg instead of grArg because
// a in grArg(b) => b in smArg(a)
Set<UnifyType> smArg = finiteClosure.smArg(typeSupDgen);
Set<UnifyType> smArg = fc.smArg(typeSupDgen);
opt = smArg.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
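The net effect of the RuleSet changes in this file is that the finite closure is no longer stored in the instance; every FC-dependent rule now receives it as a parameter. A minimal sketch of the resulting call pattern (an assumed helper placed in the same package as RuleSet, not part of the commit):

    import java.util.Optional;
    import java.util.Set;

    import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

    // Assumes placement in the same package as RuleSet, so no import for it is needed.
    class RuleSetUsageSketch {
        static Optional<Set<UnifyPair>> applyReduce1(UnifyPair pair, IFiniteClosure fc) {
            RuleSet rules = new RuleSet();      // was: new RuleSet(fc)
            return rules.reduce1(pair, fc);     // was: rules.reduce1(pair)
        }
    }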

View File

@ -30,21 +30,38 @@ import de.dhbwstuttgart.typeinference.unify.model.Unifier;
* Implementation of the type unification algorithm
* @author Florian Steurer
*/
public class Unify {
public class TypeUnify {
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules = new RuleSet();
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
@ -60,7 +77,7 @@ public class Unify {
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
if(eq1s.size() != 0) {
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
@ -71,7 +88,7 @@ public class Unify {
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) {
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
@ -92,10 +109,13 @@ public class Unify {
* filters for pairs and sets can be applied here */
// Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
// TODO possible optimization: this loop could be parallelized
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
for(List<Set<UnifyPair>> s : cartResult) {
Set<UnifyPair> flat1 = new HashSet<>();
@ -112,10 +132,8 @@ public class Unify {
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);
/*
* Step 5: Substitution
*/
// Flatten the cartesian product
// TODO could be parallelized
Set<Set<UnifyPair>> eqPrimeSetFlat = new HashSet<>();
for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
Set<UnifyPair> buffer = new HashSet<>();
@ -123,7 +141,10 @@ public class Unify {
eqPrimeSetFlat.add(buffer);
}
IRuleSet rules = new RuleSet(fc);
/*
* Step 5: Substitution
*/
Set<Set<UnifyPair>> restartSet = new HashSet<>();
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
@ -150,7 +171,7 @@ public class Unify {
* Step 6 a) Restart for pairs where subst was applied
* b) Build the union over everything
*/
// TODO could be parallelized (probably worthwhile)
for(Set<UnifyPair> eqss : restartSet)
eqPrimePrimeSet.addAll(this.unify(eqss, fc));
@ -161,6 +182,11 @@ public class Unify {
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pairs.
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
@ -169,13 +195,18 @@ public class Unify {
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
protected Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
@ -193,12 +224,11 @@ public class Unify {
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
IRuleSet rules = new RuleSet(fc);
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, rules, eqQueue));
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
@ -220,31 +250,31 @@ public class Unify {
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), rules, eqQueue);
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair);
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, rules, eqQueue));
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair);
opt = opt.isPresent() ? opt : rules.adaptExt(pair);
opt = opt.isPresent() ? opt : rules.adaptSup(pair);
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), rules, eqQueue);
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
@ -255,16 +285,29 @@ public class Unify {
return targetSet;
}
protected void swapAddOrErase(UnifyPair pair, IRuleSet rules, Collection<UnifyPair> collection) {
/**
* Applies the swap rule to a pair if possible. Then adds the pair to the collection if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2) || rules.erase3(pair2) || rules.erase2(pair2))
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the set of pairs eq into a set eq1s where both terms are type variables,
* and a set eq2s where at least one of the terms is not a type variable.
* @param eq Set of pairs to be split.
* @param eq1s Subset of eq where both terms are type variables.
* @param eq2s The remaining pairs, i.e. eq without eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
@ -273,16 +316,21 @@ public class Unify {
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>();
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
@ -323,17 +371,19 @@ public class Unify {
// Pairs that do not have one of the above forms are contradictory.
else
undefined.add(pair);
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
IUnify unify = new MartelliMontanariUnify();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
@ -375,7 +425,7 @@ public class Unify {
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = unify.unify(tqp, thetaPrime);
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
@ -411,6 +461,9 @@ public class Unify {
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
@ -483,6 +536,9 @@ public class Unify {
// return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
@ -504,6 +560,9 @@ public class Unify {
return result;
}
/**
* Cartesian Product Case 4: (a <.? Theta')
*/
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> resultPrime = new HashSet<>();
@ -513,6 +572,9 @@ public class Unify {
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta)) {
@ -531,6 +593,9 @@ public class Unify {
return result;
}
/**
* Cartesian Product Case 6: (? ext Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.smaller(extTheta.getExtendedType())) {
@ -546,6 +611,9 @@ public class Unify {
return result;
}
/**
* Cartesian Product Case 7: (? sup Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
@ -613,6 +681,9 @@ public class Unify {
// return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
@ -635,12 +706,25 @@ public class Unify {
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
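permuteParams, as documented above, fills every parameter position with each of its candidates, i.e. it enumerates the cartesian product of the candidate sets. A self-contained illustration of that recursion with plain strings instead of UnifyType (a sketch, not the commit's code):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Illustration of the recursion behind permuteParams.
    class PermuteSketch {
        static Set<List<String>> permute(List<Set<String>> candidates) {
            Set<List<String>> result = new HashSet<>();
            permute(candidates, 0, result, new String[candidates.size()]);
            return result;
        }

        private static void permute(List<Set<String>> candidates, int idx, Set<List<String>> result, String[] current) {
            if (idx == candidates.size()) {
                result.add(Arrays.asList(Arrays.copyOf(current, current.length)));  // one complete permutation
                return;
            }
            for (String candidate : candidates.get(idx)) {
                current[idx] = candidate;                    // try this candidate at position idx
                permute(candidates, idx + 1, result, current);
            }
        }
    }

With candidates [{A, B}, {X}] this yields the two parameter lists [A, X] and [B, X].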

View File

@ -14,10 +14,10 @@ public interface IRuleSet {
public Optional<UnifyPair> reduceUp(UnifyPair pair);
public Optional<UnifyPair> reduceLow(UnifyPair pair);
public Optional<UnifyPair> reduceUpLow(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc);
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc);
public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair);
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair);
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc);
public Optional<Set<UnifyPair>> reduce2(UnifyPair pair);
/*
@ -42,13 +42,13 @@ public interface IRuleSet {
* Checks whether the erase1-Rule applies to the pair.
* @return True if the pair is erasable, false otherwise.
*/
public boolean erase1(UnifyPair pair);
public boolean erase1(UnifyPair pair, IFiniteClosure fc);
/**
* Checks whether the erase2-Rule applies to the pair.
* @return True if the pair is erasable, false otherwise.
*/
public boolean erase2(UnifyPair pair);
public boolean erase2(UnifyPair pair, IFiniteClosure fc);
/**
* Checks whether the erase3-Rule applies to the pair.
@ -58,9 +58,9 @@ public interface IRuleSet {
public Optional<UnifyPair> swap(UnifyPair pair);
public Optional<UnifyPair> adapt(UnifyPair pair);
public Optional<UnifyPair> adaptExt(UnifyPair pair);
public Optional<UnifyPair> adaptSup(UnifyPair pair);
public Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc);
public Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc);
public Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc);
/**
* Applies the subst-Rule to a set of pairs (usually Eq').

View File

@ -8,20 +8,35 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
/**
* The finite closure for the type unification
* @author Florian Steurer
*/
public class FiniteClosure implements IFiniteClosure {
private HashMap<UnifyType, Node<UnifyType>> inheritanceGraph;
private HashMap<String, HashSet<Node<UnifyType>>> strInheritanceGraph;
private Set<UnifyPair> pairs;
//private Set<UnifyType> basicTypes;
//TODO pass in via the constructor to handle types that have no extends relation (so that the FC also knows these types)
//(Alternative: always insert the extends relation to Object)
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
*/
private HashMap<UnifyType, Node<UnifyType>> inheritanceGraph;
/**
* A map that maps every typename to the nodes of the inheritance graph that contain a type with that name.
*/
private HashMap<String, HashSet<Node<UnifyType>>> strInheritanceGraph;
/**
* The initial pairs that define the inheritance tree
*/
private Set<UnifyPair> pairs;
//TODO check: pass types without an edge in the graph as an extra set in the constructor?
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs) {
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@ -49,7 +64,6 @@ public class FiniteClosure implements IFiniteClosure {
}
// Build the alternative representation with strings as keys
strInheritanceGraph = new HashMap<>();
for(UnifyType key : inheritanceGraph.keySet()) {
if(!strInheritanceGraph.containsKey(key.getName()))
@ -71,7 +85,12 @@ public class FiniteClosure implements IFiniteClosure {
return computeSmaller(type);
}
/**
* Computes the smaller functions for every type except FunNTypes.
*/
private Set<UnifyType> computeSmaller(UnifyType type) {
// Base Case: The type is in the inheritance tree. Add all children.
// This is Case 1 in the definition of the subtyping relation.
if(inheritanceGraph.containsKey(type)) {
Set<UnifyType> result = new HashSet<>();
result.add(type);
@ -85,22 +104,22 @@ public class FiniteClosure implements IFiniteClosure {
// if T = T' then T <=* T'
result1.add(type);
// Permute all params with values that are in smArg() of that type.
// This corresponds to Case 3 in the definition of the subtyping relation.
{ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : type.getTypeParams())
paramCandidates.add(smArg(param));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result1.add(type.setTypeParams(newParams));}
permuteParams(paramCandidates).forEach(x -> result1.add(type.setTypeParams(x)));}
// This is case 2 of the definition of the subtyping relation.
Set<UnifyType> result2 = new HashSet<>();
if (strInheritanceGraph.containsKey(type.getName())) {
HashSet<UnifyType> candidates = new HashSet<>();
// All types with the same name
strInheritanceGraph.get(type.getName()).forEach(x -> candidates.add(x.getContent()));
for(UnifyType typePrime : result1) {
for (UnifyType theta2 : candidates) {
// Find the substitution
Optional<Unifier> sigma2Opt = unify.unify(typePrime, theta2);
if (!sigma2Opt.isPresent())
continue;
@ -120,6 +139,8 @@ public class FiniteClosure implements IFiniteClosure {
else
result2 = result1;
// Permute the params again.
// This corresponds again to Case 3 of the definition of the subtyping relation.
Set<UnifyType> result3 = new HashSet<>();
for(UnifyType t : result2) {
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
@ -141,22 +162,23 @@ public class FiniteClosure implements IFiniteClosure {
return result3;
}
/**
* Computes the smaller-Function for FunNTypes.
*/
private Set<UnifyType> computeSmallerFunN(FunNType type) {
Set<UnifyType> result = new HashSet<>();
// if T = T' then T <=* T'
result.add(type);
// Because real function types are implicitly variant
// it is enough to permute the params with the values of greater / smaller.
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
paramCandidates.add(smaller(type.getTypeParams().get(0)));
for (int i = 1; i < type.getTypeParams().size(); i++)
paramCandidates.add(greater(type.getTypeParams().get(i)));
paramCandidates.add(greater(type.getTypeParams().get(i)));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result.add(type.setTypeParams(newParams));
permuteParams(paramCandidates).forEach(x -> result.add(type.setTypeParams(x)));
return result;
}
@ -170,40 +192,44 @@ public class FiniteClosure implements IFiniteClosure {
return computeGreaterFunN((FunNType) type);
return computeGreater(type);
}
/**
* Computes the greater function for all types except function types.
*/
protected Set<UnifyType> computeGreater(UnifyType type) {
IUnify unify = new MartelliMontanariUnify();
Set<UnifyType> result1 = new HashSet<>();
// The type is in the inheritance tree. Add all predecessors (supertypes).
// This is Case 1 in the definition of the subtyping relation.
if(inheritanceGraph.containsKey(type))
result1.addAll(inheritanceGraph.get(type).getContentOfPredecessors());
// if T = T' then T <=* T'
result1.add(type);
// Permute all params with values that are in smArg() of that type.
// This corresponds to Case 3 in the definition of the subtyping relation.
{ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : type.getTypeParams())
paramCandidates.add(grArg(param));
Set<TypeParams> permResult = new HashSet<>();
permuteParams(paramCandidates, 0, permResult, new UnifyType[paramCandidates.size()]);
for (TypeParams newParams : permResult)
result1.add(type.setTypeParams(newParams));}
permuteParams(paramCandidates).forEach(x -> result1.add(type.setTypeParams(x)));}
// This is case 2 of the definition of the subtyping relation.
Set<UnifyType> result2 = new HashSet<>();
if (strInheritanceGraph.containsKey(type.getName()) && !inheritanceGraph.containsKey(type)) {
HashSet<UnifyType> candidates = new HashSet<>();
// All types with the same name
strInheritanceGraph.get(type.getName()).forEach(x -> candidates.add(x.getContent()));
for(UnifyType typePrime : result1) {
for (UnifyType theta2 : candidates) {
// Find the substitution
Optional<Unifier> sigma2Opt = unify.unify(typePrime, theta2);
if (!sigma2Opt.isPresent())
continue;
if(type.equals(theta2))
continue;
Unifier sigma2 = sigma2Opt.get();
sigma2.swapPlaceholderSubstitutions(typePrime.getTypeParams());
Set<UnifyType> theta1s = greater(theta2);
@ -216,18 +242,17 @@ public class FiniteClosure implements IFiniteClosure {
}
}
result2.addAll(result1);
result2.addAll(result1);
// Permute the params again.
// This corresponds again to Case 3 of the definition of the subtyping relation.
Set<UnifyType> result3 = new HashSet<>();
for(UnifyType t : result2) {
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : t.getTypeParams())
paramCandidates.add(grArg(param));
Set<TypeParams> permResult = new HashSet<>();
permuteParams(paramCandidates, 0, permResult, new UnifyType[paramCandidates.size()]);
for (TypeParams newParams : permResult) {
for (TypeParams newParams : permuteParams(paramCandidates)) {
UnifyType tPrime = t.setTypeParams(newParams);
if(tPrime.equals(t))
result3.add(t);
@ -240,22 +265,22 @@ public class FiniteClosure implements IFiniteClosure {
return result3;
}
/**
* Computes the greater function for FunN-Types
*/
protected Set<UnifyType> computeGreaterFunN(FunNType type) {
Set<UnifyType> result = new HashSet<>();
// if T = T' then T <=* T'
result.add(type);
// Because real function types are implicitly variant
// it is enough to permute the params with the values of greater / smaller.
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
paramCandidates.add(greater(type.getTypeParams().get(0)));
for (int i = 1; i < type.getTypeParams().size(); i++)
paramCandidates.add(smaller(type.getTypeParams().get(i)));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result.add(type.setTypeParams(newParams));
permuteParams(paramCandidates).forEach(x -> result.add(type.setTypeParams(x)));
return result;
}
@ -267,29 +292,29 @@ public class FiniteClosure implements IFiniteClosure {
@Override
public Set<UnifyType> grArg(ReferenceType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type).forEach(x -> result.add(new SuperType(x)));
greater(type).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(FunNType type) {
throw new NotImplementedException();
// TODO is this correct?
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type).forEach(x -> result.add(new SuperType(x)));
greater(type).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(ExtendsType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
result.add(type);
UnifyType t = type.getExtendedType();
greater(t).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@ -297,20 +322,15 @@ public class FiniteClosure implements IFiniteClosure {
public Set<UnifyType> grArg(SuperType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
UnifyType t = type.getSuperedType();
smaller(t).forEach(x -> result.add(new SuperType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(PlaceholderType type) {
HashSet<UnifyType> result = new HashSet<>();
result.add(type);
//result.add(new SuperType(type));
//result.add(new ExtendsType(type));
result.add(type);
return result;
}
@ -321,30 +341,29 @@ public class FiniteClosure implements IFiniteClosure {
@Override
public Set<UnifyType> smArg(ReferenceType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
return result;
}
@Override
public Set<UnifyType> smArg(FunNType type) {
throw new NotImplementedException();
// TODO is this correct?
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
return result;
}
@Override
public Set<UnifyType> smArg(ExtendsType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
result.add(t);
smaller(t).forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
return result;
}
@ -352,16 +371,13 @@ public class FiniteClosure implements IFiniteClosure {
@Override
public Set<UnifyType> smArg(SuperType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
result.add(type);
UnifyType t = type.getSuperedType();
result.add(t);
greater(t).forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
return result;
}
@ -409,12 +425,25 @@ public class FiniteClosure implements IFiniteClosure {
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Index for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
protected void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
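The grArg case for FunNType above now mirrors the ReferenceType case instead of throwing NotImplementedException: the result contains the type itself, ? super S for every smaller S, and ? extends S for every greater S. A compact sketch of that construction (an assumed helper in the same package as FiniteClosure; the smaller/greater sets are taken as inputs rather than recomputed here):

    import java.util.HashSet;
    import java.util.Set;

    // Assumes placement in de.dhbwstuttgart.typeinference.unify.model next to FiniteClosure.
    class GrArgSketch {
        static Set<UnifyType> grArgOf(UnifyType type, Set<UnifyType> smallerTypes, Set<UnifyType> greaterTypes) {
            Set<UnifyType> result = new HashSet<>();
            result.add(type);                                             // the type itself
            smallerTypes.forEach(x -> result.add(new SuperType(x)));      // ? super S for each smaller S
            greaterTypes.forEach(x -> result.add(new ExtendsType(x)));    // ? extends S for each greater S
            return result;
        }
    }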

View File

@ -8,7 +8,7 @@ import java.util.stream.Collectors;
import org.junit.Test;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
@ -17,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import junit.framework.Assert;
public class UnifyTest extends Unify {
public class UnifyTest extends TypeUnify {
/**
* Testing the unification for cases with (n)one pair and without generics.