Steps 5 and 6 implemented

Florian Steurer 2015-12-26 18:49:11 +01:00
parent 8eecda2a8f
commit 039dd3b3f4
4 changed files with 42 additions and 24 deletions

IRuleSet.java

@@ -27,5 +27,5 @@ public interface IRuleSet {
     public Optional<MPair> adaptExt(MPair pair);
     public Optional<MPair> adaptSup(MPair pair);
-    public Set<MPair> subst(Set<MPair> pair);
+    public Optional<Set<MPair>> subst(Set<MPair> pair);
 }
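The interface change above makes subst signal through its return value whether any substitution was actually performed: a present Optional carries the rewritten pair set, an empty Optional means no rule fired. A minimal sketch of how calling code might use that contract; the rules and pairs variables are assumed to exist in the caller and are illustrative only:

// Sketch only: "rules" (an IRuleSet) and "pairs" are assumed from the calling context.
Optional<Set<MPair>> substituted = rules.subst(pairs);
if(substituted.isPresent())
    pairs = substituted.get();   // at least one pair was substituted into the rest of the set
// otherwise keep the original set unchanged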

RuleSet.java

@@ -1,6 +1,7 @@
 package de.dhbwstuttgart.typeinference.unifynew;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -519,12 +520,24 @@ public class RuleSet implements IRuleSet{
     }
 
     @Override
-    public Set<MPair> subst(Set<MPair> pairs) {
-        HashSet<Type> allTypes = new HashSet<>();
-        pairs.forEach(x -> { allTypes.add(x.getLhsType()); allTypes.add(x.getRhsType()); });
+    public Optional<Set<MPair>> subst(Set<MPair> pairs) {
+        HashMap<Type, Integer> typeMap = new HashMap<>();
+
+        for(MPair pair : pairs) {
+            Type t1 = pair.getLhsType();
+            Type t2 = pair.getRhsType();
+            if(!typeMap.containsKey(t1))
+                typeMap.put(t1, 0);
+            if(!typeMap.containsKey(t2))
+                typeMap.put(t2, 0);
+            typeMap.put(t1, typeMap.get(t1)+1);
+            typeMap.put(t2, typeMap.get(t2)+1);
+        }
 
         Queue<MPair> result = new LinkedList<MPair>(pairs);
+        boolean applied = false;
+
         for(int i = 0; i < result.size(); i++) {
             MPair pair = result.poll();
 
             Type lhsType;
@@ -532,21 +545,16 @@
             if(pair.getPairOp() == PairOperator.EQUALSDOT
                     && ((lhsType = pair.getLhsType()) instanceof PlaceholderType)
                     && !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
-                    && occursInSet(lhsType, allTypes)
+                    && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
                     && !occurs(lhsType, rhsType)) {
                 Unifier uni = new Unifier(lhsType, rhsType);
                 result = result.stream().map(uni::apply).collect(Collectors.toCollection(LinkedList::new));
+                applied = true;
             }
             result.add(pair);
         }
 
-        return new HashSet<>(result);
-    }
-
-    private boolean occursInSet(Type t, Set<Type> types) {
-        int origSize = types.size();
-        types.add(t);
-        return types.size() == origSize;
+        return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
     }
 
     private boolean occurs(Type t1, Type t2) {
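The removed occursInSet helper was not a useful guard: allTypes also contained the types of the pair currently being examined, so the check passed for every pair taken from the original set. The HashMap introduced above counts occurrences instead, so the rule only fires when the placeholder also appears in at least one other pair. A small self-contained sketch of that counting pattern, using String stand-ins for the project's Type class (all names and values are illustrative):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OccurrenceCountSketch {
    public static void main(String[] args) {
        // Each two-element array stands for one pair (lhs, rhs).
        List<String[]> pairs = List.of(
                new String[] { "a", "Integer" },
                new String[] { "a", "b" },
                new String[] { "b", "Double" });

        // Count how often each "type" occurs across all pairs.
        Map<String, Integer> count = new HashMap<>();
        for (String[] pair : pairs) {
            count.merge(pair[0], 1, Integer::sum);
            count.merge(pair[1], 1, Integer::sum);
        }

        // "a" occurs in more than one pair, so substituting it can simplify other pairs.
        System.out.println(count.get("a") > 1); // prints true
    }
}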

Unify.java

@@ -30,7 +30,7 @@ import de.dhbwstuttgart.typinference.unify.model.Type;
  */
 public class Unify {
 
-    public Menge<Menge<Pair>> unify(Set<MPair> eq, IFiniteClosure fc) {
+    public Set<Set<MPair>> unify(Set<MPair> eq, IFiniteClosure fc) {
         /*
          * Step 1: Repeated application of reduce, adapt, erase, swap
          */
@@ -77,37 +77,47 @@
         ISetOperations setOps = new GuavaSetOperations();
 
         // Calculate the cartesian products
-        Set<Set<MPair>> result = setOps.cartesianProduct(sets).stream().map(x -> new HashSet<MPair>(x)).collect(Collectors.toCollection(HashSet::new));
-        System.out.println(result);
+        Set<Set<MPair>> result = setOps.cartesianProduct(sets).stream()
+                .map(x -> new HashSet<MPair>(x)).collect(Collectors.toCollection(HashSet::new));
+        //System.out.println(result);
 
         /*
          * Step 5: Substitution
          */
 
         /*
          * TODO
          * In the paper this is called Eq'', i.e. a set inside a set inside a set is assumed.
          * Because of the flat cartesian product there are only sets inside sets here.
          * Is that correct?
          */
+        IRuleSet rules = new RuleSet(fc);
+
+        Set<Set<MPair>> changed = new HashSet<>();
+        Set<Set<MPair>> unchanged = new HashSet<>();
+
+        for(Set<MPair> eqss : result) {
+            Optional<Set<MPair>> newEqss = rules.subst(eqss);
+            if(newEqss.isPresent())
+                changed.add(newEqss.get());
+            else
+                unchanged.add(eqss);
+        }
 
         /*
-         * Step 6: a) Restart for pairs where subst was applied
-         *         b) Union over everything
+         * Step 6 a) Restart for pairs where subst was applied
+         *        b) Build the union over everything
          */
+        for(Set<MPair> eqss : changed)
+            unchanged.addAll(this.unify(eqss, fc));
 
         /*
          * Step 7: Filter result for solved pairs
          */
 
-        return null;
+        return unchanged;
     }
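Steps 5 and 6 above follow a partition-and-restart pattern: each candidate set is run through subst once, the sets that changed are fed back into unify, the sets that did not are kept as they are, and everything is unioned at the end. A compact generic sketch of that control flow; substitute and restart are placeholder functions standing in for subst and the recursive unify call, not project API:

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

public class RestartSketch {

    // substitute: step 5, returns a result only if it changed the candidate.
    // restart:    step 6 a), re-runs the whole procedure on a changed candidate.
    static <T> Set<T> solve(Set<T> candidates,
                            Function<T, Optional<T>> substitute,
                            Function<T, Set<T>> restart) {
        Set<T> finished = new HashSet<>();
        Set<T> changed = new HashSet<>();

        for (T candidate : candidates) {
            Optional<T> substituted = substitute.apply(candidate);
            if (substituted.isPresent())
                changed.add(substituted.get());
            else
                finished.add(candidate);
        }

        for (T candidate : changed)
            finished.addAll(restart.apply(candidate)); // step 6 b): union over everything
        return finished;
    }
}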

UnifyTest.java

@@ -36,7 +36,7 @@ public class UnifyTest extends Unify {
         eq.add(new MPair(tf.getSimpleType("Double"), tf.getPlaceholderType("B"), PairOperator.SMALLERDOT));
         //eq.add(new MPair(tf.getPlaceholderType("B"), tf.getSimpleType("Object"), PairOperator.EQUALSDOT));
 
-        this.unify(eq, fc);
+        System.out.println(this.unify(eq, fc));
     }