forked from JavaTX/JavaCompilerCore
step 5 and 6 implemented
This commit is contained in:
parent 8eecda2a8f
commit 039dd3b3f4
@@ -27,5 +27,5 @@ public interface IRuleSet {
     public Optional<MPair> adaptExt(MPair pair);
     public Optional<MPair> adaptSup(MPair pair);
 
-    public Set<MPair> subst(Set<MPair> pair);
+    public Optional<Set<MPair>> subst(Set<MPair> pair);
 }
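For context: the Optional return type lets a caller of subst tell "rule applied, here is the rewritten pair set" apart from "nothing changed". Below is a minimal, self-contained sketch of that calling contract, with plain strings standing in for MPair; all names in it are made up for illustration and are not project API.

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;

// Sketch of the Optional-based contract: empty means "subst did not fire",
// a present value carries the rewritten set of pairs.
class SubstContractSketch {

    // Hypothetical stand-in for IRuleSet.subst, working on strings instead of MPair.
    static Optional<Set<String>> subst(Set<String> pairs) {
        if (!pairs.contains("a =. Integer"))
            return Optional.empty();                 // rule not applicable, signal "no change"
        Set<String> rewritten = new HashSet<>(pairs);
        rewritten.remove("List<a> <. List<b>");
        rewritten.add("List<Integer> <. List<b>");   // pretend "a" was substituted by Integer
        return Optional.of(rewritten);
    }

    public static void main(String[] args) {
        Set<String> eq = new HashSet<>(Set.of("a =. Integer", "List<a> <. List<b>"));
        // The caller pattern used later in Unify: restart on change, keep the set otherwise.
        subst(eq).ifPresentOrElse(
                changed -> System.out.println("changed, restart with: " + changed),
                () -> System.out.println("unchanged, keep: " + eq));
    }
}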
@@ -1,6 +1,7 @@
 package de.dhbwstuttgart.typeinference.unifynew;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -519,12 +520,24 @@ public class RuleSet implements IRuleSet{
     }
 
     @Override
-    public Set<MPair> subst(Set<MPair> pairs) {
-        HashSet<Type> allTypes = new HashSet<>();
-        pairs.forEach(x -> { allTypes.add(x.getLhsType()); allTypes.add(x.getRhsType()); });
+    public Optional<Set<MPair>> subst(Set<MPair> pairs) {
+        HashMap<Type, Integer> typeMap = new HashMap<>();
+
+        for(MPair pair : pairs) {
+            Type t1 = pair.getLhsType();
+            Type t2 = pair.getRhsType();
+            if(!typeMap.containsKey(t1))
+                typeMap.put(t1, 0);
+            if(!typeMap.containsKey(t2))
+                typeMap.put(t2, 0);
+            typeMap.put(t1, typeMap.get(t1)+1);
+            typeMap.put(t2, typeMap.get(t2)+1);
+        }
+
         Queue<MPair> result = new LinkedList<MPair>(pairs);
 
         boolean applied = false;
+
         for(int i = 0; i < result.size(); i++) {
             MPair pair = result.poll();
             Type lhsType;
@@ -532,21 +545,16 @@ public class RuleSet implements IRuleSet{
             if(pair.getPairOp() == PairOperator.EQUALSDOT
                     && ((lhsType = pair.getLhsType()) instanceof PlaceholderType)
                     && !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
-                    && occursInSet(lhsType, allTypes)
+                    && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
                     && !occurs(lhsType, rhsType)) {
                 Unifier uni = new Unifier(lhsType, rhsType);
                 result = result.stream().map(uni::apply).collect(Collectors.toCollection(LinkedList::new));
                 applied = true;
             }
             result.add(pair);
         }
-        return new HashSet<>(result);
-    }
-
-    private boolean occursInSet(Type t, Set<Type> types) {
-        int origSize = types.size();
-        types.add(t);
-        return types.size() == origSize;
+
+        return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
     }
 
     private boolean occurs(Type t1, Type t2) {
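As a reading aid for the counting loop above: every pair contributes one occurrence for its left-hand and one for its right-hand type, and the substitution later only fires for a placeholder whose count is greater than 1, i.e. one that also occurs outside the pair that defines it. A compact, self-contained sketch of just that bookkeeping, with strings standing in for Type and a hypothetical Pair record in place of MPair:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Occurrence count as built by the new subst: one increment per side of every pair.
class OccurrenceCountSketch {

    record Pair(String lhs, String rhs) {}   // simplified stand-in for MPair

    static Map<String, Integer> countOccurrences(List<Pair> pairs) {
        Map<String, Integer> typeMap = new HashMap<>();
        for (Pair p : pairs) {
            typeMap.merge(p.lhs(), 1, Integer::sum);   // same effect as the containsKey/put bookkeeping
            typeMap.merge(p.rhs(), 1, Integer::sum);
        }
        return typeMap;
    }

    public static void main(String[] args) {
        List<Pair> pairs = List.of(
                new Pair("a", "Integer"),   // a =. Integer
                new Pair("a", "b"),         // a =. b
                new Pair("b", "Double"));   // b =. Double
        // Counts: a=2, b=2, Integer=1, Double=1. With a count of 2, "a" occurs in more
        // pairs than just the one currently being looked at, so substituting it can
        // actually simplify the remaining pairs.
        System.out.println(countOccurrences(pairs));
    }
}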
@@ -30,7 +30,7 @@ import de.dhbwstuttgart.typinference.unify.model.Type;
  */
 public class Unify {
 
-    public Menge<Menge<Pair>> unify(Set<MPair> eq, IFiniteClosure fc) {
+    public Set<Set<MPair>> unify(Set<MPair> eq, IFiniteClosure fc) {
         /*
          * Step 1: Repeated application of reduce, adapt, erase, swap
          */
@@ -77,37 +77,47 @@ public class Unify {
         ISetOperations setOps = new GuavaSetOperations();
 
         // Calculate the cartesian products
-        Set<Set<MPair>> result = setOps.cartesianProduct(sets).stream().map(x -> new HashSet<MPair>(x)).collect(Collectors.toCollection(HashSet::new));
-
-        System.out.println(result);
+        Set<Set<MPair>> result = setOps.cartesianProduct(sets).stream()
+                .map(x -> new HashSet<MPair>(x)).collect(Collectors.toCollection(HashSet::new));
+        //System.out.println(result);
 
         /*
          * Step 5: Substitution
          */
 
         /*
          * TODO
          * In the paper this is called Eq'', i.e. it assumes a set within a set within a set.
          * Because of the flat cartesian product, however, there are only sets within sets here.
          * Is that correct?
          */
 
+        IRuleSet rules = new RuleSet(fc);
+        Set<Set<MPair>> changed = new HashSet<>();
+        Set<Set<MPair>> unchanged = new HashSet<>();
+
+        for(Set<MPair> eqss : result) {
+            Optional<Set<MPair>> newEqss = rules.subst(eqss);
+            if(newEqss.isPresent())
+                changed.add(newEqss.get());
+            else
+                unchanged.add(eqss);
+        }
 
         /*
-         * Step 6: a) Restart for pairs where subst was applied
-         *            b) Union over everything
+         * Step 6 a) Restart for pairs where subst was applied
+         *           b) Build the union over everything
          */
+        for(Set<MPair> eqss : changed)
+            unchanged.addAll(this.unify(eqss, fc));
 
         /*
          * Step 7: Filter result for solved pairs
          */
 
-        return null;
+        return unchanged;
 
     }
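The control flow of steps 5 and 6 above can be summarized as: try subst on every candidate set, restart unification on the sets that changed, keep the others, and return the union. A small, self-contained sketch of that loop structure with simplified types (Set<String> in place of Set<MPair>, a made-up subst, no finite closure); it only mirrors the shape of the driver, not the project's API:

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;

// Step 5/6 driver shape: substitute, restart the changed sets, union everything.
class Step5And6Sketch {

    // Hypothetical subst: rewrites "x" to "Integer" once, otherwise reports "no change".
    static Optional<Set<String>> subst(Set<String> eqs) {
        if (!eqs.contains("x")) return Optional.empty();
        Set<String> rewritten = new HashSet<>();
        eqs.forEach(s -> rewritten.add(s.equals("x") ? "Integer" : s));
        return Optional.of(rewritten);
    }

    // Takes the candidate sets (in the real code these come out of the cartesian product).
    static Set<Set<String>> unify(Set<Set<String>> candidates) {
        Set<Set<String>> changed = new HashSet<>();
        Set<Set<String>> unchanged = new HashSet<>();

        for (Set<String> eqs : candidates) {          // Step 5: substitution
            Optional<Set<String>> next = subst(eqs);
            if (next.isPresent()) changed.add(next.get());
            else unchanged.add(eqs);
        }

        for (Set<String> eqs : changed)               // Step 6 a): restart where subst applied
            unchanged.addAll(unify(Set.of(eqs)));     // Step 6 b): union over everything

        return unchanged;
    }

    public static void main(String[] args) {
        Set<Set<String>> candidates = Set.of(Set.of("x", "y"), Set.of("Double"));
        // Prints both candidate sets; the one containing "x" was rewritten and re-unified.
        System.out.println(unify(candidates));
    }
}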
@@ -36,7 +36,7 @@ public class UnifyTest extends Unify {
         eq.add(new MPair(tf.getSimpleType("Double"), tf.getPlaceholderType("B"), PairOperator.SMALLERDOT));
         //eq.add(new MPair(tf.getPlaceholderType("B"), tf.getSimpleType("Object"), PairOperator.EQUALSDOT));
 
-        this.unify(eq, fc);
+        System.out.println(this.unify(eq, fc));
 
     }
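Since unify now returns Set<Set<MPair>>, the println in the test dumps a set of candidate solutions in one line. A small sketch of how such a nested result could be inspected more readably, again with strings standing in for MPair and not taken from the project:

import java.util.Set;

// Each inner set is one possible solution; printing them line by line is easier to read
// than the single-line output of println on the nested set.
class ResultPrintingSketch {
    public static void main(String[] args) {
        Set<Set<String>> unifyResult = Set.of(
                Set.of("A =. Integer", "B =. Double"),
                Set.of("A =. Number", "B =. Number"));

        int i = 1;
        for (Set<String> candidate : unifyResult)
            System.out.println("candidate " + (i++) + ": " + candidate);
    }
}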