step 4 skeleton

Florian Steurer 2015-11-22 21:57:16 +01:00
parent 6fd382580a
commit e2ba4490b1
5 changed files with 69 additions and 2 deletions

View File

@@ -27,5 +27,5 @@ public interface IRuleSet {
     public Optional<MPair> adaptExt(MPair pair);
     public Optional<MPair> adaptSup(MPair pair);
-    public Optional<Unifier> subst(MPair pair);
+    public Set<MPair> subst(Set<MPair> pair);
 }

View File

@@ -0,0 +1,8 @@
+package de.dhbwstuttgart.typeinference.unify.interfaces;
+
+import java.util.List;
+import java.util.Set;
+
+public interface ISetOperations {
+    <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets);
+}

View File

@@ -0,0 +1,17 @@
+package de.dhbwstuttgart.typeinference.unifynew;
+
+import java.util.List;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
+
+public class GuavaSetOperations implements ISetOperations {
+
+    @Override
+    public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
+        return Sets.cartesianProduct(sets);
+    }
+}
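As a quick sanity check on the new wrapper, the snippet below exercises it through Guava's Sets.cartesianProduct. GuavaSetOperations and ISetOperations are the types introduced in this commit; the demo class, its main method, and the integer sets are made up purely for illustration.

import java.util.Arrays;
import java.util.List;
import java.util.Set;

import com.google.common.collect.ImmutableSet;

import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unifynew.GuavaSetOperations;

public class CartesianProductDemo {
    public static void main(String[] args) {
        ISetOperations setOps = new GuavaSetOperations();

        // Two small input sets; the product enumerates every combination.
        Set<Integer> lefts = ImmutableSet.of(1, 2);
        Set<Integer> rights = ImmutableSet.of(10, 20, 30);

        Set<List<Integer>> product = setOps.cartesianProduct(Arrays.asList(lefts, rights));

        // 2 * 3 = 6 tuples: [1, 10], [1, 20], [1, 30], [2, 10], [2, 20], [2, 30]
        product.forEach(System.out::println);
    }
}

Guava computes the product lazily as a view over the input sets, so the wrapper itself stays cheap; the exponential cost only shows up when the full product is iterated.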

View File

@@ -516,7 +516,7 @@ public class RuleSet implements IRuleSet{
     }
 
     @Override
-    public Optional<Unifier> subst(MPair pair) {
+    public Set<MPair> subst(Set<MPair> pair) {
         // TODO Auto-generated method stub
         return null;
     }

View File

@ -1,20 +1,26 @@
package de.dhbwstuttgart.typeinference.unifynew; package de.dhbwstuttgart.typeinference.unifynew;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.Menge; import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair; import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException; import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet; import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typinference.unify.model.MPair; import de.dhbwstuttgart.typinference.unify.model.MPair;
import de.dhbwstuttgart.typinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typinference.unify.model.PlaceholderType; import de.dhbwstuttgart.typinference.unify.model.PlaceholderType;
/** /**
* Implementierung des Unifikationsalgorithmus. * Implementierung des Unifikationsalgorithmus.
* @author Florian Steurer * @author Florian Steurer
@@ -25,18 +31,21 @@ public class Unify {
         /*
          * Preparations: Create Mapping
          */
         Set<MPair> eq0 = null;
 
         /*
          * Step 1: Repeated application of reduce, adapt, erase, swap
          */
         eq0 = applyTypeUnificationRules(eq0, fc);
 
         /*
          * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
          */
         Set<MPair> eq1s = new HashSet<>();
         Set<MPair> eq2s = new HashSet<>();
         splitEq(eq0, eq1s, eq2s);
@@ -45,10 +54,39 @@ public class Unify {
          * Step 4: Magic
          */
 
+        // Sets that originate from pair pattern matching
+        // Sets of the "second level"
+        Set<List<Set<MPair>>> pairSets = new HashSet<List<Set<MPair>>>();
+        for(MPair pair : eq2s)
+            pairSets.add(calculateSets(pair));
+
+        // The sets of the "first level"
+        Set<Set<MPair>> sets = new HashSet<Set<MPair>>();
+
+        // Add Eq1'
+        sets.add(eq1s);
+
+        // Add the set of [a =. Theta | (a =. Theta) in Eq2']
+        sets.add(eq2s.stream()
+            .filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
+            .collect(Collectors.toSet()));
+
+        /*
+         * Around here, filters for pairs and sets can be applied
+         */
+
+        ISetOperations setOps = new GuavaSetOperations();
+        for(List<Set<MPair>> pairSet : pairSets)
+            setOps.cartesianProduct(pairSet).forEach(x -> sets.add(new HashSet<MPair>(x)));
+
+        // Check whether addAll is correct or whether there should actually be only one set here
+        Set<List<MPair>> eqsSet = setOps.cartesianProduct(new ArrayList<>(sets));
+
         /*
          * Step 5: Repeated substitution
          */
 
         /*
          * Step 6: a) Restart for pairs where subst was applied
          *         b) Union over everything
@@ -61,6 +99,10 @@ public class Unify {
         throw new NotImplementedException();
     }
 
+    protected List<Set<MPair>> calculateSets(MPair pair) {
+        return null;
+    }
+
     protected void splitEq(Set<MPair> eq, Set<MPair> eq1s, Set<MPair> eq2s) {
         for(MPair pair : eq)
             if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
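Beyond the literal diff, a hypothetical, self-contained sketch of the set machinery that Step 4 wires together may help. Plain strings stand in for MPair, the input values and the class name StepFourSketch are invented, and ImmutableSet is used only for brevity; nothing here is project code except the combination structure, which mirrors the diff above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

// Hypothetical walk-through of the Step 4 construction with strings instead of MPair.
public class StepFourSketch {
    public static void main(String[] args) {
        // First-level sets: a stand-in for Eq1' (and, in the real code, the filtered subset of Eq2').
        Set<Set<String>> sets = new HashSet<>();
        sets.add(ImmutableSet.of("a =. b"));

        // Second-level structure for one pair of Eq2': a list of alternative sets,
        // i.e. the shape the calculateSets(pair) stub is declared to return.
        List<Set<String>> pairSet = Arrays.asList(
                ImmutableSet.of("a <. Number", "a =. Integer"),
                ImmutableSet.of("b =. String"));

        // Its cartesian product picks one alternative per position; each resulting
        // tuple becomes another first-level set, mirroring the forEach in the diff.
        Sets.cartesianProduct(pairSet).forEach(x -> sets.add(new HashSet<String>(x)));

        // The outer cartesian product then picks one element from every first-level set,
        // enumerating the candidate combinations that Step 5 would substitute on.
        // Here: sets of sizes 1, 2 and 2 give 1 * 2 * 2 = 4 candidate lists.
        Set<List<String>> eqsSet = Sets.cartesianProduct(new ArrayList<>(sets));
        eqsSet.forEach(System.out::println);
    }
}

The open question flagged in the diff (whether the per-tuple set conversion is right or whether only one set should result) is deliberately left untouched here; the sketch only reproduces the two product levels as committed.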