forked from JavaTX/JavaCompilerCore
application of the rules
This commit is contained in:
parent a263ba5fd4
commit 84641d4abf
@@ -1,14 +1,20 @@
package de.dhbwstuttgart.typeinference.unifynew;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Optional;
import java.util.Set;

import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typinference.unify.model.MPair;
import de.dhbwstuttgart.typinference.unify.model.PlaceholderType;
/**
 * Implementation of the unification algorithm.
@@ -26,16 +32,15 @@ public class Unify {
        /*
         * Step 1: Repeated application of reduce, adapt, erase, swap
         */
        Set<MPair> eq1 = applyTypeUnificationRules(eq0, fc);
        eq0 = applyTypeUnificationRules(eq0, fc);

        /*
         * Step 2: Create subset of pairs where both sides are TPH
         */

        /*
         * Step 3: Create subset of pairs that were not included in Step 2
         * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
         */
        Set<MPair> eq1s = new HashSet<>();
        Set<MPair> eq2s = new HashSet<>();
        splitEq(eq0, eq1s, eq2s);

        /*
         * Step 4: Magic
@@ -57,60 +62,84 @@ public class Unify {
        throw new NotImplementedException();
    }

    private LinkedHashSet<MPair> applyTypeUnificationRules(Set<MPair> eq, IFiniteClosure fc) {
    private void splitEq(Set<MPair> eq, Set<MPair> eq1s, Set<MPair> eq2s) {
        for(MPair pair : eq)
            if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
                eq1s.add(pair);
            else
                eq2s.add(pair);
    }

    private Set<MPair> applyTypeUnificationRules(Set<MPair> eq, IFiniteClosure fc) {

        /*
         * Strategy for better performance
         * Rule Application Strategy:
         *
         * 1. Erase all erasable pairs
         * 1. Swap all pairs and erase all erasable pairs
         * 2. Apply all possible rules to a single pair, then move it to the result set.
         *    Iterating over pairs first, then iterating over rules prevents the algorithm
         *    from trying to apply rules to a "finished" pair over and over.
         *    Iterating over pairs first, then iterating over rules prevents the application
         *    of rules to a "finished" pair over and over.
         * 2.1 Apply all rules repeatedly except for erase rules. If
         *     the application of a rule creates new pairs, check immediately
         *     against the erase rules.
         *
         * Rule does not work like this
         * 2.2 Always use the ordering (IComparable) of the mapped types as the permutation.
         *     This saves the time needed to generate and test permutations.
         */

        LinkedHashSet<MPair> targetSet = new LinkedHashSet<MPair>();

        ArrayList<MPair> eqQueue = new ArrayList<>();
        LinkedList<MPair> eqQueue = new LinkedList<>();
        IRuleSet rules = new RuleSet(fc);

        /*
         * Erase all erasable pairs or add them to the queue for further processing
         * Swap all pairs and erase all erasable pairs
         */
        for(MPair pair : eq)
            if(!(erase1(pair) || erase2(pair) || erase3(pair)))
                eqQueue.add(pair);
        eq.forEach(x -> swapAddOrErase(x, rules, eqQueue));

        while(!eq.isEmpty()) {
            boolean ruleWasApplied = true;
        /*
         * Apply rules until the queue is empty
         */
        while(!eqQueue.isEmpty()) {
            MPair pair = eqQueue.getFirst();

            // ReduceUp, ReduceLow, ReduceUpLow
            Optional<MPair> opt = rules.reduceUp(pair);
            opt = opt.isPresent() ? opt : rules.reduceLow(pair);
            opt = opt.isPresent() ? opt : rules.reduceUpLow(pair);

            MPair pair = eqQueue.get(0);

            while(ruleWasApplied) {
                ruleWasApplied = false;

            // One of the rules has been applied
            if(opt.isPresent()) {
                swapAddOrErase(opt.get(), rules, eqQueue);
                continue;
            }

            // Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
            Optional<Set<MPair>> optSet = rules.reduce1(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair);
            optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);

            // One of the rules has been applied
            if(optSet.isPresent()) {
                optSet.get().forEach(x -> swapAddOrErase(x, rules, eqQueue));
                continue;
            }

            // TODO adapt Rules

            // None of the rules has been applied
            targetSet.add(pair);
        }

        throw new NotImplementedException();
        return targetSet;
    }

    private boolean erase1(MPair pair) {
        return true;
    }

    private boolean erase2(MPair pair) {
        return true;
    }

    private boolean erase3(MPair pair) {
        return true;
    private void swapAddOrErase(MPair pair, IRuleSet rules, Collection<MPair> collection) {
        Optional<MPair> opt = rules.swap(pair);
        MPair pair2 = opt.isPresent() ? opt.get() : pair;

        if(rules.erase1(pair2) || rules.erase3(pair2) || rules.erase2(pair2))
            return;

        collection.add(pair2);
    }
}
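The rewritten `applyTypeUnificationRules` follows the strategy spelled out in its comment: each pair is first swapped into a canonical orientation and dropped if an erase rule applies (`swapAddOrErase`); the remaining pairs are processed on a worklist, chaining the reduce rules through `Optional` until no rule fires, at which point the pair is moved to the result set. Below is a minimal, self-contained sketch of that control flow under simplified assumptions: `Pair`, `Rules`, and the string-based swap/erase/reduce rules are hypothetical stand-ins for the repository's `MPair` and `IRuleSet`, and only a single reduce rule is modelled.

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Set;

/**
 * Hypothetical toy model of the worklist strategy in applyTypeUnificationRules:
 * pairs are swapped, erasable pairs are dropped, and reduce rules are chained
 * via Optional until no rule applies.
 */
public class RuleLoopSketch {

    /** Stand-in for MPair: an equation between two "types" given as strings. */
    record Pair(String lhs, String rhs) {}

    /** Stand-in for IRuleSet with three illustrative rules. */
    static class Rules {
        /** swap: put the placeholder (lower-case name) on the left. */
        Optional<Pair> swap(Pair p) {
            boolean lhsIsPlaceholder = Character.isLowerCase(p.lhs().charAt(0));
            boolean rhsIsPlaceholder = Character.isLowerCase(p.rhs().charAt(0));
            if (!lhsIsPlaceholder && rhsIsPlaceholder)
                return Optional.of(new Pair(p.rhs(), p.lhs()));
            return Optional.empty();
        }

        /** erase: a pair of two identical types carries no information. */
        boolean erase(Pair p) {
            return p.lhs().equals(p.rhs());
        }

        /** reduce: List<X> = List<Y> is reduced to X = Y. */
        Optional<List<Pair>> reduce(Pair p) {
            if (p.lhs().startsWith("List<") && p.rhs().startsWith("List<")) {
                String a = p.lhs().substring(5, p.lhs().length() - 1);
                String b = p.rhs().substring(5, p.rhs().length() - 1);
                return Optional.of(List.of(new Pair(a, b)));
            }
            return Optional.empty();
        }
    }

    /** Swap a pair, drop it if erasable, otherwise enqueue it. */
    static void swapAddOrErase(Pair pair, Rules rules, List<Pair> queue) {
        Pair p = rules.swap(pair).orElse(pair);
        if (rules.erase(p))
            return;
        queue.add(p);
    }

    /** Worklist loop: apply rules per pair, then move the finished pair to the result. */
    static Set<Pair> applyRules(List<Pair> eq, Rules rules) {
        Set<Pair> result = new LinkedHashSet<>();
        LinkedList<Pair> queue = new LinkedList<>();
        eq.forEach(p -> swapAddOrErase(p, rules, queue));

        while (!queue.isEmpty()) {
            Pair pair = queue.removeFirst();   // the pair is removed before processing
            Optional<List<Pair>> reduced = rules.reduce(pair);
            if (reduced.isPresent()) {         // a rule fired: re-check the new pairs immediately
                reduced.get().forEach(p -> swapAddOrErase(p, rules, queue));
                continue;
            }
            result.add(pair);                  // no rule applies: the pair is "finished"
        }
        return result;
    }

    public static void main(String[] args) {
        List<Pair> eq = new ArrayList<>(List.of(
                new Pair("List<List<Integer>>", "List<List<a>>"),
                new Pair("Integer", "b"),
                new Pair("String", "String")));
        // Prints: [Pair[lhs=b, rhs=Integer], Pair[lhs=a, rhs=Integer]]
        System.out.println(applyRules(eq, new Rules()));
    }
}
```

One detail differs deliberately: the sketch dequeues each pair with `removeFirst()` before processing it, whereas the hunk above only reads it with `getFirst()`/`get(0)`; the removal step is not visible in the shown diff.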