forked from JavaTX/JavaCompilerCore
Merge branch 'unify-test' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into unify-test
commit 95043455fa
@@ -103,7 +103,7 @@ public class JavaTXCompiler {
 		return new ArrayList<>(allClasses);
 	}
 
-	public List<ResultSet> typeInference() throws ClassNotFoundException {
+	public List<ResultSet> typeInferenceOld() throws ClassNotFoundException {
 		List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
 		//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
 		for(SourceFile sf : this.sourceFiles.values()) {
@@ -176,10 +176,7 @@ public class JavaTXCompiler {
 				}
 			}
 			return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
-		})
-		/* PL 2018-11-07 wird in varianceInheritance erledigt
-		.map( y -> {
-
+		}).map( y -> {
 			if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
 				if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
 					((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
@@ -189,20 +186,16 @@ public class JavaTXCompiler {
 				}
 			}
 			return y; } )
-		*/
 			.collect(Collectors.toCollection(HashSet::new));
 		varianceInheritance(xConsSet);
 		Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
 		//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
 		System.out.println("RESULT: " + result);
 		logFile.write("RES: " + result.toString()+"\n");
 		logFile.flush();
 		results.addAll(result);
 		}
 
 		results = results.stream().map(x -> {
 			Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
 				if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
@@ -223,10 +216,11 @@ public class JavaTXCompiler {
 	}
 
 	/**
-	 * Vererbt alle Variancen
+	 * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
+	 * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
 	 * @param eq The set of constraints
 	 */
 	private void varianceInheritance(Set<UnifyPair> eq) {
 		Set<PlaceholderType> usedTPH = new HashSet<>();
 		Set<PlaceholderType> phSet = eq.stream().map(x -> {
 			Set<PlaceholderType> pair = new HashSet<>();
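[Editorial note] The reworded Javadoc above documents the rule that a non-zero variance on a placeholder a in a pair (a <. Theta) or (Theta <. a) is pushed onto every type variable inside Theta. A minimal, self-contained sketch of that fixpoint follows; the TPH and Pair classes are simplified stand-ins, not the project's PlaceholderType/UnifyPair/distributeVariance API.

import java.util.*;

class VarianceSketch {
    static final class TPH {                 // stand-in for PlaceholderType
        final String name;
        byte variance;                       // 1 = covariant, -1 = contravariant, 0 = unknown
        TPH(String name) { this.name = name; }
    }
    static final class Pair {                // stand-in for a pair lhs <. Theta
        final TPH lhs; final List<TPH> rhsVars;
        Pair(TPH lhs, List<TPH> rhsVars) { this.lhs = lhs; this.rhsVars = rhsVars; }
    }

    // Propagate variance from every already-tagged variable until a fixpoint,
    // mirroring the worklist over phSetVariance in varianceInheritance.
    static void inherit(List<Pair> eq) {
        boolean changed = true;
        while (changed) {
            changed = false;
            for (Pair p : eq) {
                if (p.lhs.variance != 0) {
                    for (TPH t : p.rhsVars) {
                        if (t.variance == 0) { t.variance = p.lhs.variance; changed = true; }
                    }
                }
            }
        }
    }

    public static void main(String[] args) {
        TPH a = new TPH("a"); a.variance = 1;
        TPH b = new TPH("b"); TPH c = new TPH("c");
        inherit(List.of(new Pair(a, List.of(b)), new Pair(b, List.of(c))));
        System.out.println(b.variance + " " + c.variance); // 1 1: variance flows transitively
    }
}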
@@ -252,6 +246,171 @@ public class JavaTXCompiler {
 			phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
 		}
 	}
 
+	public List<ResultSet> typeInference() throws ClassNotFoundException {
+		List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
+		//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
+		for(SourceFile sf : this.sourceFiles.values()) {
+			allClasses.addAll(getAvailableClasses(sf));
+			allClasses.addAll(sf.getClasses());
+		}
+
+		final ConstraintSet<Pair> cons = getConstraints();
+
+		FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses);
+		System.out.println(finiteClosure);
+		ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
+
+		TypeUnify unify = new TypeUnify();
+		Set<Set<UnifyPair>> results = new HashSet<>();
+		try {
+			FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/test/logFiles/"+"log"));
+			logFile.write("FC:\\" + finiteClosure.toString()+"\n");
+			for(SourceFile sf : this.sourceFiles.values()) {
+				logFile.write(ASTTypePrinter.print(sf));
+			}
+			logFile.flush();
+
+			Set<String> paraTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().map(y -> y.getParameterList().getFormalparalist()
+					.stream().filter(z -> z.getType() instanceof TypePlaceholder)
+					.map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
+					.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) )
+					.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;} );
+
+			Set<String> returnTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
+					.map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
+
+			Set<String> fieldTypeVarNames = allClasses.stream().map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
+					.map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
+
+			returnTypeVarNames.addAll(fieldTypeVarNames);
+
+			unifyCons = unifyCons.map(x -> {
+				//Hier muss ueberlegt werden, ob
+				//1. alle Argument- und Retuntyp-Variablen in allen UnifyPairs
+				//   mit disableWildcardtable() werden.
+				//2. alle Typvariablen mit Argument- oder Retuntyp-Variablen
+				//in Beziehung auch auf disableWildcardtable() gesetzt werden muessen
+				//PL 2018-04-23
+				if ((x.getLhsType() instanceof PlaceholderType)) {
+					if (paraTypeVarNames.contains(x.getLhsType().getName())) {
+						((PlaceholderType)x.getLhsType()).setVariance((byte)1);
+						((PlaceholderType)x.getLhsType()).disableWildcardtable();
+					}
+					if (returnTypeVarNames.contains(x.getLhsType().getName())) {
+						((PlaceholderType)x.getLhsType()).setVariance((byte)-1);
+						((PlaceholderType)x.getLhsType()).disableWildcardtable();
+					}
+				}
+				if ((x.getRhsType() instanceof PlaceholderType)) {
+					if (paraTypeVarNames.contains(x.getRhsType().getName())) {
+						((PlaceholderType)x.getRhsType()).setVariance((byte)1);
+						((PlaceholderType)x.getRhsType()).disableWildcardtable();
+					}
+					if (returnTypeVarNames.contains(x.getRhsType().getName())) {
+						((PlaceholderType)x.getRhsType()).setVariance((byte)-1);
+						((PlaceholderType)x.getRhsType()).disableWildcardtable();
+					}
+				}
+				return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
+			});
+			Set<PlaceholderType> varianceTPHold;
+			Set<PlaceholderType> varianceTPH = new HashSet<>();
+			varianceTPH = varianceInheritanceConstraintSet(unifyCons);
+
+			/* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt
+			do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen
+				 //anderen Seite übertragen
+				varianceTPHold = new HashSet<>(varianceTPH);
+				varianceTPH = varianceInheritanceConstraintSet(unifyCons);
+				unifyCons.map( y -> {
+					if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
+						if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
+							((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
+						}
+						if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
+							((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
+						}
+					}
+					return y; } ); }
+			while (!varianceTPHold.equals(varianceTPH));
+			*/
+
+			//Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
+			//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
+			Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
+			System.out.println("RESULT: " + result);
+			logFile.write("RES: " + result.toString()+"\n");
+			logFile.flush();
+			results.addAll(result);
+
+			results = results.stream().map(x -> {
+				Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
+					if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
+					return y; //alle Paare a <.? b erden durch a =. b ersetzt
+				}).collect(Collectors.toCollection(HashSet::new)));
+				if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
+					return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
+				}
+				else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
+			}).collect(Collectors.toCollection(HashSet::new));
+			System.out.println("RESULT Final: " + results);
+			logFile.write("RES_FINAL: " + results.toString()+"\n");
+			logFile.flush();
+		}
+		catch (IOException e) { }
+		return results.stream().map((unifyPairs ->
+			new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
+	}
+
+	/**
+	 * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
+	 * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
+	 * @param eq The set of constraints
+	 */
+	private Set<PlaceholderType> varianceInheritanceConstraintSet(ConstraintSet<UnifyPair> cons) {
+		Set<UnifyPair> eq = cons.getAll();
+		Set<PlaceholderType> usedTPH = new HashSet<>();
+		Set<PlaceholderType> phSet = eq.stream().map(x -> {
+			Set<PlaceholderType> pair = new HashSet<>();
+			if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType());
+			if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType());
+			return pair;
+		}).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;});
+
+		ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
+		phSetVariance.removeIf(x -> (x.getVariance() == 0));
+		while(!phSetVariance.isEmpty()) {
+			PlaceholderType a = phSetVariance.remove(0);
+			usedTPH.add(a);
+			//HashMap<PlaceholderType,Integer> ht = new HashMap<>();
+			//ht.put(a, a.getVariance());
+			//ConstraintSet<UnifyPair> eq1 = cons;
+			//eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)));
+			//durch if-Abfrage im foreach geloest
+			cons.forEach(x -> {
+				if (x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)) {
+					x.getRhsType().accept(new distributeVariance(), a.getVariance());
+				}
+			});
+			//eq1 = new HashSet<>(eq);
+			//eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
+			//durch if-Abfrage im foreach geloest
+			cons.forEach(x -> {
+				if (x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)) {
+					x.getLhsType().accept(new distributeVariance(), a.getVariance());
+				}
+			});
+			phSetVariance = new ArrayList<>(phSet); //macht vermutlich keinen Sinn PL 2018-10-18, doch, es koennen neue TPHs mit Variancen dazugekommen sein PL 2018-11-07
+			phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
+		}
+		return usedTPH;
+	}
+
 	private Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
 		HashMap<String, TypePlaceholder> ret = new HashMap<>();
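[Editorial note] The new typeInference() above tags type placeholders before unification: names that occur as method parameters get variance +1, names from return and field types get -1, and wildcard tables are disabled for both. A hedged, stand-alone illustration of that tagging step; the Method record and name lists below are hypothetical stand-ins for the project's TypePlaceholder/PlaceholderType API.

import java.util.*;
import java.util.stream.*;

class VarianceTaggingSketch {
    record Method(List<String> paramTphNames, String returnTphName) {}

    public static void main(String[] args) {
        List<Method> methods = List.of(
            new Method(List.of("P1", "P2"), "R1"),
            new Method(List.of("P3"), "R2"));

        // Same reduce-into-one-set pattern as paraTypeVarNames in the diff.
        Set<String> paraNames = methods.stream()
            .map(m -> new HashSet<>(m.paramTphNames()))
            .reduce(new HashSet<>(), (a, b) -> { a.addAll(b); return a; });
        Set<String> returnNames = methods.stream()
            .map(Method::returnTphName)
            .collect(Collectors.toCollection(HashSet::new));

        Map<String, Byte> variance = new HashMap<>();
        paraNames.forEach(n -> variance.put(n, (byte) 1));    // parameters: +1
        returnNames.forEach(n -> variance.put(n, (byte) -1)); // returns/fields: -1
        System.out.println(variance);
    }
}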
@@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 import java.util.*;
+import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
@@ -52,4 +53,29 @@ public class ConstraintSet<A> {
 		ret.oderConstraints = newOder;
 		return ret;
 	}
+
+	public void forEach (Consumer<? super A> c) {
+		undConstraints.stream().forEach(c);
+		for(Set<Constraint<A>> oderConstraint : oderConstraints){
+			oderConstraint.parallelStream().forEach((Constraint<A> as) ->
+				as.stream().forEach(c));
+		}
+	}
+
+	public Set<A> getAll () {
+		Set<A> ret = new HashSet<>();
+		ret.addAll(undConstraints);
+		for(Set<Constraint<A>> oderConstraint : oderConstraints){
+			oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
+		}
+		return ret;
+	}
+
+	public List<Set<Constraint<A>>> getOderConstraints() {
+		return oderConstraints;
+	}
+
+	public Set<A> getUndConstraints() {
+		return undConstraints;
+	}
 }
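[Editorial note] The methods added above traverse both halves of the constraint set: getAll() flattens the conjunctive (und) constraints together with every alternative of every disjunctive (oder) constraint. One observation: filling a shared HashSet from parallelStream().forEach, as getAll() does in the diff, is not thread-safe; a sequential loop avoids that. A minimal model of the intended traversal, with plain sets standing in for the project's Constraint<A> subclass:

import java.util.*;
import java.util.function.Consumer;

class ConstraintSetSketch<A> {
    Set<A> undConstraints = new HashSet<>();
    List<Set<Set<A>>> oderConstraints = new ArrayList<>(); // each inner Set<A> is one alternative

    void forEach(Consumer<? super A> c) {
        undConstraints.forEach(c);
        for (Set<Set<A>> oder : oderConstraints)
            for (Set<A> alternative : oder)
                alternative.forEach(c);
    }

    Set<A> getAll() {
        Set<A> ret = new HashSet<>(undConstraints);
        for (Set<Set<A>> oder : oderConstraints)
            for (Set<A> alternative : oder)
                ret.addAll(alternative);     // sequential, so the HashSet is safe
        return ret;
    }
}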
@@ -228,6 +228,13 @@ public class TYPEStmt implements StatementVisitor{
 				binary.operation.equals(BinaryExpr.Operator.ADD)||
 				binary.operation.equals(BinaryExpr.Operator.SUB)){
 			Set<Constraint<Pair>> numericAdditionOrStringConcatenation = new HashSet<>();
+
+			// TODO PL 2018-11-06
+			// Auf importierte Typen einschraenken
+			// pruefen, ob Typen richtig bestimmt werden.
+
+
+
 			//Zuerst der Fall für Numerische AusdrücPairOpnumericeratorke, das sind Mul, Mod und Div immer:
 			//see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
@@ -1,9 +1,11 @@
 package de.dhbwstuttgart.typeinference.unify;
 
 import java.io.FileWriter;
+import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
 
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
@@ -21,5 +23,11 @@ public class TypeUnify {
 		Set<Set<UnifyPair>> res = unifyTask.compute();
 		return res;
 	}
+
+	public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
+		Set<Set<UnifyPair>> res = unifyTask.compute();
+		return res;
+	}
+
 }
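[Editorial note] Like the existing entry point above it, the new unifyOderConstraints builds a TypeUnifyTask and calls compute() directly rather than forking it, so the caller gets the result synchronously. A generic sketch of that pattern under stand-in types (not the project's API):

import java.util.concurrent.RecursiveTask;

class SyncWrapperSketch {
    static class Task extends RecursiveTask<String> {
        @Override protected String compute() { return "result"; }
    }

    // Mirrors unifyOderConstraints: construct the task, invoke compute()
    // directly, and return its result without going through a ForkJoinPool.
    static String unify() { return new Task().compute(); }

    public static void main(String[] args) { System.out.println(unify()); }
}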
@@ -18,6 +18,7 @@ import java.util.function.BinaryOperator;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
 import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
@@ -71,7 +72,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	 */
 	protected IRuleSet rules;
 
-	protected Set<UnifyPair> eq;
+	protected Set<UnifyPair> eq; //und-constraints
+
+	protected List<Set<Set<UnifyPair>>> oderConstraintsField;
 
 	protected IFiniteClosure fc;
 
@@ -85,6 +88,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
 	Integer noAllErasedElements = 0;
 
+	static Integer noou = 0;
+
 	static int noBacktracking;
 
 	public TypeUnifyTask() {
@@ -101,6 +106,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		rules = new RuleSet(logFile);
 	}
 
+	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
+		this.eq = eq;
+		//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+		this.oderConstraintsField = oderConstraints.stream().map(x -> {
+			Set<Set<UnifyPair>> ret = new HashSet<>();
+			for (Constraint<UnifyPair> y : x) {
+				ret.add(new HashSet<>(y));
+			}
+			return ret;
+		}).collect(Collectors.toCollection(ArrayList::new));
+
+		//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+		this.fc = fc;
+		this.oup = new OrderingUnifyPair(fc);
+		this.parallel = parallel;
+		this.logFile = logFile;
+		this.log = log;
+		rules = new RuleSet(logFile);
+	}
+
 	/**
 	 * Vererbt alle Variancen
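[Editorial note] The stream one-liner left commented out in the constructor above cannot compile: Collectors.toSet() and Collectors.toList() take no arguments, which is presumably why the explicit loop replaced it. A working stream equivalent would use Collectors.toCollection, as in this self-contained sketch (Constraint modelled here as a plain HashSet subclass, not the project's class):

import java.util.*;
import java.util.stream.Collectors;

class ConstraintCopySketch {
    static class Constraint<A> extends HashSet<A> {}

    static <A> List<Set<Set<A>>> deepCopy(List<Set<Constraint<A>>> oderConstraints) {
        return oderConstraints.stream()
                .map(x -> {
                    Set<Set<A>> alternatives = x.stream()
                            .map(y -> (Set<A>) new HashSet<>(y))   // copy each alternative
                            .collect(Collectors.toCollection(HashSet::new));
                    return alternatives;
                })
                .collect(Collectors.toCollection(ArrayList::new));
    }

    public static void main(String[] args) {
        Constraint<String> c = new Constraint<>(); c.add("a <. b");
        System.out.println(deepCopy(List.of(Set.of(c))));
    }
}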
@@ -134,260 +158,33 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 			}
 		}
 	 */
-	@Override
 	protected Set<Set<UnifyPair>> compute() {
-		Set<Set<UnifyPair>> res = unify(eq, fc, parallel);
+		Set<UnifyPair> neweq = new HashSet<>(eq);
+		/* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
+		oderConstraintsField.stream()
+			.filter(x -> x.size()==1)
+			.map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
+		ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
+			.filter(x -> x.size()>1)
+			.collect(Collectors.toCollection(ArrayList::new));
+		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, 0);
 		if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
 		else return res;
 	}
-	/**
-	 * Computes all principal type unifiers for a set of constraints.
-	 * @param eq The set of constraints
-	 * @param fc The finite closure
-	 * @return The set of all principal type unifiers
-	 */
-	protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
+	/*
+	@Override
+	protected Set<Set<UnifyPair>> compute() {
+		Set<Set<UnifyPair>> fstElems = new HashSet<>();
+		fstElems.add(eq);
+		Set<Set<UnifyPair>> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel);
+		if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
+		else return res;
-		//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
-		//		).collect(Collectors.toCollection(HashSet::new));
-		//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
-		//if (aas.isEmpty()) {
-		//	System.out.println("");
-		//}
-		/*
-		 * Step 1: Repeated application of reduce, adapt, erase, swap
-		 */
-		nOfUnify++;
-		writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
-		//eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
-
-		/*
-		 * Variancen auf alle Gleichungen vererben
-		 */
-		//PL 2018-05-17 nach JavaTXCompiler verschoben
-		//varianceInheritance(eq);
-
-		/*
-		 * ? extends ? extends Theta rausfiltern
-		 */
-		Set<UnifyPair> doubleExt = eq.stream().filter(x -> (x.wrongWildcard())).map(x -> { x.setUndefinedPair(); return x;})
-				.collect(Collectors.toCollection(HashSet::new));
-		if (doubleExt.size() > 0) {
-			Set<Set<UnifyPair>> ret = new HashSet<>();
-			ret.add(doubleExt);
-			return ret;
-		}
-
-		Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
-
-		/*
-		 * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
-		 */
-		Set<UnifyPair> eq1s = new HashSet<>();
-		Set<UnifyPair> eq2s = new HashSet<>();
-		splitEq(eq0, eq1s, eq2s);
-
-		/*
-		 * Step 4: Create possible typings
-		 *
-		 * "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
-		 * mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
-		 */
-
-		// There are up to 10 toplevel set. 8 of 10 are the result of the
-		// cartesian product of the sets created by pattern matching.
-		List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
-
-		//System.out.println(eq2s);
-
-		if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-			Set<Set<UnifyPair>> wrap = new HashSet<>();
-			wrap.add(eq1s);
-			topLevelSets.add(wrap); // Add Eq1'
-		}
-
-		// Add the set of [a =. Theta | (a=. Theta) in Eq2']
-		Set<UnifyPair> bufferSet = eq2s.stream()
-				.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
-				.collect(Collectors.toSet());
-
-		if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
-			Set<Set<UnifyPair>> wrap = new HashSet<>();
-			wrap.add(bufferSet);
-			topLevelSets.add(wrap);
-			eq2s.removeAll(bufferSet);
-		}
-
-		// Sets that originate from pair pattern matching
-		// Sets of the "second level"
-		Set<UnifyPair> undefinedPairs = new HashSet<>();
-		if (printtag) System.out.println("eq2s " + eq2s);
-		//writeLog("BufferSet: " + bufferSet.toString()+"\n");
-		Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
-		//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
-		//nicht ausgewertet Faculty Beispiel im 1. Schritt
-		//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
-		//Typen getestet werden.
-		if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
-		// If pairs occured that did not match one of the cartesian product cases,
-		// those pairs are contradictory and the unification is impossible.
-		if(!undefinedPairs.isEmpty()) {
-			noUndefPair++;
-			for (UnifyPair up : undefinedPairs) {
-				writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
-				writeLog("BasePair; " + up.getBasePair());
-			}
-			Set<Set<UnifyPair>> error = new HashSet<>();
-			undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
-			error.add(undefinedPairs);
-			return error;
-		}
-
-		/* Up to here, no cartesian products are calculated.
-		 * filters for pairs and sets can be applied here */
-
-		// Alternative: Sub cartesian products of the second level (pattern matched) sets
-		// "the big (x)"
-		/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
-			//System.out.println("secondLevelSet "+secondLevelSet.size());
-			List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
-			Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
-			//System.out.println("CardResult: "+cartResult.size());
-			// Flatten and add to top level sets
-			Set<Set<UnifyPair>> flat = new HashSet<>();
-			int j = 0;
-			for(List<Set<UnifyPair>> s : cartResult) {
-				j++;
-				//System.out.println("s from CardResult: "+cartResult.size() + " " + j);
-				Set<UnifyPair> flat1 = new HashSet<>();
-				for(Set<UnifyPair> s1 : s)
-					flat1.addAll(s1);
-				flat.add(flat1);
-			}
-			//topLevelSets.add(flat);
-		}
-		*/
-
-		//Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
-		for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
-			for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
-				topLevelSets.add(secondlevelelem);
-			}
-		}
-		//System.out.println(topLevelSets);
-		//System.out.println();
-
-		//Aufruf von computeCartesianRecursive ANFANG
-		return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
-
 	}
+	*/
 
-	Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
-		//Aufruf von computeCartesianRecursive ENDE
-
-		//keine Ahnung woher das kommt
-		//Set<Set<UnifyPair>> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
-
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
-		// Cartesian product over all (up to 10) top level sets
-		//Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
-		//		.stream().map(x -> new HashSet<>(x))
-		//		.collect(Collectors.toCollection(HashSet::new));
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
-
-		Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
-
-		Set<TypeUnifyTask> forks = new HashSet<>();
-
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
-		//for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
-		// Flatten the cartesian product
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
-		Set<UnifyPair> eqPrime = new HashSet<>();
-		setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
-
-		/*
-		 * Step 5: Substitution
-		 */
-		//System.out.println("vor Subst: " + eqPrime);
-		Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
-
-		/*
-		 * Step 6 a) Restart (fork) for pairs where subst was applied
-		 */
-		if(parallel) {
-			if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
-				//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
-				//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
-				//eqPrimePrime Veraenderungen in subst repraesentieren.
-				eqPrimePrimeSet.add(eqPrime);
-			else if(eqPrimePrime.isPresent()) {
-				//System.out.println("nextStep: " + eqPrimePrime.get());
-				TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log);
-				forks.add(fork);
-				fork.fork();
-			}
-			else {
-				//System.out.println("nextStep: " + eqPrime);
-				TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log);
-				forks.add(fork);
-				fork.fork();
-			}
-		}
-		else { // sequentiell (Step 6b is included)
-			if (printtag) System.out.println("nextStep: " + eqPrimePrime);
-			if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
-				//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
-				//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
-				//eqPrimePrime Veraenderungen in subst repraesentieren.
-				try {
-					if (isSolvedForm(eqPrime)) {
-						logFile.write(eqPrime.toString()+"\n");
-						logFile.flush();
-					}
-				}
-				catch (IOException e) { }
-				eqPrimePrimeSet.add(eqPrime);
-			}
-			else if(eqPrimePrime.isPresent()) {
-				Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), fc, false);
-
-				eqPrimePrimeSet.addAll(unifyres);
-			}
-			else {
-				Set<Set<UnifyPair>> unifyres = unify(eqPrime, fc, false);
-
-				eqPrimePrimeSet.addAll(unifyres);
-			}
-		}
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
-		//}
-		//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
-
-		/*
-		 * Step 6 b) Build the union over everything.
-		 */
-
-		if(parallel)
-			for(TypeUnifyTask fork : forks)
-				eqPrimePrimeSet.addAll(fork.join());
-
-		/*
-		 * Step 7: Filter empty sets;
-		 */
-		eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
-		if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet))
-			writeLog("Result1 " + eqPrimePrimeSet.toString());
-		return eqPrimePrimeSet;
-	}
-
-	Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
+	public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 		//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
 		fstElems.addAll(topLevelSets.stream()
 				.filter(x -> x.size()==1)
@@ -397,7 +194,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				.filter(x -> x.size()>1)
 				.collect(Collectors.toCollection(ArrayList::new));
 		if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
-			Set<Set<UnifyPair>> result = unify2(fstElems, eq, fc, parallel);
+			Set<UnifyPair> eq = new HashSet<>();
+			fstElems.stream().forEach(x -> eq.addAll(x));
+			Set<Set<UnifyPair>> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
 			return result;
 		}
 		Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
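[Editorial note] Both the new compute() and the hunk above implement the same preprocessing idea: a disjunction with exactly one alternative is no disjunction at all, so it is folded into the conjunctive constraint set, and only genuine disjunctions feed the cartesian-product search. A self-contained sketch with plain string sets standing in for UnifyPair:

import java.util.*;
import java.util.stream.Collectors;

class OderFoldSketch {
    public static void main(String[] args) {
        Set<String> und = new HashSet<>(Set.of("a =. Integer"));
        List<Set<Set<String>>> oder = new ArrayList<>(List.of(
                Set.of(Set.of("b <. Number")),                        // 1 alternative: fold
                Set.of(Set.of("c =. String"), Set.of("c =. Long")))); // 2 alternatives: keep

        oder.stream().filter(x -> x.size() == 1)
            .map(y -> y.iterator().next())
            .forEach(und::addAll);
        List<Set<Set<String>>> remaining = oder.stream()
            .filter(x -> x.size() > 1)
            .collect(Collectors.toCollection(ArrayList::new));

        System.out.println(und);        // conjunctive set now contains b <. Number
        System.out.println(remaining);  // only the genuine disjunction is left
    }
}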
@ -492,9 +291,507 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
|||||||
i++;
|
i++;
|
||||||
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
|
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
|
||||||
elems.add(a);
|
elems.add(a);
|
||||||
|
Set<Set<UnifyPair>> res = new HashSet<>();
|
||||||
|
if (remainingSets.isEmpty()) {
|
||||||
|
noou++;
|
||||||
|
writeLog("Vor unify Aufruf: " + eq.toString());
|
||||||
|
writeLog("No of Unify " + noou);
|
||||||
|
System.out.println(noou);
|
||||||
|
Set<UnifyPair> eq = new HashSet<>();
|
||||||
|
elems.stream().forEach(x -> eq.addAll(x));
|
||||||
|
res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
|
||||||
|
}
|
||||||
|
else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
|
||||||
|
res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe);
|
||||||
|
|
||||||
|
}
|
||||||
|
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
|
||||||
|
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||||
|
result = res;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|
||||||
|
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|
||||||
|
|| result.isEmpty()) {
|
||||||
|
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||||
|
result.addAll(res);
|
||||||
|
}
|
||||||
|
//else {
|
||||||
|
//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
|
||||||
|
// if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
|
||||||
|
// result = result;
|
||||||
|
// }
|
||||||
|
//}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
|
||||||
|
if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
|
||||||
|
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
|
||||||
|
System.out.print("");
|
||||||
|
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
|
||||||
|
if (variance == 1) {
|
||||||
|
System.out.println("");
|
||||||
|
while (nextSetasListIt.hasNext()) {
|
||||||
|
Set<UnifyPair> a_next = nextSetasListIt.next();
|
||||||
|
if (a.equals(a_next) ||
|
||||||
|
(oup.compare(a, a_next) == 1)) {
|
||||||
|
nextSetasList.remove(a_next);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
System.out.println("");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else { if (variance == -1) {
|
||||||
|
System.out.println("");
|
||||||
|
while (nextSetasListIt.hasNext()) {
|
||||||
|
Set<UnifyPair> a_next = nextSetasListIt.next();
|
||||||
|
if (a.equals(a_next) ||
|
||||||
|
(oup.compare(a, a_next) == -1)) {
|
||||||
|
nextSetasList.remove(0);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
System.out.println("");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (variance == 0) {
|
||||||
|
//break;
|
||||||
|
}}
|
||||||
|
}
|
||||||
|
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
|
||||||
|
|
||||||
|
/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
|
||||||
|
if (isUndefinedPairSetSet(res)) {
|
||||||
|
int nofstred= 0;
|
||||||
|
Set<UnifyPair> abhSubst = res.stream()
|
||||||
|
.map(b ->
|
||||||
|
b.stream()
|
||||||
|
.map(x -> x.getAllSubstitutions())
|
||||||
|
.reduce((y,z) -> { y.addAll(z); return y;}).get())
|
||||||
|
.reduce((y,z) -> { y.addAll(z); return y;}).get();
|
||||||
|
Set<UnifyPair> b = a;//effective final a
|
||||||
|
Set<UnifyPair> durchschnitt = abhSubst.stream()
|
||||||
|
.filter(x -> b.contains(x))
|
||||||
|
//.filter(y -> abhSubst.contains(y))
|
||||||
|
.collect(Collectors.toCollection(HashSet::new));
|
||||||
|
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
int len = nextSetasList.size();
|
||||||
|
Set<UnifyPair> undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results
|
||||||
|
Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
|
||||||
|
.map(x -> { Set<UnifyPair> su = x.getAllSubstitutions(); //alle benutzten Substitutionen
|
||||||
|
su.add(x.getGroundBasePair()); // urspruengliches Paar
|
||||||
|
su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen
|
||||||
|
return new Pair<>(su, x.getGroundBasePair());})
|
||||||
|
.collect(Collectors.toCollection(HashSet::new));
|
||||||
|
if (res.size() > 1) {
|
||||||
|
System.out.println();
|
||||||
|
}
|
||||||
|
nextSetasList = nextSetasList.stream().filter(x -> {
|
||||||
|
//Boolean ret = false;
|
||||||
|
//for (PlaceholderType var : vars) {
|
||||||
|
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
|
||||||
|
//}
|
||||||
|
return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist??
|
||||||
|
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
nofstred = nextSetasList.size();
|
||||||
|
//NOCH NICHT korrekt PL 2018-10-12
|
||||||
|
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
|
||||||
|
// .collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
writeLog("res (undef): " + res.toString());
|
||||||
|
writeLog("abhSubst: " + abhSubst.toString());
|
||||||
|
writeLog("a: " + a.toString());
|
||||||
|
writeLog("Durchschnitt: " + durchschnitt.toString());
|
||||||
|
writeLog("nextSet: " + nextSet.toString());
|
||||||
|
writeLog("nextSetasList: " + nextSetasList.toString());
|
||||||
|
writeLog("Number first erased Elements (undef): " + (len - nofstred));
|
||||||
|
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
|
||||||
|
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
|
||||||
|
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
|
||||||
|
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
|
||||||
|
noBacktracking++;
|
||||||
|
writeLog("Number of Backtracking: " + noBacktracking);
|
||||||
|
System.out.println("");
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
|
||||||
|
// return result;
|
||||||
|
//}
|
||||||
|
//else {
|
||||||
|
// result.removeIf(y -> isUndefinedPairSet(y));
|
||||||
|
//}
|
||||||
|
//else result.stream().filter(y -> !isUndefinedPairSet(y));
|
||||||
|
|
||||||
|
|
||||||
|
} // End of while (nextSetasList.size() > 0)
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Computes all principal type unifiers for a set of constraints.
|
||||||
|
* @param eq The set of constraints
|
||||||
|
* @param fc The finite closure
|
||||||
|
* @return The set of all principal type unifiers
|
||||||
|
*/
|
||||||
|
protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||||
|
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
|
||||||
|
// ).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
|
||||||
|
//if (aas.isEmpty()) {
|
||||||
|
// System.out.println("");
|
||||||
|
//}
|
||||||
|
|
||||||
|
//.collect(Collectors.toCollection(HashSet::new)));
|
||||||
|
/*
|
||||||
|
* Step 1: Repeated application of reduce, adapt, erase, swap
|
||||||
|
*/
|
||||||
|
rekTiefe++;
|
||||||
|
nOfUnify++;
|
||||||
|
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
|
||||||
|
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
|
||||||
|
//eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Variancen auf alle Gleichungen vererben
|
||||||
|
*/
|
||||||
|
//PL 2018-05-17 nach JavaTXCompiler verschoben
|
||||||
|
//varianceInheritance(eq);
|
||||||
|
|
||||||
|
/*
|
||||||
|
* ? extends ? extends Theta rausfiltern
|
||||||
|
*/
|
||||||
|
Set<UnifyPair> doubleExt = eq.stream().filter(x -> (x.wrongWildcard())).map(x -> { x.setUndefinedPair(); return x;})
|
||||||
|
.collect(Collectors.toCollection(HashSet::new));
|
||||||
|
if (doubleExt.size() > 0) {
|
||||||
|
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||||
|
ret.add(doubleExt);
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
|
||||||
|
*/
|
||||||
|
Set<UnifyPair> eq1s = new HashSet<>();
|
||||||
|
Set<UnifyPair> eq2s = new HashSet<>();
|
||||||
|
splitEq(eq0, eq1s, eq2s);
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 4: Create possible typings
|
||||||
|
*
|
||||||
|
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
|
||||||
|
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
|
||||||
|
*/
|
||||||
|
|
||||||
|
// There are up to 10 toplevel set. 8 of 10 are the result of the
|
||||||
|
// cartesian product of the sets created by pattern matching.
|
||||||
|
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
|
||||||
|
|
||||||
|
//System.out.println(eq2s);
|
||||||
|
|
||||||
|
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
|
||||||
|
Set<Set<UnifyPair>> wrap = new HashSet<>();
|
||||||
|
wrap.add(eq1s);
|
||||||
|
topLevelSets.add(wrap); // Add Eq1'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
|
||||||
|
Set<UnifyPair> bufferSet = eq2s.stream()
|
||||||
|
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
|
||||||
|
.collect(Collectors.toSet());
|
||||||
|
|
||||||
|
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
|
||||||
|
Set<Set<UnifyPair>> wrap = new HashSet<>();
|
||||||
|
wrap.add(bufferSet);
|
||||||
|
topLevelSets.add(wrap);
|
||||||
|
eq2s.removeAll(bufferSet);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sets that originate from pair pattern matching
|
||||||
|
// Sets of the "second level"
|
||||||
|
Set<UnifyPair> undefinedPairs = new HashSet<>();
|
||||||
|
if (printtag) System.out.println("eq2s " + eq2s);
|
||||||
|
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
|
||||||
|
List<Set<Set<UnifyPair>>> oderConstraintsOutput = new ArrayList<>(oderConstraints);
|
||||||
|
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
|
||||||
|
//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
|
||||||
|
//nicht ausgewertet Faculty Beispiel im 1. Schritt
|
||||||
|
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
|
||||||
|
//Typen getestet werden.
|
||||||
|
if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
|
||||||
|
// If pairs occured that did not match one of the cartesian product cases,
|
||||||
|
// those pairs are contradictory and the unification is impossible.
|
||||||
|
if(!undefinedPairs.isEmpty()) {
|
||||||
|
noUndefPair++;
|
||||||
|
for (UnifyPair up : undefinedPairs) {
|
||||||
|
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
|
||||||
|
writeLog("BasePair; " + up.getBasePair());
|
||||||
|
}
|
||||||
|
Set<Set<UnifyPair>> error = new HashSet<>();
|
||||||
|
undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
error.add(undefinedPairs);
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Up to here, no cartesian products are calculated.
|
||||||
|
* filters for pairs and sets can be applied here */
|
||||||
|
|
||||||
|
// Alternative: Sub cartesian products of the second level (pattern matched) sets
|
||||||
|
// "the big (x)"
|
||||||
|
/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
|
||||||
|
//System.out.println("secondLevelSet "+secondLevelSet.size());
|
||||||
|
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
|
||||||
|
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
|
||||||
|
//System.out.println("CardResult: "+cartResult.size());
|
||||||
|
// Flatten and add to top level sets
|
||||||
|
Set<Set<UnifyPair>> flat = new HashSet<>();
|
||||||
|
int j = 0;
|
||||||
|
for(List<Set<UnifyPair>> s : cartResult) {
|
||||||
|
j++;
|
||||||
|
//System.out.println("s from CardResult: "+cartResult.size() + " " + j);
|
||||||
|
Set<UnifyPair> flat1 = new HashSet<>();
|
||||||
|
for(Set<UnifyPair> s1 : s)
|
||||||
|
flat1.addAll(s1);
|
||||||
|
flat.add(flat1);
|
||||||
|
}
|
||||||
|
//topLevelSets.add(flat);
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
//Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
|
||||||
|
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
|
||||||
|
for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
|
||||||
|
topLevelSets.add(secondlevelelem);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//System.out.println(topLevelSets);
|
||||||
|
//System.out.println();
|
||||||
|
|
||||||
|
|
||||||
|
//Aufruf von computeCartesianRecursive ANFANG
|
||||||
|
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||||
|
//Aufruf von computeCartesianRecursive ENDE
|
||||||
|
|
||||||
|
//keine Ahnung woher das kommt
|
||||||
|
//Set<Set<UnifyPair>> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
|
||||||
|
// Cartesian product over all (up to 10) top level sets
|
||||||
|
//Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
|
||||||
|
// .stream().map(x -> new HashSet<>(x))
|
||||||
|
// .collect(Collectors.toCollection(HashSet::new));
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
|
||||||
|
|
||||||
|
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
|
||||||
|
|
||||||
|
Set<TypeUnifyTask> forks = new HashSet<>();
|
||||||
|
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
|
||||||
|
//for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
|
||||||
|
// Flatten the cartesian product
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
|
||||||
|
Set<UnifyPair> eqPrime = new HashSet<>();
|
||||||
|
setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 5: Substitution
|
||||||
|
*/
|
||||||
|
//System.out.println("vor Subst: " + eqPrime);
|
||||||
|
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 6 a) Restart (fork) for pairs where subst was applied
|
||||||
|
*/
|
||||||
|
if(parallel) {
|
||||||
|
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||||
|
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||||
|
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
|
||||||
|
//eqPrimePrime Veraenderungen in subst repraesentieren.
|
||||||
|
eqPrimePrimeSet.add(eqPrime);
|
||||||
|
else if(eqPrimePrime.isPresent()) {
|
||||||
|
//System.out.println("nextStep: " + eqPrimePrime.get());
|
||||||
|
TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log);
|
||||||
|
forks.add(fork);
|
||||||
|
fork.fork();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
//System.out.println("nextStep: " + eqPrime);
|
||||||
|
TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log);
|
||||||
|
forks.add(fork);
|
||||||
|
fork.fork();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else { // sequentiell (Step 6b is included)
|
||||||
|
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
|
||||||
|
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||||
|
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||||
|
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
|
||||||
|
//eqPrimePrime Veraenderungen in subst repraesentieren.
|
||||||
|
try {
|
||||||
|
if (isSolvedForm(eqPrime)) {
|
||||||
|
logFile.write(eqPrime.toString()+"\n");
|
||||||
|
logFile.flush();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (IOException e) { }
|
||||||
|
eqPrimePrimeSet.add(eqPrime);
|
||||||
|
}
|
||||||
|
else if(eqPrimePrime.isPresent()) {
|
||||||
|
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe);
|
||||||
|
|
||||||
|
eqPrimePrimeSet.addAll(unifyres);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Set<Set<UnifyPair>> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe);
|
||||||
|
|
||||||
|
|
||||||
|
eqPrimePrimeSet.addAll(unifyres);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
|
||||||
|
//}
|
||||||
|
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 6 b) Build the union over everything.
|
||||||
|
*/
|
||||||
|
|
||||||
|
if(parallel)
|
||||||
|
for(TypeUnifyTask fork : forks)
|
||||||
|
eqPrimePrimeSet.addAll(fork.join());
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Step 7: Filter empty sets;
|
||||||
|
*/
|
||||||
|
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet))
|
||||||
|
writeLog("Result1 " + eqPrimePrimeSet.toString());
|
||||||
|
return eqPrimePrimeSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||||
|
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
|
||||||
|
fstElems.addAll(topLevelSets.stream()
|
||||||
|
.filter(x -> x.size()==1)
|
||||||
|
.map(y -> y.stream().findFirst().get())
|
||||||
|
.collect(Collectors.toCollection(HashSet::new)));
|
||||||
|
ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
|
||||||
|
.filter(x -> x.size()>1)
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
|
||||||
|
Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
|
||||||
|
writeLog("nextSet: " + nextSet.toString());
|
||||||
|
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
|
||||||
|
try {
|
||||||
|
//List<Set<UnifyPair>>
|
||||||
|
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
|
||||||
|
}
|
||||||
|
catch (java.lang.IllegalArgumentException e) {
|
||||||
|
System.out.print("");
|
||||||
|
}
|
||||||
|
Set<Set<UnifyPair>> result = new HashSet<>();
|
||||||
|
int variance = 0;
|
||||||
|
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
|
||||||
|
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
|
||||||
|
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
|
||||||
|
.reduce((a,b)-> {if (a==b) return a; else return 2; })) //2 kommt insbesondere bei Oder-Constraints vor
|
||||||
|
.filter(d -> d.isPresent())
|
||||||
|
.map(e -> e.get())
|
||||||
|
.findAny();
|
||||||
|
if (xi.isPresent()) {
|
||||||
|
variance = xi.get();
|
||||||
|
}
	//if (variance == 1 && nextSetasList.size() > 1) {
	//	List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
	//	for (int ii = 0; ii < nextSetasList.size();ii++) {
	//		al.add(0,nextSetasList.get(ii));
	//	}
	//	nextSetasList = al;
	//}
	//Set<UnifyPair> a = nextSetasListIt.next();
	/*if (nextSetasList.size()>1) {to be deleted
		if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
			System.out.print("");
		if (variance == 1) {
			a_next = oup.max(nextSetasList.iterator());
		}
		else if (variance == -1) {
			a_next = oup.min(nextSetasList.iterator());
		}
		else if (variance == 0) {
			a_next = nextSetasList.iterator().next();
		}
	}
	else {
		a_next = nextSetasList.iterator().next();
	}
	*/
	if (!nextSetasList.iterator().hasNext())
		System.out.print("");
	if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
		System.out.print("");
	writeLog("nextSetasList: " + nextSetasList.toString());
	while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
		Set<UnifyPair> a = null;
		if (variance == 1) {
			a = oup.max(nextSetasList.iterator());
			nextSetasList.remove(a);
		}
		else if (variance == -1) {
			a = oup.min(nextSetasList.iterator());
			nextSetasList.remove(a);
		}
		else if (variance == 0 || variance == 2) {
			a = nextSetasList.remove(0);
		}
		//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
		//nextSetasList.remove(a);
		/* to be deleted
		if (nextSetasList.size() > 0) {
			if (nextSetasList.size()>1) {
				if (variance == 1) {
					a_next = oup.max(nextSetasList.iterator());
				}
				else if (variance == -1) {
					a_next = oup.min(nextSetasList.iterator());
				}
				else {
					a_next = nextSetasList.iterator().next();
				}
			}
			else {
				a_next = nextSetasList.iterator().next();
			}
		}
		*/
		//PL 2018-03-01
		//TODO: 1. distinguish maximum and minimum
		//TODO: 2. extend compare to all elements that are not of the form X =. ty
		//for(Set<UnifyPair> a : newSet) {
		i++;
		Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
		writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
		elems.add(a);
		//if (remainingSets.isEmpty()) {//must always hold, because only one element of topLevelSets contains more than one element
		//writeLog("Vor unify2 Aufruf: " + eq.toString());
-		Set<Set<UnifyPair>> res = unify2(elems, eq, fc, parallel);
+		Set<Set<UnifyPair>> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
		if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
			//if a correct result has been found, delete all error cases
			result = res;
@@ -565,6 +862,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
				.map(x -> x.getAllSubstitutions())
				.reduce((y,z) -> { y.addAll(z); return y;}).get())
				.reduce((y,z) -> { y.addAll(z); return y;}).get();
+			abhSubst.addAll(
+					res.stream()
+					.map(b ->
+						b.stream()
+						.map(x -> x.getAllBases())
+						.reduce((y,z) -> { y.addAll(z); return y;}).get())
+					.reduce((y,z) -> { y.addAll(z); return y;}).get()
+					);
			Set<UnifyPair> b = a;//effectively final a
			Set<UnifyPair> durchschnitt = abhSubst.stream()
					.filter(x -> b.contains(x))
@@ -596,7 +901,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
			// .collect(Collectors.toCollection(ArrayList::new));
			writeLog("res (undef): " + res.toString());
			writeLog("abhSubst: " + abhSubst.toString());
-			writeLog("a: " + a.toString());
+			writeLog("a2: " + rekTiefe + " " + a.toString());
			writeLog("Durchschnitt: " + durchschnitt.toString());
			writeLog("nextSet: " + nextSet.toString());
			writeLog("nextSetasList: " + nextSetasList.toString());
@@ -616,7 +921,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
			// result.removeIf(y -> isUndefinedPairSet(y));
			//}
			//else result.stream().filter(y -> !isUndefinedPairSet(y));
+			writeLog("res: " + res.toString());
		}

+		writeLog("Return computeCR: " + result.toString());
		return result;
	}

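computeCartesianRecursive enumerates the cartesian product of the top-level candidate sets lazily: singletons are committed up front, one multi-candidate set is expanded per recursion level, and a found correct result lets the branch discard accumulated error cases. Stripped of the unification specifics, the recursion skeleton looks roughly like this; the string candidates and the flat result lists are illustrative only:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

class CartesianRecursiveDemo {
    // One "candidate set" is a set of strings here; the real code uses Set<UnifyPair>.
    static Set<List<String>> compute(Set<String> chosen, List<Set<String>> topLevelSets) {
        List<Set<String>> remaining = topLevelSets.stream()
                .filter(s -> s.size() > 1)
                .collect(Collectors.toList());
        Set<String> committed = new HashSet<>(chosen);
        for (Set<String> s : topLevelSets)
            if (s.size() == 1) committed.add(s.iterator().next()); // singletons committed directly

        Set<List<String>> result = new HashSet<>();
        if (remaining.isEmpty()) {               // base case: everything decided, "unify"
            result.add(new ArrayList<>(committed));
            return result;
        }
        Set<String> nextSet = remaining.get(0);  // expand one multi-candidate set per level
        for (String a : nextSet) {
            Set<String> elems = new HashSet<>(committed);
            elems.add(a);
            result.addAll(compute(elems, remaining.subList(1, remaining.size())));
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(compute(new HashSet<>(),
                List.of(Set.of("x"), Set.of("a", "b"), Set.of("1", "2"))));
    }
}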
@@ -893,15 +1201,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
	 * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
	 * (as in case 1 where sigma is added to the innermost set).
	 */
-	protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
+	protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Set<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Set<UnifyPair>>> oderConstraintsOutput) {
-		List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
+		List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(9);

-		// Init all 8 cases
+		// Init all 8 cases + 9th case: oderConstraints
-		for(int i = 0; i < 8; i++)
+		for(int i = 0; i < 9; i++)
			result.add(new HashSet<>());

		ArrayList<UnifyPair> eq2sprime = new ArrayList<>(eq2s);
		Iterator<UnifyPair> eq2sprimeit = eq2sprime.iterator();
		ArrayList<UnifyPair> eq2sAsList = new ArrayList<>();
+		Boolean first = true;
		while(eq2sprimeit.hasNext()) {// move all pairs with variance != 0 to the front
			UnifyPair up = eq2sprimeit.next();
			if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType)up.getLhsType()).getVariance() != 0)
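The quadruple nesting in the return type reads easiest inside-out: a Set<UnifyPair> is one solution fragment, a Set<Set<UnifyPair>> is a choice between such fragments, and the two outer layers group these choices per matched pair and per case. A toy construction of that shape; the string "pairs" and the slot assignment are illustrative:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class PairSetShapeDemo {
    public static void main(String[] args) {
        // innermost: one solution fragment (a set of pairs, here strings)
        Set<String> fragment = Set.of("a <. List<b>");
        // one level up: a choice between alternative fragments
        Set<Set<String>> alternatives = Set.of(fragment, Set.of("a =. List<Integer>"));

        // outermost: one slot per case, 8 unification cases + a 9th slot for or-constraints
        List<Set<Set<Set<String>>>> result = new ArrayList<>(9);
        for (int i = 0; i < 9; i++)
            result.add(new HashSet<>());
        result.get(0).add(alternatives); // file the choice under the case that matched
        System.out.println(result);
    }
}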
@@ -910,8 +1220,43 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
				eq2s.remove(up);
			}
		}
+		if (eq2sAsList.isEmpty()) {
+			List<Set<Set<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //throw out all elements whose variance is 0 or that have no type placeholder in LHS or RHS
+					.filter(x -> x.stream()
+							.filter(y ->
+								y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
+													&& (((PlaceholderType)(z.getLhsType())).getVariance() != 0))
+												|| ((z.getRhsType() instanceof PlaceholderType)
+													&& (((PlaceholderType)(z.getRhsType())).getVariance() != 0))
+									).findFirst().isPresent()
+								).findFirst().isPresent()).collect(Collectors.toList());
+			if (!oderConstraintsVariance.isEmpty()) {
+				Set<Set<UnifyPair>> ret = oderConstraintsVariance.get(0);
+				oderConstraintsOutput.remove(ret);
+				//Set<UnifyPair> retFlat = new HashSet<>();
+				//ret.stream().forEach(x -> retFlat.addAll(x));
+				ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
+				result.get(8).add(ret);
+				first = false;
+			}
+		}

		eq2sAsList.addAll(eq2s);
-		Boolean first = true;
+
+		if (eq2sAsList.isEmpty() && first) {//all eq2s are empty and all oderConstraints with variance != 0 have been processed
+			if (!oderConstraintsOutput.isEmpty()) {
+				Set<Set<UnifyPair>> ret = oderConstraintsOutput.remove(0);
+				//Set<UnifyPair> retFlat = new HashSet<>();
+				//ret.stream().forEach(x -> retFlat.addAll(x));
+				ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
+				result.get(8).add(ret);
+				first = false;
+			}
+		}
+		/*
+		 For all of these, add the dependency on the elements of eq2sAsList, possibly as a substitution
+		*/
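The stream above picks the first or-constraint in which at least one pair carries a variance-annotated placeholder on either side; only if none exists does the second, first-guarded block fall back to an arbitrary remaining or-constraint. The same two-stage selection over a simplified model; Pair, Constraint, and the variance accessors are hypothetical stand-ins for UnifyPair and Set<Set<UnifyPair>>:

import java.util.List;
import java.util.Optional;

class OderConstraintSelectionDemo {
    record Pair(int lhsVariance, int rhsVariance) {}     // stand-in for UnifyPair
    record Constraint(List<List<Pair>> alternatives) {}  // stand-in for one or-constraint

    public static void main(String[] args) {
        List<Constraint> oderConstraints = List.of(
                new Constraint(List.of(List.of(new Pair(0, 0)))),
                new Constraint(List.of(List.of(new Pair(1, 0)))));

        // Stage 1: prefer a constraint containing a pair with nonzero variance somewhere.
        Optional<Constraint> chosen = oderConstraints.stream()
                .filter(c -> c.alternatives().stream()
                        .anyMatch(alt -> alt.stream()
                                .anyMatch(p -> p.lhsVariance() != 0 || p.rhsVariance() != 0)))
                .findFirst();

        // Stage 2: otherwise fall back to any remaining constraint.
        Constraint picked = chosen.orElse(oderConstraints.get(0));
        System.out.println(picked);
    }
}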
		for(UnifyPair pair : eq2sAsList) {
			PairOperator pairOp = pair.getPairOp();
			UnifyType lhsType = pair.getLhsType();
@@ -921,6 +1266,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
			if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
				//System.out.println(pair);
				if (first) { //writeLog(pair.toString()+"\n");
+					if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
+						System.out.println("AR");
+					}
					Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
					if (pairOp == PairOperator.SMALLERNEQDOT) {
						Set<UnifyPair> remElem = new HashSet<>();
@@ -121,6 +121,10 @@ public class UnifyPair {
		pairOp = po;
	}

+	public void addSubstitutions(Set<UnifyPair> sup) {
+		substitution.addAll(sup);
+	}
+
	public byte getVariance() {
		return variance;
	}

@@ -152,6 +156,15 @@ public class UnifyPair {
		return ret;
	}

+	public Set<UnifyPair> getAllBases () {
+		Set<UnifyPair> ret = new HashSet<>();
+		if (basePair != null) {
+			ret.add(getBasePair());
+			ret.addAll(basePair.getAllBases());
+		}
+		return ret;
+	}
+
	public UnifyPair getGroundBasePair () {
		if (basePair == null) {
			return this;
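The new getAllBases collects the whole chain of base pairs a pair was derived from: it adds the immediate base and recurses until a pair without a base (a ground pair) is reached. A stripped-down model of that recursion; this Pair class is illustrative, not the compiler's UnifyPair:

import java.util.HashSet;
import java.util.Set;

class Pair {
    private final String name;
    private final Pair basePair;  // the pair this one was derived from, or null

    Pair(String name, Pair basePair) { this.name = name; this.basePair = basePair; }

    // Collect all transitive base pairs; a ground pair contributes nothing.
    Set<Pair> getAllBases() {
        Set<Pair> ret = new HashSet<>();
        if (basePair != null) {
            ret.add(basePair);
            ret.addAll(basePair.getAllBases());
        }
        return ret;
    }

    @Override public String toString() { return name; }

    public static void main(String[] args) {
        Pair ground = new Pair("a <. Object", null);
        Pair step1  = new Pair("a <. List<b>", ground);
        Pair step2  = new Pair("a =. List<Integer>", step1);
        System.out.println(step2.getAllBases()); // [a <. List<b>, a <. Object] (order unspecified)
    }
}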
@@ -32,9 +32,9 @@ public class MatrixOpTest {
		fileToTest = new File(path);
		compiler = new JavaTXCompiler(fileToTest);
		pathToClassFile = System.getProperty("user.dir")+"/testBytecode/generatedBC/";
-		// compiler.generateBytecode(pathToClassFile);
+		compiler.generateBytecode(pathToClassFile);
-		// loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
+		loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
-		// classToTest = loader.loadClass("MatrixOP");
+		classToTest = loader.loadClass("MatrixOP");
		/*
		Vector<Vector<Integer>> vv = new Vector<Vector<Integer>>();
		Vector<Integer> v1 = new Vector<Integer> ();
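Re-enabling these three lines turns the test into an end-to-end check: the compiler emits .class files, a URLClassLoader picks them up from the output directory, and the test drives the loaded class reflectively. A minimal standalone version of that load-and-invoke step, with illustrative paths and a no-arg constructor assumed for the generated class:

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

class LoadGeneratedClassDemo {
    public static void main(String[] args) throws Exception {
        // Directory holding the generated .class files (illustrative path).
        String pathToClassFile = System.getProperty("user.dir") + "/testBytecode/generatedBC/";
        URLClassLoader loader =
                new URLClassLoader(new URL[] { new File(pathToClassFile).toURI().toURL() });

        Class<?> classToTest = loader.loadClass("MatrixOP");  // generated class
        Object instance = classToTest.getDeclaredConstructor().newInstance();
        System.out.println("loaded " + instance.getClass().getName());
    }
}

Using File.toURI().toURL() instead of the test's "file://" string concatenation avoids malformed URLs on paths with spaces; both resolve to the same location here.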
@@ -1,8 +1,8 @@
	import java.util.Vector;
	import java.lang.Integer;
-	//import java.lang.Float;
+	import java.lang.Float;
	//import java.lang.Byte;
-	import java.lang.Boolean;
+	//import java.lang.Boolean;

	public class Matrix extends Vector<Vector<Integer>> {

@@ -19,7 +19,7 @@ public class Matrix extends Vector<Vector<Integer>> {
		}
	}

	mul(m) {
		var ret = new Matrix();
		var i = 0;
		while(i < size()) {
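Matrix is a Java-TX source file: mul declares neither parameter nor return types, and its var-bound locals are untyped, so all of these must be reconstructed by the type inference under test. For orientation, a hand-annotated plain-Java sketch of what such a mul could plausibly correspond to, assuming m is another matrix; this is not compiler output, and the real inferred types are produced by JavaTXCompiler:

import java.util.Vector;

// Illustrative, fully typed rendering of a matrix multiplication in the style of Matrix.jav.
class MatrixSketch extends Vector<Vector<Integer>> {
    MatrixSketch mul(MatrixSketch m) {
        MatrixSketch ret = new MatrixSketch();
        int i = 0;
        while (i < size()) {                       // rows of this
            Vector<Integer> v1 = this.elementAt(i);
            Vector<Integer> v2 = new Vector<>();
            int j = 0;
            while (j < m.elementAt(0).size()) {    // columns of m
                int erg = 0;
                int k = 0;
                while (k < v1.size()) {            // dot product of row i and column j
                    erg = erg + v1.elementAt(k) * m.elementAt(k).elementAt(j);
                    k++;
                }
                v2.addElement(erg);
                j++;
            }
            ret.addElement(v2);
            i++;
        }
        return ret;
    }
}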