diff --git a/src/de/dhbwstuttgart/core/JavaTXCompiler.java b/src/de/dhbwstuttgart/core/JavaTXCompiler.java index 83f5aeb6b..5387e2a11 100644 --- a/src/de/dhbwstuttgart/core/JavaTXCompiler.java +++ b/src/de/dhbwstuttgart/core/JavaTXCompiler.java @@ -103,7 +103,7 @@ public class JavaTXCompiler { return new ArrayList<>(allClasses); } - public List typeInference() throws ClassNotFoundException { + public List typeInferenceOld() throws ClassNotFoundException { List allClasses = new ArrayList<>();//environment.getAllAvailableClasses(); //Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC for(SourceFile sf : this.sourceFiles.values()) { @@ -176,10 +176,7 @@ public class JavaTXCompiler { } } return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE - }) - /* PL 2018-11-07 wird in varianceInheritance erledigt - .map( y -> { - + }).map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance()); @@ -189,20 +186,16 @@ public class JavaTXCompiler { } } return y; } ) - */ .collect(Collectors.toCollection(HashSet::new)); - varianceInheritance(xConsSet); - - - - Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log); - //Set> result = unify.unify(xConsSet, finiteClosure); - System.out.println("RESULT: " + result); - logFile.write("RES: " + result.toString()+"\n"); - logFile.flush(); - results.addAll(result); + varianceInheritance(xConsSet); + Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log); + //Set> result = unify.unify(xConsSet, finiteClosure); + System.out.println("RESULT: " + result); + logFile.write("RES: " + result.toString()+"\n"); + logFile.flush(); + results.addAll(result); } - + results = results.stream().map(x -> { Optional> res = new RuleSet().subst(x.stream().map(y -> { if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); @@ -223,10 +216,11 @@ public class JavaTXCompiler { } /** - * Vererbt alle Variancen + * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a) + * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta. 
* @param eq The set of constraints */ - private void varianceInheritance(Set eq) { + private void varianceInheritance(Set eq) { Set usedTPH = new HashSet<>(); Set phSet = eq.stream().map(x -> { Set pair = new HashSet<>(); @@ -252,6 +246,171 @@ public class JavaTXCompiler { phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x))); } } + + + + public List typeInference() throws ClassNotFoundException { + List allClasses = new ArrayList<>();//environment.getAllAvailableClasses(); + //Alle importierten Klassen in allen geparsten Sourcefiles kommen ins FC + for(SourceFile sf : this.sourceFiles.values()) { + allClasses.addAll(getAvailableClasses(sf)); + allClasses.addAll(sf.getClasses()); + } + + final ConstraintSet cons = getConstraints(); + + FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses); + System.out.println(finiteClosure); + ConstraintSet unifyCons = UnifyTypeFactory.convert(cons); + + TypeUnify unify = new TypeUnify(); + Set> results = new HashSet<>(); + try { + FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/test/logFiles/"+"log")); + logFile.write("FC:\n" + finiteClosure.toString()+"\n"); + for(SourceFile sf : this.sourceFiles.values()) { + logFile.write(ASTTypePrinter.print(sf)); + } + logFile.flush(); + + Set paraTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().map(y -> y.getParameterList().getFormalparalist() + .stream().filter(z -> z.getType() instanceof TypePlaceholder) + .map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new))) + .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) ) + .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;} ); + + Set returnTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder) + .map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get(); + + Set fieldTypeVarNames = allClasses.stream().map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder) + .map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get(); + + returnTypeVarNames.addAll(fieldTypeVarNames); + + unifyCons = unifyCons.map(x -> { + //Hier muss ueberlegt werden, ob + //1. alle Argument- und Returntyp-Variablen in allen UnifyPairs + // mit disableWildcardtable() versehen werden. + //2. alle Typvariablen, die mit Argument- oder Returntyp-Variablen + //in Beziehung stehen, auch auf disableWildcardtable() gesetzt werden muessen + //PL 2018-04-23 + if ((x.getLhsType() instanceof PlaceholderType)) { + if (paraTypeVarNames.contains(x.getLhsType().getName())) { + ((PlaceholderType)x.getLhsType()).setVariance((byte)1); + ((PlaceholderType)x.getLhsType()).disableWildcardtable(); + } + if (returnTypeVarNames.contains(x.getLhsType().getName())) { + ((PlaceholderType)x.getLhsType()).setVariance((byte)-1); + ((PlaceholderType)x.getLhsType()).disableWildcardtable(); + } + } + if ((x.getRhsType() instanceof PlaceholderType)) { + if (paraTypeVarNames.contains(x.getRhsType().getName())) { + ((PlaceholderType)x.getRhsType()).setVariance((byte)1); + ((PlaceholderType)x.getRhsType()).disableWildcardtable(); + } + if (returnTypeVarNames.contains(x.getRhsType().getName())) { + ((PlaceholderType)x.getRhsType()).setVariance((byte)-1); + ((PlaceholderType)x.getRhsType()).disableWildcardtable(); + } + } + return x;//HIER DIE JEWEILS RECHTE BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE + }); + Set varianceTPHold; + Set varianceTPH = new HashSet<>(); + varianceTPH = varianceInheritanceConstraintSet(unifyCons); + + /* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt + do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen + //anderen Seite übertragen + varianceTPHold = new HashSet<>(varianceTPH); + varianceTPH = varianceInheritanceConstraintSet(unifyCons); + unifyCons.map( y -> { + if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { + if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) { + ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance()); + } + if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { + ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance()); + } + } + return y; } ); } + while (!varianceTPHold.equals(varianceTPH)); + */ + + //Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log); + //Set> result = unify.unify(xConsSet, finiteClosure); + Set> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log); + System.out.println("RESULT: " + result); + logFile.write("RES: " + result.toString()+"\n"); + logFile.flush(); + results.addAll(result); + + + results = results.stream().map(x -> { + Optional> res = new RuleSet().subst(x.stream().map(y -> { + if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); + return y; //alle Paare a <.? b werden durch a =. b ersetzt + }).collect(Collectors.toCollection(HashSet::new))); + if (res.isPresent()) {//wenn subst ein Ergebnis liefert, wurde etwas veraendert + return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure); + } + else return x; //wenn nichts veraendert wurde, wird x zurueckgegeben + }).collect(Collectors.toCollection(HashSet::new)); + System.out.println("RESULT Final: " + results); + logFile.write("RES_FINAL: " + results.toString()+"\n"); + logFile.flush(); + } + catch (IOException e) { } + return results.stream().map((unifyPairs -> + new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList()); + } + + + /** + * Vererbt alle Variancen bei Paaren (a <. 
Theta) oder (Theta <. a) + * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta. + * @param cons The set of constraints + */ + private Set varianceInheritanceConstraintSet(ConstraintSet cons) { + Set eq = cons.getAll(); + Set usedTPH = new HashSet<>(); + Set phSet = eq.stream().map(x -> { + Set pair = new HashSet<>(); + if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType()); + if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType()); + return pair; + }).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;}); + + ArrayList phSetVariance = new ArrayList<>(phSet); + phSetVariance.removeIf(x -> (x.getVariance() == 0)); + while(!phSetVariance.isEmpty()) { + PlaceholderType a = phSetVariance.remove(0); + usedTPH.add(a); + //HashMap ht = new HashMap<>(); + //ht.put(a, a.getVariance()); + //ConstraintSet eq1 = cons; + //eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a))); + //durch if-Abfrage im foreach geloest + cons.forEach(x -> { + if (x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)) { + x.getRhsType().accept(new distributeVariance(), a.getVariance()); + } + }); + //Set eq1 = new HashSet<>(eq); + //eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a))); + //durch if-Abfrage im foreach geloest + cons.forEach(x -> { + if (x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)) { + x.getLhsType().accept(new distributeVariance(), a.getVariance()); + } + }); + phSetVariance = new ArrayList<>(phSet); //macht vermutlich keinen Sinn PL 2018-10-18, doch, es koennen neue TPHs mit Variancen dazugekommen sein PL 2018-11-07 + phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x))); + } + return usedTPH; + } + private Map generateTPHMap(ConstraintSet constraints) { HashMap ret = new HashMap<>(); diff --git a/src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java b/src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java index 01356fe9a..4d1f076f3 100644 --- a/src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java +++ b/src/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java @@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import java.util.*; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; @@ -52,4 +53,29 @@ public class ConstraintSet { ret.oderConstraints = newOder; return ret; } + + public void forEach (Consumer c) { + undConstraints.stream().forEach(c); + for(Set> oderConstraint : oderConstraints){ + oderConstraint.parallelStream().forEach((Constraint as) -> + as.stream().forEach(c)); + } + } + + public Set getAll () { + Set ret = new HashSet<>(); + ret.addAll(undConstraints); + for(Set> oderConstraint : oderConstraints){ + oderConstraint.stream().forEach((Constraint as) -> ret.addAll(as)); //kein parallelStream: HashSet.addAll ist nicht threadsicher + } + return ret; + } + + public List>> getOderConstraints() { + return oderConstraints; + } + + public Set getUndConstraints() { + return undConstraints; + } } diff --git a/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java b/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java index 609600cd1..e06669505 100644 --- a/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java +++ 
b/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java @@ -228,6 +228,13 @@ public class TYPEStmt implements StatementVisitor{ binary.operation.equals(BinaryExpr.Operator.ADD)|| binary.operation.equals(BinaryExpr.Operator.SUB)){ Set> numericAdditionOrStringConcatenation = new HashSet<>(); + +// TODO PL 2018-11-06 + + // Auf importierte Typen einschraenken + // pruefen, ob Typen richtig bestimmt werden. + + //Zuerst der Fall für numerische Ausdrücke, das sind Mul, Mod und Div immer: //see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17 diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java index fa190cb7f..d2428defc 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java @@ -1,9 +1,11 @@ package de.dhbwstuttgart.typeinference.unify; import java.io.FileWriter; +import java.util.List; import java.util.Set; import java.util.concurrent.ForkJoinPool; +import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; @@ -21,5 +23,11 @@ public class TypeUnify { Set> res = unifyTask.compute(); return res; } + + public Set> unifyOderConstraints(Set undConstraints, List>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) { + TypeUnifyTask unifyTask = new TypeUnifyTask(undConstraints, oderConstraints, fc, false, logFile, log); + Set> res = unifyTask.compute(); + return res; + } } diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 7864ece1b..fbf354d0e 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -18,6 +18,7 @@ import java.util.function.BinaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; +import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet; import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations; @@ -71,7 +72,9 @@ public class TypeUnifyTask extends RecursiveTask>> { */ protected IRuleSet rules; - protected Set eq; + protected Set eq; //und-constraints + + protected List>> oderConstraintsField; protected IFiniteClosure fc; @@ -85,6 +88,8 @@ public class TypeUnifyTask extends RecursiveTask>> { Integer noAllErasedElements = 0; + static Integer noou = 0; + static int noBacktracking; public TypeUnifyTask() { @@ -101,6 +106,25 @@ public class TypeUnifyTask extends RecursiveTask>> { rules = new RuleSet(logFile); } + public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) { + this.eq = eq; + //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new)); + this.oderConstraintsField = oderConstraints.stream().map(x -> { + Set> ret = new HashSet<>(); + for (Constraint y : x) { + ret.add(new HashSet<>(y)); + } + return ret; + }).collect(Collectors.toCollection(ArrayList::new)); + + //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new)); + this.fc = fc; + 
this.oup = new OrderingUnifyPair(fc); + this.parallel = parallel; + this.logFile = logFile; + this.log = log; + rules = new RuleSet(logFile); + } /** * Vererbt alle Variancen @@ -134,260 +158,33 @@ public class TypeUnifyTask extends RecursiveTask>> { } } */ - @Override + protected Set> compute() { - Set> res = unify(eq, fc, parallel); + Set neweq = new HashSet<>(eq); + /* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */ + oderConstraintsField.stream() + .filter(x -> x.size()==1) + .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x)); + ArrayList>> remainingOderconstraints = oderConstraintsField.stream() + .filter(x -> x.size()>1) + .collect(Collectors.toCollection(ArrayList::new)); + Set> res = unify(neweq, remainingOderconstraints, fc, parallel, 0); if (isUndefinedPairSetSet(res)) { return new HashSet<>(); } else return res; } - - /** - * Computes all principal type unifiers for a set of constraints. - * @param eq The set of constraints - * @param fc The finite closure - * @return The set of all principal type unifiers - */ - protected Set> unify(Set eq, IFiniteClosure fc, boolean parallel) { - //Set aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT) - // ).collect(Collectors.toCollection(HashSet::new)); - //writeLog(nOfUnify.toString() + " AA: " + aas.toString()); - //if (aas.isEmpty()) { - // System.out.println(""); - //} - /* - * Step 1: Repeated application of reduce, adapt, erase, swap - */ - nOfUnify++; - writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString()); - //eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new)); - - /* - * Variancen auf alle Gleichungen vererben - */ - //PL 2018-05-17 nach JavaTXCompiler verschoben - //varianceInheritance(eq); - - /* - * ? extends ? extends Theta rausfiltern - */ - Set doubleExt = eq.stream().filter(x -> (x.wrongWildcard())).map(x -> { x.setUndefinedPair(); return x;}) - .collect(Collectors.toCollection(HashSet::new)); - if (doubleExt.size() > 0) { - Set> ret = new HashSet<>(); - ret.add(doubleExt); - return ret; - } - - - Set eq0 = applyTypeUnificationRules(eq, fc); - - /* - * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs - */ - Set eq1s = new HashSet<>(); - Set eq2s = new HashSet<>(); - splitEq(eq0, eq1s, eq2s); - - /* - * Step 4: Create possible typings - * - * "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c) - * mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia - */ - - // There are up to 10 toplevel set. 8 of 10 are the result of the - // cartesian product of the sets created by pattern matching. - List>> topLevelSets = new ArrayList<>(); - - //System.out.println(eq2s); - - if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty. - Set> wrap = new HashSet<>(); - wrap.add(eq1s); - topLevelSets.add(wrap); // Add Eq1' - } - - // Add the set of [a =. Theta | (a=. Theta) in Eq2'] - Set bufferSet = eq2s.stream() - .filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType) - .collect(Collectors.toSet()); - - if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty. 
- Set> wrap = new HashSet<>(); - wrap.add(bufferSet); - topLevelSets.add(wrap); - eq2s.removeAll(bufferSet); - } - - // Sets that originate from pair pattern matching - // Sets of the "second level" - Set undefinedPairs = new HashSet<>(); - if (printtag) System.out.println("eq2s " + eq2s); - //writeLog("BufferSet: " + bufferSet.toString()+"\n"); - Set>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs); - //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer - //nicht ausgewertet Faculty Beispiel im 1. Schritt - //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren - //Typen getestet werden. - if (printtag) System.out.println("secondLevelSets:" +secondLevelSets); - // If pairs occured that did not match one of the cartesian product cases, - // those pairs are contradictory and the unification is impossible. - if(!undefinedPairs.isEmpty()) { - noUndefPair++; - for (UnifyPair up : undefinedPairs) { - writeLog(noUndefPair.toString() + " UndefinedPairs; " + up); - writeLog("BasePair; " + up.getBasePair()); - } - Set> error = new HashSet<>(); - undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new)); - error.add(undefinedPairs); - return error; - } - - /* Up to here, no cartesian products are calculated. - * filters for pairs and sets can be applied here */ - - // Alternative: Sub cartesian products of the second level (pattern matched) sets - // "the big (x)" - /* for(Set>> secondLevelSet : secondLevelSets) { - //System.out.println("secondLevelSet "+secondLevelSet.size()); - List>> secondLevelSetList = new ArrayList<>(secondLevelSet); - Set>> cartResult = setOps.cartesianProduct(secondLevelSetList); - //System.out.println("CardResult: "+cartResult.size()); - // Flatten and add to top level sets - Set> flat = new HashSet<>(); - int j = 0; - for(List> s : cartResult) { - j++; - //System.out.println("s from CardResult: "+cartResult.size() + " " + j); - Set flat1 = new HashSet<>(); - for(Set s1 : s) - flat1.addAll(s1); - flat.add(flat1); - } - //topLevelSets.add(flat); - } - */ - - //Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden - for(Set>> secondLevelSet : secondLevelSets) { - for (Set> secondlevelelem : secondLevelSet) { - topLevelSets.add(secondlevelelem); - } - } - //System.out.println(topLevelSets); - //System.out.println(); - - - //Aufruf von computeCartesianRecursive ANFANG - return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel); - +/* + @Override + protected Set> compute() { + Set> fstElems = new HashSet<>(); + fstElems.add(eq); + Set> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel); + if (isUndefinedPairSetSet(res)) { return new HashSet<>(); } + else return res; } - - - Set> unify2(Set> setToFlatten, Set eq, IFiniteClosure fc, boolean parallel) { - //Aufruf von computeCartesianRecursive ENDE - - //keine Ahnung woher das kommt - //Set> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new)); - - //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG - // Cartesian product over all (up to 10) top level sets - //Set>> eqPrimeSet = setOps.cartesianProduct(topLevelSets) - // .stream().map(x -> new HashSet<>(x)) - // .collect(Collectors.toCollection(HashSet::new)); - //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE +*/ - Set> eqPrimePrimeSet = new HashSet<>(); - - Set forks = new 
HashSet<>(); - - //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG - //for(Set> setToFlatten : eqPrimeSet) { - // Flatten the cartesian product - //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE - Set eqPrime = new HashSet<>(); - setToFlatten.stream().forEach(x -> eqPrime.addAll(x)); - /* - * Step 5: Substitution - */ - //System.out.println("vor Subst: " + eqPrime); - Optional> eqPrimePrime = rules.subst(eqPrime); - - /* - * Step 6 a) Restart (fork) for pairs where subst was applied - */ - if(parallel) { - if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch - //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent()) - //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst - //eqPrimePrime Veraenderungen in subst repraesentieren. - eqPrimePrimeSet.add(eqPrime); - else if(eqPrimePrime.isPresent()) { - //System.out.println("nextStep: " + eqPrimePrime.get()); - TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log); - forks.add(fork); - fork.fork(); - } - else { - //System.out.println("nextStep: " + eqPrime); - TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log); - forks.add(fork); - fork.fork(); - } - } - else { // sequentiell (Step 6b is included) - if (printtag) System.out.println("nextStep: " + eqPrimePrime); - if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch - //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent()) - //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst - //eqPrimePrime Veraenderungen in subst repraesentieren. - try { - if (isSolvedForm(eqPrime)) { - logFile.write(eqPrime.toString()+"\n"); - logFile.flush(); - } - } - catch (IOException e) { } - eqPrimePrimeSet.add(eqPrime); - } - else if(eqPrimePrime.isPresent()) { - Set> unifyres = unify(eqPrimePrime.get(), fc, false); - - eqPrimePrimeSet.addAll(unifyres); - } - else { - Set> unifyres = unify(eqPrime, fc, false); - - - eqPrimePrimeSet.addAll(unifyres); - } - } - //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG - //} - //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE - - /* - * Step 6 b) Build the union over everything. 
- */ - - if(parallel) - for(TypeUnifyTask fork : forks) - eqPrimePrimeSet.addAll(fork.join()); - - /* - * Step 7: Filter empty sets; - */ - eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new)); - if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) - writeLog("Result1 " + eqPrimePrimeSet.toString()); - return eqPrimePrimeSet; - } - - - - Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, IFiniteClosure fc, boolean parallel) { + public Set> computeCartesianRecursiveOderConstraints(Set> fstElems, List>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) { //ArrayList>> remainingSets = new ArrayList<>(topLevelSets); fstElems.addAll(topLevelSets.stream() .filter(x -> x.size()==1) @@ -397,7 +194,9 @@ public class TypeUnifyTask extends RecursiveTask>> { .filter(x -> x.size()>1) .collect(Collectors.toCollection(ArrayList::new)); if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig - Set> result = unify2(fstElems, eq, fc, parallel); + Set eq = new HashSet<>(); + fstElems.stream().forEach(x -> eq.addAll(x)); + Set> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe); return result; } Set> nextSet = remainingSets.remove(0); @@ -492,9 +291,507 @@ public class TypeUnifyTask extends RecursiveTask>> { i++; Set> elems = new HashSet>(fstElems); elems.add(a); + Set> res = new HashSet<>(); + if (remainingSets.isEmpty()) { + noou++; + writeLog("Vor unify Aufruf: " + eq.toString()); + writeLog("No of Unify " + noou); + System.out.println(noou); + Set eq = new HashSet<>(); + elems.stream().forEach(x -> eq.addAll(x)); + res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe); + } + else {//duerfte gar nicht mehr vorkommen PL 2018-04-03 + res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe); + + } + if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) { + //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen + result = res; + } + else { + if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) + || (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) + || result.isEmpty()) { + //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden + result.addAll(res); + } + //else { + //wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren + // if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) { + // result = result; + // } + //} + } + + + + + /* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */ + if (!result.isEmpty() && !isUndefinedPairSetSet(res)) { + if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1) + System.out.print(""); + Iterator> nextSetasListIt = new ArrayList>(nextSetasList).iterator(); + if (variance == 1) { + System.out.println(""); + while (nextSetasListIt.hasNext()) { + Set a_next = nextSetasListIt.next(); + if (a.equals(a_next) || + (oup.compare(a, a_next) == 1)) { + nextSetasList.remove(a_next); + } + else { + System.out.println(""); + } + } + } + else { if (variance == -1) { + System.out.println(""); + while (nextSetasListIt.hasNext()) { + Set a_next = nextSetasListIt.next(); + if (a.equals(a_next) || + (oup.compare(a, a_next) == -1)) { + nextSetasList.remove(0); + } + else { + System.out.println(""); + } + } + } + else if (variance == 0) { + //break; 
+ }} + } + /* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */ + + /* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist. + if (isUndefinedPairSetSet(res)) { + int nofstred= 0; + Set abhSubst = res.stream() + .map(b -> + b.stream() + .map(x -> x.getAllSubstitutions()) + .reduce((y,z) -> { y.addAll(z); return y;}).get()) + .reduce((y,z) -> { y.addAll(z); return y;}).get(); + Set b = a;//effective final a + Set durchschnitt = abhSubst.stream() + .filter(x -> b.contains(x)) + //.filter(y -> abhSubst.contains(y)) + .collect(Collectors.toCollection(HashSet::new)); + //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); + int len = nextSetasList.size(); + Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results + Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream() + .map(x -> { Set su = x.getAllSubstitutions(); //alle benutzten Substitutionen + su.add(x.getGroundBasePair()); // urspruengliches Paar + su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen + return new Pair<>(su, x.getGroundBasePair());}) + .collect(Collectors.toCollection(HashSet::new)); + if (res.size() > 1) { + System.out.println(); + } + nextSetasList = nextSetasList.stream().filter(x -> { + //Boolean ret = false; + //for (PlaceholderType var : vars) { + // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); + //} + return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist?? + })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 + .collect(Collectors.toCollection(ArrayList::new)); + nofstred = nextSetasList.size(); + //NOCH NICHT korrekt PL 2018-10-12 + //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) + // .collect(Collectors.toCollection(ArrayList::new)); + writeLog("res (undef): " + res.toString()); + writeLog("abhSubst: " + abhSubst.toString()); + writeLog("a: " + a.toString()); + writeLog("Durchschnitt: " + durchschnitt.toString()); + writeLog("nextSet: " + nextSet.toString()); + writeLog("nextSetasList: " + nextSetasList.toString()); + writeLog("Number first erased Elements (undef): " + (len - nofstred)); + writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size())); + writeLog("Number erased Elements (undef): " + (len - nextSetasList.size())); + noAllErasedElements = noAllErasedElements + (len - nextSetasList.size()); + writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString()); + noBacktracking++; + writeLog("Number of Backtracking: " + noBacktracking); + System.out.println(""); + } + */ + //if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) { + // return result; + //} + //else { + // result.removeIf(y -> isUndefinedPairSet(y)); + //} + //else result.stream().filter(y -> !isUndefinedPairSet(y)); + + + } // End of while (nextSetasList.size() > 0) + return result; + } + + + /** + * Computes all principal type unifiers for a set of constraints. 
+ * @param eq The set of constraints + * @param fc The finite closure + * @return The set of all principal type unifiers + */ + protected Set> unify(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) { + //Set aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT) + // ).collect(Collectors.toCollection(HashSet::new)); + //writeLog(nOfUnify.toString() + " AA: " + aas.toString()); + //if (aas.isEmpty()) { + // System.out.println(""); + //} + + //.collect(Collectors.toCollection(HashSet::new))); + /* + * Step 1: Repeated application of reduce, adapt, erase, swap + */ + rekTiefe++; + nOfUnify++; + writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString()); + writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString()); + //eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new)); + + /* + * Variancen auf alle Gleichungen vererben + */ + //PL 2018-05-17 nach JavaTXCompiler verschoben + //varianceInheritance(eq); + + /* + * ? extends ? extends Theta rausfiltern + */ + Set doubleExt = eq.stream().filter(x -> (x.wrongWildcard())).map(x -> { x.setUndefinedPair(); return x;}) + .collect(Collectors.toCollection(HashSet::new)); + if (doubleExt.size() > 0) { + Set> ret = new HashSet<>(); + ret.add(doubleExt); + return ret; + } + + + Set eq0 = applyTypeUnificationRules(eq, fc); + + /* + * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs + */ + Set eq1s = new HashSet<>(); + Set eq2s = new HashSet<>(); + splitEq(eq0, eq1s, eq2s); + + /* + * Step 4: Create possible typings + * + * "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c) + * mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia + */ + + // There are up to 10 toplevel set. 8 of 10 are the result of the + // cartesian product of the sets created by pattern matching. + List>> topLevelSets = new ArrayList<>(); + + //System.out.println(eq2s); + + if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty. + Set> wrap = new HashSet<>(); + wrap.add(eq1s); + topLevelSets.add(wrap); // Add Eq1' + } + + // Add the set of [a =. Theta | (a=. Theta) in Eq2'] + Set bufferSet = eq2s.stream() + .filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType) + .collect(Collectors.toSet()); + + if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty. + Set> wrap = new HashSet<>(); + wrap.add(bufferSet); + topLevelSets.add(wrap); + eq2s.removeAll(bufferSet); + } + + // Sets that originate from pair pattern matching + // Sets of the "second level" + Set undefinedPairs = new HashSet<>(); + if (printtag) System.out.println("eq2s " + eq2s); + //writeLog("BufferSet: " + bufferSet.toString()+"\n"); + List>> oderConstraintsOutput = new ArrayList<>(oderConstraints); + Set>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput); + //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer + //nicht ausgewertet Faculty Beispiel im 1. Schritt + //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren + //Typen getestet werden. 
+ if (printtag) System.out.println("secondLevelSets:" +secondLevelSets); + // If pairs occured that did not match one of the cartesian product cases, + // those pairs are contradictory and the unification is impossible. + if(!undefinedPairs.isEmpty()) { + noUndefPair++; + for (UnifyPair up : undefinedPairs) { + writeLog(noUndefPair.toString() + " UndefinedPairs; " + up); + writeLog("BasePair; " + up.getBasePair()); + } + Set> error = new HashSet<>(); + undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new)); + error.add(undefinedPairs); + return error; + } + + /* Up to here, no cartesian products are calculated. + * filters for pairs and sets can be applied here */ + + // Alternative: Sub cartesian products of the second level (pattern matched) sets + // "the big (x)" + /* for(Set>> secondLevelSet : secondLevelSets) { + //System.out.println("secondLevelSet "+secondLevelSet.size()); + List>> secondLevelSetList = new ArrayList<>(secondLevelSet); + Set>> cartResult = setOps.cartesianProduct(secondLevelSetList); + //System.out.println("CardResult: "+cartResult.size()); + // Flatten and add to top level sets + Set> flat = new HashSet<>(); + int j = 0; + for(List> s : cartResult) { + j++; + //System.out.println("s from CardResult: "+cartResult.size() + " " + j); + Set flat1 = new HashSet<>(); + for(Set s1 : s) + flat1.addAll(s1); + flat.add(flat1); + } + //topLevelSets.add(flat); + } + */ + + //Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden + for(Set>> secondLevelSet : secondLevelSets) { + for (Set> secondlevelelem : secondLevelSet) { + topLevelSets.add(secondlevelelem); + } + } + //System.out.println(topLevelSets); + //System.out.println(); + + + //Aufruf von computeCartesianRecursive ANFANG + return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe); + + } + + + Set> unify2(Set> setToFlatten, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) { + //Aufruf von computeCartesianRecursive ENDE + + //keine Ahnung woher das kommt + //Set> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new)); + + //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG + // Cartesian product over all (up to 10) top level sets + //Set>> eqPrimeSet = setOps.cartesianProduct(topLevelSets) + // .stream().map(x -> new HashSet<>(x)) + // .collect(Collectors.toCollection(HashSet::new)); + //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE + + Set> eqPrimePrimeSet = new HashSet<>(); + + Set forks = new HashSet<>(); + + //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG + //for(Set> setToFlatten : eqPrimeSet) { + // Flatten the cartesian product + //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE + Set eqPrime = new HashSet<>(); + setToFlatten.stream().forEach(x -> eqPrime.addAll(x)); + + /* + * Step 5: Substitution + */ + //System.out.println("vor Subst: " + eqPrime); + Optional> eqPrimePrime = rules.subst(eqPrime); + + /* + * Step 6 a) Restart (fork) for pairs where subst was applied + */ + if(parallel) { + if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch + //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent()) + //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst + //eqPrimePrime 
Veraenderungen in subst repraesentieren. + eqPrimePrimeSet.add(eqPrime); + else if(eqPrimePrime.isPresent()) { + //System.out.println("nextStep: " + eqPrimePrime.get()); + TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log); + forks.add(fork); + fork.fork(); + } + else { + //System.out.println("nextStep: " + eqPrime); + TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log); + forks.add(fork); + fork.fork(); + } + } + else { // sequentiell (Step 6b is included) + if (printtag) System.out.println("nextStep: " + eqPrimePrime); + if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch + //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent()) + //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst + //eqPrimePrime Veraenderungen in subst repraesentieren. + try { + if (isSolvedForm(eqPrime)) { + logFile.write(eqPrime.toString()+"\n"); + logFile.flush(); + } + } + catch (IOException e) { } + eqPrimePrimeSet.add(eqPrime); + } + else if(eqPrimePrime.isPresent()) { + Set> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe); + + eqPrimePrimeSet.addAll(unifyres); + } + else { + Set> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe); + + + eqPrimePrimeSet.addAll(unifyres); + } + } + //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG + //} + //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE + + /* + * Step 6 b) Build the union over everything. + */ + + if(parallel) + for(TypeUnifyTask fork : forks) + eqPrimePrimeSet.addAll(fork.join()); + + /* + * Step 7: Filter empty sets; + */ + eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new)); + if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) + writeLog("Result1 " + eqPrimePrimeSet.toString()); + return eqPrimePrimeSet; + } + + + + Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) { + //ArrayList>> remainingSets = new ArrayList<>(topLevelSets); + fstElems.addAll(topLevelSets.stream() + .filter(x -> x.size()==1) + .map(y -> y.stream().findFirst().get()) + .collect(Collectors.toCollection(HashSet::new))); + ArrayList>> remainingSets = topLevelSets.stream() + .filter(x -> x.size()>1) + .collect(Collectors.toCollection(ArrayList::new)); + if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig + Set> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe); + return result; + } + Set> nextSet = remainingSets.remove(0); + writeLog("nextSet: " + nextSet.toString()); + List> nextSetasList =new ArrayList<>(nextSet); + try { + //List> + //nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet); + } + catch (java.lang.IllegalArgumentException e) { + System.out.print(""); + } + Set> result = new HashSet<>(); + int variance = 0; + Optional xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType) + .filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0) + .map(c -> ((PlaceholderType)c.getLhsType()).getVariance()) + .reduce((a,b)-> {if (a==b) return a; else return 2; })) //2 kommt insbesondere bei Oder-Constraints vor + .filter(d -> d.isPresent()) + .map(e -> e.get()) + .findAny(); + if (xi.isPresent()) { + variance = xi.get(); + } + 
//if (variance == 1 && nextSetasList.size() > 1) { + // List> al = new ArrayList<>(nextSetasList.size()); + // for (int ii = 0; ii < nextSetasList.size();ii++) { + // al.add(0,nextSetasList.get(ii)); + // } + // nextSetasList = al; + //} + //Set a = nextSetasListIt.next(); + /*if (nextSetasList.size()>1) {zu loeschen + if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D")) + System.out.print(""); + if (variance == 1) { + a_next = oup.max(nextSetasList.iterator()); + } + else if (variance == -1) { + a_next = oup.min(nextSetasList.iterator()); + } + else if (variance == 0) { + a_next = nextSetasList.iterator().next(); + } + } + else { + a_next = nextSetasList.iterator().next(); + } + */ + if (!nextSetasList.iterator().hasNext()) + System.out.print(""); + if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1) + System.out.print(""); + writeLog("nextSetasList: " + nextSetasList.toString()); + while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) { + Set a = null; + if (variance == 1) { + a = oup.max(nextSetasList.iterator()); + nextSetasList.remove(a); + } + else if (variance == -1) { + a = oup.min(nextSetasList.iterator()); + nextSetasList.remove(a); + } + else if (variance == 0 || variance == 2) { + a = nextSetasList.remove(0); + } + //writeLog("nextSet: " + nextSetasList.toString()+ "\n"); + //nextSetasList.remove(a); + /* zu loeschen + if (nextSetasList.size() > 0) { + if (nextSetasList.size()>1) { + if (variance == 1) { + a_next = oup.max(nextSetasList.iterator()); + } + else if (variance == -1) { + a_next = oup.min(nextSetasList.iterator()); + } + else { + a_next = nextSetasList.iterator().next(); + } + } + else { + a_next = nextSetasList.iterator().next(); + } + } + */ + //PL 2018-03-01 + //TODO: 1. Maximum und Minimum unterscheiden + //TODO: 2. compare noch für alle Elmemente die nicht X =. 
ty sind erweitern + //for(Set a : newSet) { + i++; + Set> elems = new HashSet>(fstElems); + writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n"); + elems.add(a); //if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt //writeLog("Vor unify2 Aufruf: " + eq.toString()); - Set> res = unify2(elems, eq, fc, parallel); + Set> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe); if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) { //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen result = res; @@ -565,6 +862,14 @@ public class TypeUnifyTask extends RecursiveTask>> { .map(x -> x.getAllSubstitutions()) .reduce((y,z) -> { y.addAll(z); return y;}).get()) .reduce((y,z) -> { y.addAll(z); return y;}).get(); + abhSubst.addAll( + res.stream() + .map(b -> + b.stream() + .map(x -> x.getAllBases()) + .reduce((y,z) -> { y.addAll(z); return y;}).get()) + .reduce((y,z) -> { y.addAll(z); return y;}).get() + ); Set b = a;//effective final a Set durchschnitt = abhSubst.stream() .filter(x -> b.contains(x)) @@ -596,7 +901,7 @@ public class TypeUnifyTask extends RecursiveTask>> { // .collect(Collectors.toCollection(ArrayList::new)); writeLog("res (undef): " + res.toString()); writeLog("abhSubst: " + abhSubst.toString()); - writeLog("a: " + a.toString()); + writeLog("a2: " + rekTiefe + " " + a.toString()); writeLog("Durchschnitt: " + durchschnitt.toString()); writeLog("nextSet: " + nextSet.toString()); writeLog("nextSetasList: " + nextSetasList.toString()); @@ -616,7 +921,10 @@ public class TypeUnifyTask extends RecursiveTask>> { // result.removeIf(y -> isUndefinedPairSet(y)); //} //else result.stream().filter(y -> !isUndefinedPairSet(y)); + writeLog("res: " + res.toString()); } + + writeLog("Return computeCR: " + result.toString()); return result; } @@ -893,15 +1201,17 @@ public class TypeUnifyTask extends RecursiveTask>> { * from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements * (as in case 1 where sigma is added to the innermost set). */ - protected Set>>> calculatePairSets(Set eq2s, IFiniteClosure fc, Set undefined) { - List>>> result = new ArrayList<>(8); + protected Set>>> calculatePairSets(Set eq2s, List>> oderConstraintsInput, IFiniteClosure fc, Set undefined, List>> oderConstraintsOutput) { + List>>> result = new ArrayList<>(9); - // Init all 8 cases - for(int i = 0; i < 8; i++) + // Init all 8 cases + 9. 
Case: oderConstraints + for(int i = 0; i < 9; i++) result.add(new HashSet<>()); + ArrayList eq2sprime = new ArrayList<>(eq2s); Iterator eq2sprimeit = eq2sprime.iterator(); ArrayList eq2sAsList = new ArrayList<>(); + Boolean first = true; while(eq2sprimeit.hasNext()) {// alle mit Variance != 0 nach vorne schieben UnifyPair up = eq2sprimeit.next(); if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType)up.getLhsType()).getVariance() != 0) @@ -910,8 +1220,43 @@ public class TypeUnifyTask extends RecursiveTask>> { eq2s.remove(up); } } + if (eq2sAsList.isEmpty()) { + List>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind + .filter(x -> x.stream() + .filter(y -> + y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType) + && (((PlaceholderType)(z.getLhsType())).getVariance() != 0)) + || ((z.getRhsType() instanceof PlaceholderType) + && (((PlaceholderType)(z.getRhsType())).getVariance() != 0)) + ).findFirst().isPresent() + ).findFirst().isPresent()).collect(Collectors.toList()); + if (!oderConstraintsVariance.isEmpty()) { + Set> ret = oderConstraintsVariance.get(0); + oderConstraintsOutput.remove(ret); + //Set retFlat = new HashSet<>(); + //ret.stream().forEach(x -> retFlat.addAll(x)); + ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x))); + result.get(8).add(ret); + first = false; + } + } + eq2sAsList.addAll(eq2s); - Boolean first = true; + + if (eq2sAsList.isEmpty() && first) {//Alle eq2s sind empty und alle oderConstraints mit Variance != 0 sind bearbeitet + if (!oderConstraintsOutput.isEmpty()) { + Set> ret = oderConstraintsOutput.remove(0); + //Set retFlat = new HashSet<>(); + //ret.stream().forEach(x -> retFlat.addAll(x)); + ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x))); + result.get(8).add(ret); + first = false; + } + } + /* + Bei allen die Abhaengigkeit der Elemente aus eq2sAsList als evtl. 
als Substitution + hinzufuegen + */ for(UnifyPair pair : eq2sAsList) { PairOperator pairOp = pair.getPairOp(); UnifyType lhsType = pair.getLhsType(); @@ -921,6 +1266,9 @@ public class TypeUnifyTask extends RecursiveTask>> { if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) { //System.out.println(pair); if (first) { //writeLog(pair.toString()+"\n"); + if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) { + System.out.println("AR"); + } Set> x1 = unifyCase1(pair, fc); if (pairOp == PairOperator.SMALLERNEQDOT) { Set remElem = new HashSet<>(); diff --git a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java index d69138a72..57b4e61ed 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java +++ b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java @@ -121,6 +121,10 @@ public class UnifyPair { pairOp = po; } + public void addSubstitutions(Set sup) { + substitution.addAll(sup); + } + public byte getVariance() { return variance; } @@ -152,6 +156,15 @@ public class UnifyPair { return ret; } + public Set getAllBases () { + Set ret = new HashSet<>(); + if (basePair != null) { + ret.add(getBasePair()); + ret.addAll(basePair.getAllBases()); + } + return ret; + } + public UnifyPair getGroundBasePair () { if (basePair == null) { return this; diff --git a/test/bytecode/MatrixOpTest.java b/test/bytecode/MatrixOpTest.java index 8fe538aaf..178aa43e5 100644 --- a/test/bytecode/MatrixOpTest.java +++ b/test/bytecode/MatrixOpTest.java @@ -32,9 +32,9 @@ public class MatrixOpTest { fileToTest = new File(path); compiler = new JavaTXCompiler(fileToTest); pathToClassFile = System.getProperty("user.dir")+"/testBytecode/generatedBC/"; -// compiler.generateBytecode(pathToClassFile); -// loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)}); -// classToTest = loader.loadClass("MatrixOP"); + compiler.generateBytecode(pathToClassFile); + loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)}); + classToTest = loader.loadClass("MatrixOP"); /* Vector> vv = new Vector>(); Vector v1 = new Vector (); diff --git a/test/bytecode/javFiles/Matrix.jav b/test/bytecode/javFiles/Matrix.jav index 48f6eda29..295051617 100644 --- a/test/bytecode/javFiles/Matrix.jav +++ b/test/bytecode/javFiles/Matrix.jav @@ -1,8 +1,8 @@ import java.util.Vector; import java.lang.Integer; -//import java.lang.Float; +import java.lang.Float; //import java.lang.Byte; -import java.lang.Boolean; +//import java.lang.Boolean; public class Matrix extends Vector> { @@ -19,7 +19,7 @@ public class Matrix extends Vector> { } } - mul(m) { + mul(m) { var ret = new Matrix(); var i = 0; while(i < size()) {
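
The reworked compute() in TypeUnifyTask first normalizes its input: a one-element oder-constraint is semantically an und-constraint and is folded into eq, while only oder-constraints with more than one alternative are kept for the cartesian search. The following is a minimal, self-contained sketch of that split; String stands in for the real UnifyPair sets, so all names and element types here are illustrative, not the compiler's own API.

import java.util.*;
import java.util.stream.*;

public class OderSplit {
    public static void main(String[] args) {
        Set<String> eq = new HashSet<>(Set.of("u1"));
        // Each inner set is one alternative of an oder-constraint.
        List<Set<Set<String>>> oder = List.of(
                Set.of(Set.of("a1", "a2")),              // one alternative -> really an und-constraint
                Set.of(Set.of("b1"), Set.of("b2")));     // two alternatives -> stays an oder-constraint

        Set<String> neweq = new HashSet<>(eq);
        oder.stream().filter(x -> x.size() == 1)         // fold singletons into eq
            .map(y -> y.iterator().next())
            .forEach(neweq::addAll);
        List<Set<Set<String>>> remaining = oder.stream() // keep the ambiguous ones
            .filter(x -> x.size() > 1)
            .collect(Collectors.toCollection(ArrayList::new));

        System.out.println(neweq);     // [u1, a1, a2] (set order may vary)
        System.out.println(remaining); // [[[b1], [b2]]]
    }
}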
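
varianceInheritanceConstraintSet is a fixpoint computation: take any placeholder whose variance is already known, push that variance onto the opposite side of every constraint it occurs in, and repeat until no placeholder with a known but unprocessed variance remains. Below is a simplified standalone model of that loop; PH and UPair are hypothetical stand-ins for PlaceholderType and UnifyPair, and it only handles placeholder-to-placeholder pairs, whereas the real code uses a distributeVariance visitor to reach type variables nested inside a compound type Theta.

import java.util.*;

public class VarianceFixpoint {
    // Hypothetical stand-in for PlaceholderType: a name plus a variance (0 = unknown).
    static class PH {
        final String name; int variance;
        PH(String n, int v) { name = n; variance = v; }
        @Override public String toString() { return name + "(" + variance + ")"; }
    }
    // Hypothetical stand-in for UnifyPair with two placeholder sides.
    static class UPair {
        final PH lhs, rhs;
        UPair(PH l, PH r) { lhs = l; rhs = r; }
    }

    // Inherit variance until no placeholder with a known, unprocessed variance is left.
    static Set<PH> inherit(Set<UPair> eq) {
        Set<PH> used = new HashSet<>();        // corresponds to usedTPH
        Deque<PH> work = new ArrayDeque<>();
        for (UPair p : eq) {
            if (p.lhs.variance != 0) work.add(p.lhs);
            if (p.rhs.variance != 0) work.add(p.rhs);
        }
        while (!work.isEmpty()) {
            PH a = work.poll();
            if (!used.add(a)) continue;        // already processed
            for (UPair p : eq) {               // push a's variance to the other side
                if (p.lhs == a && p.rhs.variance == 0) { p.rhs.variance = a.variance; work.add(p.rhs); }
                if (p.rhs == a && p.lhs.variance == 0) { p.lhs.variance = a.variance; work.add(p.lhs); }
            }
        }
        return used;                           // like the method, report what was visited
    }

    public static void main(String[] args) {
        PH a = new PH("a", 1), b = new PH("b", 0), c = new PH("c", 0);
        Set<UPair> eq = new HashSet<>(Arrays.asList(new UPair(a, b), new UPair(b, c)));
        System.out.println(inherit(eq));       // a, b, c all end up with variance 1
    }
}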
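
The new ConstraintSet.forEach and getAll exist so that callers like varianceInheritanceConstraintSet can treat und- and oder-constraints uniformly as one flat collection of pairs. A sketch of that flattening with String elements follows; the real methods walk the alternatives with parallelStream, the sequential version below is a simplified, hypothetical equivalent.

import java.util.*;
import java.util.function.Consumer;

class MiniConstraintSet {
    Set<String> und = new HashSet<>(Set.of("u1", "u2"));
    List<Set<Set<String>>> oder = List.of(Set.of(Set.of("o1"), Set.of("o2")));

    // Visit every und-constraint and every pair of every oder-alternative.
    void forEach(Consumer<? super String> c) {
        und.forEach(c);
        for (Set<Set<String>> oc : oder)
            oc.forEach(alt -> alt.forEach(c));
    }

    // Flatten everything into one set, like ConstraintSet.getAll().
    Set<String> getAll() {
        Set<String> ret = new HashSet<>(und);
        for (Set<Set<String>> oc : oder)
            oc.forEach(ret::addAll);
        return ret;
    }

    public static void main(String[] args) {
        MiniConstraintSet cs = new MiniConstraintSet();
        cs.forEach(System.out::println);
        System.out.println(cs.getAll()); // [u1, u2, o1, o2] (order may vary)
    }
}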
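
Finally, UnifyPair.getAllBases() gathers the whole derivation chain of a pair: its base pair, that pair's base pair, and so on. computeCartesianRecursive adds these ancestors to abhSubst, so the backtracking filter can also discard alternatives that share a derivation ancestor with a failed branch, not only those sharing a substitution. A trimmed-down model, with BPair as a hypothetical stand-in for UnifyPair:

import java.util.*;

class BPair {
    final String name;
    final BPair basePair;                    // pair this one was derived from, or null

    BPair(String name, BPair basePair) { this.name = name; this.basePair = basePair; }

    // Collect the whole derivation chain, like UnifyPair.getAllBases().
    Set<BPair> getAllBases() {
        Set<BPair> ret = new HashSet<>();
        if (basePair != null) {
            ret.add(basePair);
            ret.addAll(basePair.getAllBases());
        }
        return ret;
    }

    @Override public String toString() { return name; }

    public static void main(String[] args) {
        BPair root = new BPair("a <. Object", null);
        BPair mid  = new BPair("a <. List", root);
        BPair leaf = new BPair("a =. ArrayList", mid);
        System.out.println(leaf.getAllBases()); // [a <. List, a <. Object] (order may vary)
    }
}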