From 2bd61475173f4f31d4c41c16fc57d5e089dd88b8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?=
Date: Thu, 7 Mar 2019 09:14:41 +0100
Subject: [PATCH] modified:
 ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
 modified:
 ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
 modified:
 ../../../../main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
 modified:
 ../../../../main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java

Noch einige Fehler
---
 .../typeinference/unify/TypeUnify2Task.java   |   2 +-
 .../typeinference/unify/TypeUnifyTask.java    | 279 ++----------------
 .../unify/model/OrderingUnifyPair.java        |  12 +
 .../typeinference/unify/model/UnifyPair.java  |  16 +-
 4 files changed, 47 insertions(+), 262 deletions(-)

diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
index 31247aef..315c01e7 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -33,7 +33,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
 			System.out.println("two");
 		}
 		one = true;
-		Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField);
+		Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
 		/*if (isUndefinedPairSetSet(res)) {
 			return new HashSet<>(); }
 		else
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index adc86d34..22e596ab 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -217,7 +217,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
 				.filter(x -> x.size()>1)
 				.collect(Collectors.toCollection(ArrayList::new));
-		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
+		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
 		noOfThread--;
 		try {
 			logFile.close();
@@ -240,253 +240,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	 */
-	public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
-		//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
-		fstElems.addAll(topLevelSets.stream()
-				.filter(x -> x.size()==1)
-				.map(y -> y.stream().findFirst().get())
-				.collect(Collectors.toCollection(HashSet::new)));
-		ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
-				.filter(x -> x.size()>1)
-				.collect(Collectors.toCollection(ArrayList::new));
-		if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
-			Set<UnifyPair> eq = new HashSet<>();
-			fstElems.stream().forEach(x -> eq.addAll(x));
-			Set<Set<UnifyPair>> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
-			return result;
-		}
-		Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
-		writeLog("nextSet: " + nextSet.toString());
-		List<Set<UnifyPair>> nextSetasList = new ArrayList<>(nextSet);
-		try {
-			//List<Set<UnifyPair>>
-			//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
-		}
-		catch (java.lang.IllegalArgumentException e) {
-			System.out.print("");
-		}
-		Set<Set<UnifyPair>> result = new HashSet<>();
-		int variance = 0;
-		Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
-				.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
-				.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
-				.reduce((a,b)-> {if (a==b) return a; else return 0; }))
-				.filter(d -> d.isPresent())
-				.map(e -> e.get())
-				.findAny();
-		if (xi.isPresent()) {
-			variance = xi.get();
-		}
-		//if (variance == 1 && nextSetasList.size() > 1) {
-		//	List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
-		//	for (int ii = 0; ii < nextSetasList.size();ii++) {
-		//		al.add(0,nextSetasList.get(ii));
-		//	}
-		//	nextSetasList = al;
-		//}
-		//Set<UnifyPair> a = nextSetasListIt.next();
-		/*if (nextSetasList.size()>1) {zu loeschen
-			if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
-				System.out.print("");
-			if (variance == 1) {
-				a_next = oup.max(nextSetasList.iterator());
-			}
-			else if (variance == -1) {
-				a_next = oup.min(nextSetasList.iterator());
-			}
-			else if (variance == 0) {
-				a_next = nextSetasList.iterator().next();
-			}
-		}
-		else {
-			a_next = nextSetasList.iterator().next();
-		}
-		*/
-		if (!nextSetasList.iterator().hasNext())
-			System.out.print("");
-		if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
-			System.out.print("");
-		writeLog("nextSetasList: " + nextSetasList.toString());
-		while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
-			Set<UnifyPair> a = null;
-			if (variance == 1) {
-				a = oup.max(nextSetasList.iterator());
-				nextSetasList.remove(a);
-			}
-			else if (variance == -1) {
-				a = oup.min(nextSetasList.iterator());
-				nextSetasList.remove(a);
-			}
-			else if (variance == 0) {
-				a = nextSetasList.remove(0);
-			}
-			//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
-			//nextSetasList.remove(a);
-			/* zu loeschen
-			if (nextSetasList.size() > 0) {
-				if (nextSetasList.size()>1) {
-					if (variance == 1) {
-						a_next = oup.max(nextSetasList.iterator());
-					}
-					else if (variance == -1) {
-						a_next = oup.min(nextSetasList.iterator());
-					}
-					else {
-						a_next = nextSetasList.iterator().next();
-					}
-				}
-				else {
-					a_next = nextSetasList.iterator().next();
-				}
-			}
-			*/
-			//PL 2018-03-01
-			//TODO: 1. Maximum und Minimum unterscheiden
-			//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
-			//for(Set<UnifyPair> a : newSet) {
-				i++;
-				Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
-				elems.add(a);
-				Set<Set<UnifyPair>> res = new HashSet<>();
-				if (remainingSets.isEmpty()) {
-					noou++;
-					writeLog("Vor unify Aufruf: " + eq.toString());
-					writeLog("No of Unify " + noou);
-					System.out.println(noou);
-					Set<UnifyPair> eq = new HashSet<>();
-					elems.stream().forEach(x -> eq.addAll(x));
-					res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
-				}
-				else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
-					res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe);
-
-				}
-				if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
-					//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
-					result = res;
-				}
-				else {
-					if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
-							|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
-							|| result.isEmpty()) {
-						//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
-						result.addAll(res);
-					}
-					//else {
-					//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
-					//	if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
-					//		result = result;
-					//	}
-					//}
-				}
-
-
-
-			/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
-			if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
-				if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
-					System.out.print("");
-				Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
-				if (variance == 1) {
-					System.out.println("");
-					while (nextSetasListIt.hasNext()) {
-						Set<UnifyPair> a_next = nextSetasListIt.next();
-						if (a.equals(a_next) ||
-								(oup.compare(a, a_next) == 1)) {
-							nextSetasList.remove(a_next);
-						}
-						else {
-							System.out.println("");
-						}
-					}
-				}
-				else { if (variance == -1) {
-					System.out.println("");
-					while (nextSetasListIt.hasNext()) {
-						Set<UnifyPair> a_next = nextSetasListIt.next();
-						if (a.equals(a_next) ||
-								(oup.compare(a, a_next) == -1)) {
-							nextSetasList.remove(0);
-						}
-						else {
-							System.out.println("");
-						}
-					}
-				}
-				else if (variance == 0) {
-					//break;
-				}}
-			}
-			/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
-
-			/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
-			if (isUndefinedPairSetSet(res)) {
-				int nofstred= 0;
-				Set<UnifyPair> abhSubst = res.stream()
-						.map(b ->
-							b.stream()
-								.map(x -> x.getAllSubstitutions())
-								.reduce((y,z) -> { y.addAll(z); return y;}).get())
-						.reduce((y,z) -> { y.addAll(z); return y;}).get();
-				Set<UnifyPair> b = a;//effective final a
-				Set<UnifyPair> durchschnitt = abhSubst.stream()
-						.filter(x -> b.contains(x))
-						//.filter(y -> abhSubst.contains(y))
-						.collect(Collectors.toCollection(HashSet::new));
-				//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
-				int len = nextSetasList.size();
-				Set<UnifyPair> undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results
-				Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
-						.map(x -> { Set<UnifyPair> su = x.getAllSubstitutions(); //alle benutzten Substitutionen
-									su.add(x.getGroundBasePair()); // urspruengliches Paar
-									su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen
-									return new Pair<>(su, x.getGroundBasePair());})
-						.collect(Collectors.toCollection(HashSet::new));
-				if (res.size() > 1) {
-					System.out.println();
-				}
-				nextSetasList = nextSetasList.stream().filter(x -> {
-					//Boolean ret = false;
-					//for (PlaceholderType var : vars) {
-					//	ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
-					//}
-					return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist??
-					})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
-					.collect(Collectors.toCollection(ArrayList::new));
-				nofstred = nextSetasList.size();
-				//NOCH NICHT korrekt PL 2018-10-12
-				//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
-				//		.collect(Collectors.toCollection(ArrayList::new));
-				writeLog("res (undef): " + res.toString());
-				writeLog("abhSubst: " + abhSubst.toString());
-				writeLog("a: " + a.toString());
-				writeLog("Durchschnitt: " + durchschnitt.toString());
-				writeLog("nextSet: " + nextSet.toString());
-				writeLog("nextSetasList: " + nextSetasList.toString());
-				writeLog("Number first erased Elements (undef): " + (len - nofstred));
-				writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
-				writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
-				noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
-				writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
-				noBacktracking++;
-				writeLog("Number of Backtracking: " + noBacktracking);
-				System.out.println("");
-			}
-			*/
-			//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
-			//	return result;
-			//}
-			//else {
-			//	result.removeIf(y -> isUndefinedPairSet(y));
-			//}
-			//else result.stream().filter(y -> !isUndefinedPairSet(y));
-
-		} // End of while (nextSetasList.size() > 0)
-		return result;
-	}
-
+
 	/**
 	 * Computes all principal type unifiers for a set of constraints.
@@ -494,7 +248,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	 * @param fc The finite closure
 	 * @return The set of all principal type unifiers
 	 */
-	protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+	protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
 		//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
 		//				).collect(Collectors.toCollection(HashSet::new));
 		//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -647,12 +401,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
 		//Aufruf von computeCartesianRecursive ANFANG
 		//writeLog("topLevelSets: " + topLevelSets.toString());
-		return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, collectErr);
+		return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, collectErr, finalresult);
 
 	}
 
 
-	Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+	Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
 		//Aufruf von computeCartesianRecursive ENDE
 
 		//keine Ahnung woher das kommt
@@ -729,15 +483,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 			//	System.err.println("log-File nicht vorhanden");
 			//}
 			eqPrimePrimeSet.add(eqPrime);
-			urm.notify(eqPrimePrimeSet);
+			if (finalresult) {
+				urm.notify(eqPrimePrimeSet);
+			}
 		}
 		else if(eqPrimePrime.isPresent()) {
-			Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe);
+			Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, finalresult);
 
 			eqPrimePrimeSet.addAll(unifyres);
 		}
 		else {
-			Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe);
+			Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, finalresult);
 
 
 			eqPrimePrimeSet.addAll(unifyres);
@@ -769,7 +525,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
 
 
-	Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<Set<UnifyPair>> collectErr) {
+	Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<Set<UnifyPair>> collectErr, Boolean finalresult) {
 		//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
 		fstElems.addAll(topLevelSets.stream()
 				.filter(x -> x.size()==1)
@@ -779,7 +535,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				.filter(x -> x.size()>1)
 				.collect(Collectors.toCollection(ArrayList::new));
 		if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
-			Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
+			Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
 			return result;
 		}
 		Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
@@ -1128,7 +884,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 						} else {
 							//parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
 							elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
-							res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
+							res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
 					}}}
 			if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
 				//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
@@ -1164,6 +920,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 					//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
 					//System.out.println(a_last);
 					a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});
+					try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
 					List<PlaceholderType> varsLast_a = a_last.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
 							&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
@@ -1210,6 +967,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 							writeLog("RES var=1 ADD:" + result.toString() + " " + res.toString());
 							result.addAll(res);
 						}}}
+					}
+					catch (NullPointerException e) {
+						writeLog("NullPointerException: " + a_last.toString());
+					}
 				}
 				else {
 					//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
@@ -1845,7 +1606,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 						Set<UnifyPair> unitedSubst = new HashSet<>(type.getSubstitution());
 						unitedSubst.addAll(sameEq.getSubstitution());
 						localEq.add(new UnifyPair(type.getRhsType(), sameEq.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
-						Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, parallel, 0);
+						Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
 						Boolean localCorr = !isUndefinedPairSetSet(localRes);
 						if (!localCorr) {
 							collectErr.addAll(localRes);
@@ -1865,7 +1626,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 						Set<UnifyPair> unitedSubst = new HashSet<>(type.getSubstitution());
 						unitedSubst.addAll(sameEq.getSubstitution());
 						localEq.add(new UnifyPair(sameEq.getLhsType(), type.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
-						Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, parallel, 0);
+						Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
 						Boolean localCorr = !isUndefinedPairSetSet(localRes);
 						if (!localCorr) {
 							collectErr.addAll(localRes);
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
index 28da9a80..736186d5 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
@@ -1,5 +1,6 @@
 package de.dhbwstuttgart.typeinference.unify.model;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -31,14 +32,25 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
 	 * in dem compare(Theta, Theta') aufgerufen wird.
 	 */
 	public int compareEq (UnifyPair left, UnifyPair right) {
+		try {
 		//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
 		if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
 			return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
 		}
 		else {
 			return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
+		}}
+		catch (ClassCastException e) {
+			try {
+				((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() +"\n\n");
+				((FiniteClosure)fc).logFile.flush();
+			}
+			catch (IOException ie) {
+			}
+			return -99;
 		}
 	}
+
 	/*
 	public int compareEq (UnifyPair left, UnifyPair right) {
 		if (left == null || right == null)
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
index 19f6010f..b1fecd2c 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
@@ -177,6 +177,16 @@ public class UnifyPair {
 		}
 	}
 
+	public void disableCondWildcards() {
+		if (lhs instanceof PlaceholderType && rhs instanceof PlaceholderType
+				&& (!((PlaceholderType)lhs).isWildcardable() || !((PlaceholderType)rhs).isWildcardable()))
+		{
+			((PlaceholderType)lhs).disableWildcardtable();
+			((PlaceholderType)rhs).disableWildcardtable();
+		}
+
+	}
+
 	public Boolean wrongWildcard() {
 		return lhs.wrongWildcard() || rhs.wrongWildcard();
 	}
@@ -219,10 +229,12 @@ public class UnifyPair {
 	public String toString() {
 		String ret = "";
 		if (lhs instanceof PlaceholderType) {
-			ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType();
+			ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType()
+					+ " " + ((PlaceholderType)lhs).isWildcardable();
 		}
 		if (rhs instanceof PlaceholderType) {
-			ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType();
+			ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType()
+					+ " " + ((PlaceholderType)rhs).isWildcardable();
 		}
 		return "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
 	}
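
Note, not part of the patch itself: the central change above threads a new Boolean parameter "finalresult" through unify, unify2 and computeCartesianRecursive, so that only the top-level unification publishes its unifiers via urm.notify(eqPrimePrimeSet), while nested, speculative calls (for example the two local consistency checks that now pass false) run silently. The following self-contained sketch illustrates that pattern in isolation; the class and member names (RecursiveSolverSketch, solve, resultListener) are invented for this example and are not part of the JavaTX sources.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;

// Simplified illustration of the "finalresult" pattern: only the outermost call
// publishes its result set to a listener; recursive (speculative) calls run with
// finalResult == false and stay silent.
class RecursiveSolverSketch {

	private final Consumer<Set<String>> resultListener;

	RecursiveSolverSketch(Consumer<Set<String>> resultListener) {
		this.resultListener = resultListener;
	}

	// Public entry point: the top-level call is the only one allowed to notify.
	Set<String> solve(List<String> constraints) {
		return solve(constraints, true);
	}

	private Set<String> solve(List<String> constraints, boolean finalResult) {
		Set<String> solutions = new HashSet<>();
		if (constraints.isEmpty()) {
			solutions.add("{}");
		} else {
			// Speculative sub-search: pass false so partial results are not published.
			Set<String> partial = solve(constraints.subList(1, constraints.size()), false);
			for (String p : partial) {
				solutions.add(constraints.get(0) + "," + p);
			}
		}
		if (finalResult) {
			// Corresponds to the guarded urm.notify(eqPrimePrimeSet) in the patch.
			resultListener.accept(solutions);
		}
		return solutions;
	}

	public static void main(String[] args) {
		RecursiveSolverSketch solver =
				new RecursiveSolverSketch(r -> System.out.println("published: " + r));
		List<String> constraints = new ArrayList<>(List.of("a <. b", "b <. c"));
		solver.solve(constraints); // only the outermost call prints
	}
}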