From 241c7f37d9a9014ed6e81db72c8feddb6f2488fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Thu, 11 Oct 2018 00:45:59 +0200 Subject: [PATCH 1/8] modified: src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java weitere Element rausfiltern angefangen modified: src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java Methode: getGroundBasePair eingefuegt --- .../typeinference/unify/TypeUnifyTask.java | 59 ++++++++++++++++--- .../typeinference/unify/model/UnifyPair.java | 13 ++++ 2 files changed, 63 insertions(+), 9 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 0e396810..3f9466cf 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -566,15 +566,29 @@ public class TypeUnifyTask extends RecursiveTask>> { .filter(x -> b.contains(x)) //.filter(y -> abhSubst.contains(y)) .collect(Collectors.toCollection(HashSet::new)); - Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); + //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); int len = nextSetasList.size(); - nextSetasList = nextSetasList.stream().filter(x -> { - //Boolean ret = false; - //for (PlaceholderType var : vars) { - // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); - //} - return (!x.containsAll(durchschnitt)); - }).collect(Collectors.toCollection(ArrayList::new)); + if (!durchschnitt.isEmpty()) { + UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); + Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); + Set> reducedUndefResSubst = undefRes.stream() + .map(x -> x.getAllSubstitutions()).map(y -> { y.removeAll(durchschnitt); return y;}) + .collect(Collectors.toCollection(HashSet::new)); + Set resGroundBasepairs = undefRes.stream().map(x -> x.getGroundBasePair().get()).collect(Collectors.toCollection(HashSet::new)); + if (res.size() > 1) { + System.out.println(); + } + Set reducedAbhSubst = new HashSet<>(abhSubst); + reducedAbhSubst.removeAll(durchschnitt); + nextSetasList = nextSetasList.stream().filter(x -> { + //Boolean ret = false; + //for (PlaceholderType var : vars) { + // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); + //} + return (!x.containsAll(durchschnitt)); + }).filter(y -> couldBecorrect(reducedAbhSubst, groundBasepair, y)) + .collect(Collectors.toCollection(ArrayList::new)); + } writeLog("abhSubst: " + abhSubst.toString()); writeLog("a: " + a.toString()); writeLog("Durchschnitt: " + durchschnitt.toString()); @@ -598,7 +612,34 @@ public class TypeUnifyTask extends RecursiveTask>> { return result; } - + protected boolean couldBecorrect(Set reducedAbhSubst, UnifyPair groundBasepair, Set nextElem) { + reducedAbhSubst.add(groundBasepair); + reducedAbhSubst.addAll(nextElem); + Optional> substRes = rules.subst(reducedAbhSubst); + if (!substRes.isPresent()) { + return true; + } + else { + UnifyPair checkPair = substRes.get().stream().filter(x -> x.getGroundBasePair().get().equals(groundBasepair)).findFirst().get(); + if ((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) { + Set up = new HashSet<>(); + up.add(checkPair); + Set undef = new HashSet<>(); + 
calculatePairSets(up, fc, undef); + if (undef.isEmpty()) { + return true; + } + else { + return false; + } + } + else { + //Pair type <. type' betrachten TODO PL 2018-10-09 + } + } + return true; + } + protected boolean isUndefinedPairSet(Set s) { if (s.size() >= 1 ) { Boolean ret = s.stream().map(x -> x.isUndefinedPair()).reduce(true, (x,y)-> (x && y)); diff --git a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java index 86e13e1d..dd185836 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java +++ b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java @@ -4,6 +4,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; @@ -151,6 +152,18 @@ public class UnifyPair { return ret; } + public Optional getGroundBasePair () { + if (basePair == null) { + return Optional.empty(); + } + if (basePair.getBasePair() == null) { + return Optional.of(basePair); + } + else { + return basePair.getGroundBasePair(); + } + } + public Boolean wrongWildcard() { return lhs.wrongWildcard() || rhs.wrongWildcard(); } From bda7dcb5c1d08f7a3d522988870c1d5f75697727 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Thu, 11 Oct 2018 09:47:55 +0200 Subject: [PATCH 2/8] modified: src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java --- src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 3f9466cf..b1cbbe7b 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -571,8 +571,9 @@ public class TypeUnifyTask extends RecursiveTask>> { if (!durchschnitt.isEmpty()) { UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); - Set> reducedUndefResSubst = undefRes.stream() - .map(x -> x.getAllSubstitutions()).map(y -> { y.removeAll(durchschnitt); return y;}) + Set> reducedUndefResSubstGroundedBasePair = undefRes.stream() + .map(x -> { Set su = x.getAllSubstitutions(); su.add(x.getGroundBasePair().get()); return su;}) + .map(y -> { y.removeAll(durchschnitt); return y;}) .collect(Collectors.toCollection(HashSet::new)); Set resGroundBasepairs = undefRes.stream().map(x -> x.getGroundBasePair().get()).collect(Collectors.toCollection(HashSet::new)); if (res.size() > 1) { From 1b7bded3c365c50ca3f00b83ed372803d085485a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Thu, 11 Oct 2018 12:40:20 +0200 Subject: [PATCH 3/8] =?UTF-8?q?=09modified:=20=20=20../../src/de/dhbwstutt?= =?UTF-8?q?gart/typeinference/unify/TypeUnifyTask.java=20=09modified:=20?= =?UTF-8?q?=20=20../../src/de/dhbwstuttgart/typeinference/unify/model/Unif?= =?UTF-8?q?yPair.java=201.=20Version=20l=C3=A4uft=20aber=20nicht=20schnell?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../typeinference/unify/TypeUnifyTask.java | 79 +++++++++++-------- .../typeinference/unify/model/UnifyPair.java | 6 +- 2 files changed, 48 insertions(+), 37 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java 
index b1cbbe7b..5980d29f 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -31,6 +31,7 @@ import de.dhbwstuttgart.typeinference.unify.model.Unifier; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyType; import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair; +import de.dhbwstuttgart.typeinference.unify.model.Pair; import java.io.File; import java.io.FileWriter; @@ -553,8 +554,9 @@ public class TypeUnifyTask extends RecursiveTask>> { } } /* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */ - + if (isUndefinedPairSetSet(res)) { + int nofstred= 0; Set abhSubst = res.stream() .map(b -> b.stream() @@ -569,35 +571,41 @@ public class TypeUnifyTask extends RecursiveTask>> { //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); int len = nextSetasList.size(); if (!durchschnitt.isEmpty()) { - UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); - Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); - Set> reducedUndefResSubstGroundedBasePair = undefRes.stream() - .map(x -> { Set su = x.getAllSubstitutions(); su.add(x.getGroundBasePair().get()); return su;}) - .map(y -> { y.removeAll(durchschnitt); return y;}) + //UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); + Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results + Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream() + .map(x -> { Set su = x.getAllSubstitutions(); //alle benutzten Substitutionen + su.add(x.getGroundBasePair()); // urspruengliches Paar + su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen + return new Pair<>(su, x.getGroundBasePair());}) .collect(Collectors.toCollection(HashSet::new)); - Set resGroundBasepairs = undefRes.stream().map(x -> x.getGroundBasePair().get()).collect(Collectors.toCollection(HashSet::new)); + //Set resGroundBasepairs = undefRes.stream().map(x -> x.getGroundBasePair().get()).collect(Collectors.toCollection(HashSet::new)); if (res.size() > 1) { System.out.println(); } - Set reducedAbhSubst = new HashSet<>(abhSubst); - reducedAbhSubst.removeAll(durchschnitt); + //Set reducedAbhSubst = new HashSet<>(abhSubst); + //reducedAbhSubst.removeAll(durchschnitt); nextSetasList = nextSetasList.stream().filter(x -> { //Boolean ret = false; //for (PlaceholderType var : vars) { // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); //} return (!x.containsAll(durchschnitt)); - }).filter(y -> couldBecorrect(reducedAbhSubst, groundBasepair, y)) + })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 .collect(Collectors.toCollection(ArrayList::new)); + nofstred = nextSetasList.size(); + //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) + // .collect(Collectors.toCollection(ArrayList::new)); } writeLog("abhSubst: " + abhSubst.toString()); writeLog("a: " + a.toString()); writeLog("Durchschnitt: " + durchschnitt.toString()); writeLog("nextSet: " + nextSet.toString()); writeLog("nextSetasList: " + nextSetasList.toString()); + writeLog("Number first erased Elements (undef): " + (len - 
nofstred)); writeLog("Number erased Elements (undef): " + (len - nextSetasList.size())); noAllErasedElements = noAllErasedElements + (len - nextSetasList.size()); - writeLog("Number erased Elements (undef): " + noAllErasedElements.toString()); + writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString()); noBacktracking++; writeLog("Number of Backtracking: " + noBacktracking); System.out.println(""); @@ -613,32 +621,35 @@ public class TypeUnifyTask extends RecursiveTask>> { return result; } - protected boolean couldBecorrect(Set reducedAbhSubst, UnifyPair groundBasepair, Set nextElem) { - reducedAbhSubst.add(groundBasepair); - reducedAbhSubst.addAll(nextElem); - Optional> substRes = rules.subst(reducedAbhSubst); - if (!substRes.isPresent()) { - return true; - } - else { - UnifyPair checkPair = substRes.get().stream().filter(x -> x.getGroundBasePair().get().equals(groundBasepair)).findFirst().get(); - if ((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) { - Set up = new HashSet<>(); - up.add(checkPair); - Set undef = new HashSet<>(); - calculatePairSets(up, fc, undef); - if (undef.isEmpty()) { - return true; - } - else { - return false; - } + protected boolean couldBecorrect(Set, UnifyPair>> reducedUndefResSubstGroundedBasePair, Set nextElem) { + return reducedUndefResSubstGroundedBasePair.stream() + .map(pair -> { + Set reducedAbhSubst = pair.getKey(); + reducedAbhSubst.addAll(nextElem); + Optional> substRes = rules.subst(reducedAbhSubst); + if (!substRes.isPresent()) { + return true; } else { - //Pair type <. type' betrachten TODO PL 2018-10-09 + UnifyPair checkPair = substRes.get().stream() + .filter(x -> x.getGroundBasePair().equals(pair.getValue().get())).findFirst().get(); + if ((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) { + Set up = new HashSet<>(); + up.add(checkPair); + Set undef = new HashSet<>(); + calculatePairSets(up, fc, undef); + if (undef.isEmpty()) { + return true; + } + else { + return false; + } + } + else { + //Pair type <. type' betrachten TODO PL 2018-10-09 + } } - } - return true; + return true;}).reduce((xx, yy) -> xx || yy).get(); } protected boolean isUndefinedPairSet(Set s) { diff --git a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java index dd185836..d69138a7 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java +++ b/src/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java @@ -152,12 +152,12 @@ public class UnifyPair { return ret; } - public Optional getGroundBasePair () { + public UnifyPair getGroundBasePair () { if (basePair == null) { - return Optional.empty(); + return this; } if (basePair.getBasePair() == null) { - return Optional.of(basePair); + return basePair; } else { return basePair.getGroundBasePair(); From 56dd7597627a0efb6b9e7ba961bebfc9c3449f29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Thu, 11 Oct 2018 13:09:18 +0200 Subject: [PATCH 4/8] modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java Nur bei leerem Durchscnit filtern geloescht. 
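For orientation on the filtering that patches 1/8 through 5/8 iterate on: the first erase stage drops every candidate element of nextSetasList that contains all pairs of the intersection ("durchschnitt") computed from the substitutions of the failed results, and only the survivors are then handed to the couldBecorrect check. The following is a minimal, self-contained sketch of that first stage under simplified assumptions; FirstEraseSketch and firstErase are made-up names, and the element type is kept generic instead of using the project's UnifyPair.

    // Hedged sketch, not project code: first erase stage of the undef-result filtering.
    // Keeps only candidate sets that do NOT contain every pair of the failed intersection.
    import java.util.*;
    import java.util.stream.*;

    class FirstEraseSketch {
        static <P> List<Set<P>> firstErase(List<Set<P>> nextSetasList, Set<P> durchschnitt) {
            return nextSetasList.stream()
                    .filter(candidate -> !candidate.containsAll(durchschnitt))
                    .collect(Collectors.toCollection(ArrayList::new));
        }
    }
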
--- .../dhbwstuttgart/typeinference/unify/TypeUnifyTask.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 5980d29f..91aeba6e 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -570,7 +570,8 @@ public class TypeUnifyTask extends RecursiveTask>> { .collect(Collectors.toCollection(HashSet::new)); //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); int len = nextSetasList.size(); - if (!durchschnitt.isEmpty()) { + //if (!durchschnitt.isEmpty()) + { //UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream() @@ -594,8 +595,8 @@ public class TypeUnifyTask extends RecursiveTask>> { })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 .collect(Collectors.toCollection(ArrayList::new)); nofstred = nextSetasList.size(); - //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) - // .collect(Collectors.toCollection(ArrayList::new)); + nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) + .collect(Collectors.toCollection(ArrayList::new)); } writeLog("abhSubst: " + abhSubst.toString()); writeLog("a: " + a.toString()); From 2be9055608cf884ae020ad8fa20bf79428771244 Mon Sep 17 00:00:00 2001 From: Pluemicke Martin Date: Thu, 11 Oct 2018 16:31:02 +0200 Subject: [PATCH 5/8] modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java aufgeraeumt --- .../typeinference/unify/TypeUnifyTask.java | 47 ++++++++----------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 91aeba6e..63c2bd31 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -570,34 +570,27 @@ public class TypeUnifyTask extends RecursiveTask>> { .collect(Collectors.toCollection(HashSet::new)); //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); int len = nextSetasList.size(); - //if (!durchschnitt.isEmpty()) - { - //UnifyPair groundBasepair = res.iterator().next().iterator().next().getGroundBasePair().get(); - Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results - Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream() - .map(x -> { Set su = x.getAllSubstitutions(); //alle benutzten Substitutionen - su.add(x.getGroundBasePair()); // urspruengliches Paar - su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen - return new Pair<>(su, x.getGroundBasePair());}) - .collect(Collectors.toCollection(HashSet::new)); - //Set resGroundBasepairs = undefRes.stream().map(x -> x.getGroundBasePair().get()).collect(Collectors.toCollection(HashSet::new)); - if (res.size() > 1) { - System.out.println(); - } - //Set reducedAbhSubst = new 
HashSet<>(abhSubst); - //reducedAbhSubst.removeAll(durchschnitt); - nextSetasList = nextSetasList.stream().filter(x -> { - //Boolean ret = false; - //for (PlaceholderType var : vars) { - // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); - //} - return (!x.containsAll(durchschnitt)); - })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 - .collect(Collectors.toCollection(ArrayList::new)); - nofstred = nextSetasList.size(); - nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) - .collect(Collectors.toCollection(ArrayList::new)); + Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results + Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream() + .map(x -> { Set su = x.getAllSubstitutions(); //alle benutzten Substitutionen + su.add(x.getGroundBasePair()); // urspruengliches Paar + su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen + return new Pair<>(su, x.getGroundBasePair());}) + .collect(Collectors.toCollection(HashSet::new)); + if (res.size() > 1) { + System.out.println(); } + nextSetasList = nextSetasList.stream().filter(x -> { + //Boolean ret = false; + //for (PlaceholderType var : vars) { + // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); + //} + return (!x.containsAll(durchschnitt)); + })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 + .collect(Collectors.toCollection(ArrayList::new)); + nofstred = nextSetasList.size(); + nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) + .collect(Collectors.toCollection(ArrayList::new)); writeLog("abhSubst: " + abhSubst.toString()); writeLog("a: " + a.toString()); writeLog("Durchschnitt: " + durchschnitt.toString()); From 99bf02606b3fa06abf2be9ddb4018028687b0c8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Fri, 12 Oct 2018 13:17:59 +0200 Subject: [PATCH 6/8] modified: src/de/dhbwstuttgart/typeinference/result/ResultPair.java toString() eingefuegt MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit modified: src/de/dhbwstuttgart/typeinference/result/ResultSet.java toString() eingefuegt modified: src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java int-Lieterals können auch double-Literals sein. 
modified: src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java kleien Ausgabeerweiterung --- .../dhbwstuttgart/typeinference/result/ResultPair.java | 4 ++++ src/de/dhbwstuttgart/typeinference/result/ResultSet.java | 6 +++++- .../dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java | 9 ++++++++- .../dhbwstuttgart/typeinference/unify/TypeUnifyTask.java | 1 + 4 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/result/ResultPair.java b/src/de/dhbwstuttgart/typeinference/result/ResultPair.java index 19bd65f9..749f9c1c 100644 --- a/src/de/dhbwstuttgart/typeinference/result/ResultPair.java +++ b/src/de/dhbwstuttgart/typeinference/result/ResultPair.java @@ -23,4 +23,8 @@ public abstract class ResultPair oderConstraints = new HashSet<>(); + Constraint constraint = new Constraint(); + constraint.add(new Pair(literal.getType(), integer, PairOperator.EQUALSDOT)); + oderConstraints.add(constraint); + constraint = new Constraint(); + constraint.add(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT)); + oderConstraints.add(constraint); + constraintsSet.addOderConstraint(oderConstraints); return; } if (literal.value instanceof Short) { diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 63c2bd31..f51c351e 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -597,6 +597,7 @@ public class TypeUnifyTask extends RecursiveTask>> { writeLog("nextSet: " + nextSet.toString()); writeLog("nextSetasList: " + nextSetasList.toString()); writeLog("Number first erased Elements (undef): " + (len - nofstred)); + writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size())); writeLog("Number erased Elements (undef): " + (len - nextSetasList.size())); noAllErasedElements = noAllErasedElements + (len - nextSetasList.size()); writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString()); From 693b47b61987f31a7b9b3731387bceb76ff3a841 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Fri, 12 Oct 2018 23:44:48 +0200 Subject: [PATCH 7/8] modified: src/de/dhbwstuttgart/core/JavaTXCompiler.java modified: src/de/dhbwstuttgart/typeinference/result/ResultSet.java modified: src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java modified: src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java modified: src/de/dhbwstuttgart/typeinference/unify/model/Pair.java --- src/de/dhbwstuttgart/core/JavaTXCompiler.java | 5 +++- .../typeinference/result/ResultSet.java | 3 +++ .../typeinference/typeAlgo/TYPEStmt.java | 3 +++ .../typeinference/unify/TypeUnifyTask.java | 26 +++++++++++++++---- .../typeinference/unify/model/Pair.java | 4 +++ 5 files changed, 35 insertions(+), 6 deletions(-) diff --git a/src/de/dhbwstuttgart/core/JavaTXCompiler.java b/src/de/dhbwstuttgart/core/JavaTXCompiler.java index 6f25a763..c09c90b0 100644 --- a/src/de/dhbwstuttgart/core/JavaTXCompiler.java +++ b/src/de/dhbwstuttgart/core/JavaTXCompiler.java @@ -29,6 +29,7 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure; import de.dhbwstuttgart.typeinference.unify.model.PairOperator; import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; +import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask; import java.io.File; import java.io.FileOutputStream; @@ -200,7 +201,9 @@ public class 
JavaTXCompiler { if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); return y; //alle Paare a <.? b erden durch a =. b ersetzt }).collect(Collectors.toCollection(HashSet::new))); - if (res.isPresent()) return res.get(); //wenn subst ein Erg liefert wurde was veraendert + if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert + return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure); + } else return x; //wenn nichts veraendert wurde wird x zurueckgegeben }).collect(Collectors.toCollection(HashSet::new)); System.out.println("RESULT Final: " + results); diff --git a/src/de/dhbwstuttgart/typeinference/result/ResultSet.java b/src/de/dhbwstuttgart/typeinference/result/ResultSet.java index 44cd749c..eef72ab0 100644 --- a/src/de/dhbwstuttgart/typeinference/result/ResultSet.java +++ b/src/de/dhbwstuttgart/typeinference/result/ResultSet.java @@ -48,6 +48,7 @@ class Resolver implements ResultSetVisitor { public ResolvedType resolve(TypePlaceholder tph){ toResolve = tph; resolved = null; + System.out.println(tph.toString()); for(ResultPair resultPair : result.results){ if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){ return resolve(((PairTPHEqualTPH) resultPair).getRight()); @@ -113,6 +114,8 @@ class Resolver implements ResultSetVisitor { } + + } /** diff --git a/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java b/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java index c66c6220..5a9085cd 100644 --- a/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java +++ b/src/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java @@ -379,6 +379,8 @@ public class TYPEStmt implements StatementVisitor{ return; } if (literal.value instanceof Integer) { + //constraintsSet.addUndConstraint(new Pair(literal.getType(),integer, PairOperator.EQUALSDOT)); + // /* Set oderConstraints = new HashSet<>(); Constraint constraint = new Constraint(); constraint.add(new Pair(literal.getType(), integer, PairOperator.EQUALSDOT)); @@ -387,6 +389,7 @@ public class TYPEStmt implements StatementVisitor{ constraint.add(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT)); oderConstraints.add(constraint); constraintsSet.addOderConstraint(oderConstraints); + // */ return; } if (literal.value instanceof Short) { diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index f51c351e..42cd4835 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -25,6 +25,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify; import de.dhbwstuttgart.typeinference.unify.model.ExtendsType; import de.dhbwstuttgart.typeinference.unify.model.PairOperator; import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType; +import de.dhbwstuttgart.typeinference.unify.model.ReferenceType; import de.dhbwstuttgart.typeinference.unify.model.SuperType; import de.dhbwstuttgart.typeinference.unify.model.TypeParams; import de.dhbwstuttgart.typeinference.unify.model.Unifier; @@ -589,8 +590,10 @@ public class TypeUnifyTask extends RecursiveTask>> { })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 .collect(Collectors.toCollection(ArrayList::new)); nofstred = nextSetasList.size(); - nextSetasList = nextSetasList.stream().filter(y -> 
couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) - .collect(Collectors.toCollection(ArrayList::new)); + //NOCH NICHT korrekt PL 2018-10-12 + //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) + // .collect(Collectors.toCollection(ArrayList::new)); + writeLog("res (undef): " + res.toString()); writeLog("abhSubst: " + abhSubst.toString()); writeLog("a: " + a.toString()); writeLog("Durchschnitt: " + durchschnitt.toString()); @@ -625,10 +628,15 @@ public class TypeUnifyTask extends RecursiveTask>> { if (!substRes.isPresent()) { return true; } + //PL 2018-10-12 + //Evtl. zurest applyTypeUnification aufrufen + //evtl auch unify aufrufen else { UnifyPair checkPair = substRes.get().stream() .filter(x -> x.getGroundBasePair().equals(pair.getValue().get())).findFirst().get(); - if ((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) { + if (((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) + && (checkPair.getPairOp() == PairOperator.SMALLERDOT || checkPair.getPairOp() == PairOperator.SMALLERDOTWC)) + { Set up = new HashSet<>(); up.add(checkPair); Set undef = new HashSet<>(); @@ -637,11 +645,19 @@ public class TypeUnifyTask extends RecursiveTask>> { return true; } else { + writeLog("Second erase:" +checkPair.toString()); return false; } + } else { + if ((checkPair.getLhsType() instanceof ReferenceType) && (checkPair.getRhsType() instanceof ReferenceType)) + // && (checkPair.getPairOp() == PairOperator.SMALLERDOT || checkPair.getPairOp() == PairOperator.SMALLERDOTWC) + { + Set setCheckPair = new HashSet<>(); + setCheckPair.add(checkPair); + return isUndefinedPairSet(applyTypeUnificationRules(setCheckPair, fc)); + } else { + //Pair type <. ? extends ? extends type betrachten TODO PL 2018-10-09 } - else { - //Pair type <. 
type' betrachten TODO PL 2018-10-09 } } return true;}).reduce((xx, yy) -> xx || yy).get(); diff --git a/src/de/dhbwstuttgart/typeinference/unify/model/Pair.java b/src/de/dhbwstuttgart/typeinference/unify/model/Pair.java index 08c4aa1c..de05c97d 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/model/Pair.java +++ b/src/de/dhbwstuttgart/typeinference/unify/model/Pair.java @@ -18,4 +18,8 @@ public class Pair { public T getKey() { return key; } + + public String toString() { + return "(" + key.toString() + "," + "," + value.toString() + ")\n"; + } } From 60be47c0f1eb045c62a37ac4f20ae9e608128bab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?= Date: Wed, 17 Oct 2018 07:31:58 +0200 Subject: [PATCH 8/8] modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java --- .../typeinference/unify/TypeUnifyTask.java | 88 ++++++++++++++----- 1 file changed, 67 insertions(+), 21 deletions(-) diff --git a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java index 42cd4835..7864ece1 100644 --- a/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java +++ b/src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java @@ -23,6 +23,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet; import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations; import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify; import de.dhbwstuttgart.typeinference.unify.model.ExtendsType; +import de.dhbwstuttgart.typeinference.unify.model.FunNType; import de.dhbwstuttgart.typeinference.unify.model.PairOperator; import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType; import de.dhbwstuttgart.typeinference.unify.model.ReferenceType; @@ -84,7 +85,7 @@ public class TypeUnifyTask extends RecursiveTask>> { Integer noAllErasedElements = 0; - Integer noBacktracking = 0; + static int noBacktracking; public TypeUnifyTask() { rules = new RuleSet(); @@ -587,7 +588,7 @@ public class TypeUnifyTask extends RecursiveTask>> { // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get(); //} return (!x.containsAll(durchschnitt)); - })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 + })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 .collect(Collectors.toCollection(ArrayList::new)); nofstred = nextSetasList.size(); //NOCH NICHT korrekt PL 2018-10-12 @@ -637,29 +638,74 @@ public class TypeUnifyTask extends RecursiveTask>> { if (((checkPair.getLhsType() instanceof PlaceholderType) || (checkPair.getRhsType() instanceof PlaceholderType)) && (checkPair.getPairOp() == PairOperator.SMALLERDOT || checkPair.getPairOp() == PairOperator.SMALLERDOTWC)) { - Set up = new HashSet<>(); - up.add(checkPair); - Set undef = new HashSet<>(); - calculatePairSets(up, fc, undef); - if (undef.isEmpty()) { - return true; - } - else { - writeLog("Second erase:" +checkPair.toString()); - return false; - } - } else { - if ((checkPair.getLhsType() instanceof ReferenceType) && (checkPair.getRhsType() instanceof ReferenceType)) - // && (checkPair.getPairOp() == PairOperator.SMALLERDOT || checkPair.getPairOp() == PairOperator.SMALLERDOTWC) - { + /* Set setCheckPair = new HashSet<>(); setCheckPair.add(checkPair); - return isUndefinedPairSet(applyTypeUnificationRules(setCheckPair, fc)); + Set setReturnCheckPair = 
applyTypeUnificationRules(setCheckPair, fc); + UnifyPair checkPair1 = setReturnCheckPair.iterator().next(); + Set up = new HashSet<>(); + up.add(checkPair1); + Set undef = new HashSet<>(); + */ + PairOperator pairOp = checkPair.getPairOp(); + UnifyType lhsType = checkPair.getLhsType(); + UnifyType rhsType = checkPair.getRhsType(); + ///* Case 1: (a <. Theta') + if ((((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) + // Case 2: (a <.? ? ext Theta') + || (pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType) + // Case 3: (a <.? ? sup Theta') + || (pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType) + + // Case 4 was replaced by an inference rule + // Case 4: (a <.? Theta') + || (pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType) + // Case 5: (Theta <. a) + || ((pairOp == PairOperator.SMALLERDOT) && rhsType instanceof PlaceholderType) + // Case 6 was replaced by an inference rule. + // Case 6: (? ext Theta <.? a) + || (pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType) + // Case 7 was replaced by an inference rule + // Case 7: (? sup Theta <.? a) + || (pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType) + // Case 8: (Theta <.? a) + || (pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType) + //reduceWildcardLow + || (pairOp == PairOperator.SMALLERDOTWC && (lhsType instanceof ExtendsType) && (rhsType instanceof ExtendsType)) + //reduceWildcardLowRight + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof ReferenceType) && (rhsType instanceof ExtendsType)) + //reduceWildcardUp + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof SuperType) && (rhsType instanceof SuperType)) + //reduceWildcardUpRight + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof ReferenceType) && (rhsType instanceof SuperType)) + //reduceFunN + || (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.EQUALSDOT)) + //PL 2017-10-03 hinzugefuegt + //da Regel auch fuer EQUALSDOT anwendbar + && (lhsType instanceof FunNType) && (rhsType instanceof FunNType)) + //greaterFunN + || ((pairOp== PairOperator.SMALLERDOT) && (lhsType instanceof FunNType) && (rhsType instanceof PlaceholderType)) + //smallerFunN + || ((pairOp == PairOperator.SMALLERDOT) && (lhsType instanceof PlaceholderType && rhsType instanceof FunNType)) + //reduceTph + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof PlaceholderType && rhsType instanceof ReferenceType)) + //reduceTphExt + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof ExtendsType) && rhsType instanceof PlaceholderType) + //reduceTphSup + || ((pairOp == PairOperator.SMALLERDOTWC) && (lhsType instanceof SuperType) && rhsType instanceof PlaceholderType)) { + return true; + } + // Case unknown: If a pair fits no other case, then the type unification has failed. + // Through application of the rules, every pair should have one of the above forms. + // Pairs that do not have one of the aboves form are contradictory. + else { + writeLog("Second erase:" +checkPair.toString()); + return false; + } + //*/ } else { //Pair type <. ? extends ? extends type betrachten TODO PL 2018-10-09 - } - } - } + }} return true;}).reduce((xx, yy) -> xx || yy).get(); }
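
For readers skimming the diffs: the getGroundBasePair() recursion that PATCH 1/8 introduces and PATCH 3/8 simplifies follows the basePair links of a UnifyPair back to the pair it was originally derived from. Below is a standalone, compilable restatement of just that recursion; GroundBasePairSketch is a made-up stand-in reduced to the one field the recursion needs, not the project's UnifyPair class.

    // Sketch only: a pair that remembers the pair it was derived from (null for an original pair).
    class GroundBasePairSketch {
        private final GroundBasePairSketch basePair;

        GroundBasePairSketch(GroundBasePairSketch basePair) { this.basePair = basePair; }

        GroundBasePairSketch getBasePair() { return basePair; }

        // Walks the basePair chain down to the pair that was not derived from any other pair.
        GroundBasePairSketch getGroundBasePair() {
            if (basePair == null) {
                return this;           // already a ground pair
            }
            if (basePair.getBasePair() == null) {
                return basePair;       // the direct base pair is the ground pair
            }
            return basePair.getGroundBasePair();
        }
    }

Compared with the PATCH 1/8 version, the Optional wrapper is gone in PATCH 3/8: a pair without a base pair now counts as its own ground base pair, which is what callers such as couldBecorrect rely on.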