From aae2e5244b2214c025fdf32f18fb405c48cefe74 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20Pl=C3=BCmicke?=
Date: Wed, 16 Jan 2019 17:39:01 +0100
Subject: [PATCH] modified:   src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
 	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
 	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

Erster Ansatz Parallelisierung
---
 .../de/dhbwstuttgart/core/JavaTXCompiler.java |  10 +-
 .../typeinference/unify/TypeUnify.java        |  10 +-
 .../typeinference/unify/TypeUnifyTask.java    | 134 ++++++++++++++++--
 3 files changed, 140 insertions(+), 14 deletions(-)

diff --git a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
index f8d4a363..f339af1a 100644
--- a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
+++ b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -388,7 +388,15 @@ public class JavaTXCompiler {
 
 		//Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
 		//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
-		Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
+		List<Set<Set<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints().stream().map(x -> {
+			Set<Set<UnifyPair>> ret = new HashSet<>();
+			for (Constraint<UnifyPair> y : x) {
+				ret.add(new HashSet<>(y));
+			}
+			return ret;
+		}).collect(Collectors.toCollection(ArrayList::new));
+		Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log);
+		//Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log);
 		System.out.println("RESULT: " + result);
 		logFile.write("RES: " + result.toString()+"\n");
 		logFile.flush();
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
index d2428def..fecd28fe 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -10,22 +10,24 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 public class TypeUnify {
-	public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, true, logFile, log);
+	public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0);
 		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		Set<Set<UnifyPair>> res = unifyTask.join();
 		return res;
 	}
 
+	/*
 	public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
 		TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile, log);
 		Set<Set<UnifyPair>> res = unifyTask.compute();
 		return res;
 	}
+	*/
 
-	public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
+	public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0);
 		Set<Set<UnifyPair>> res = unifyTask.compute();
 		return res;
 	}
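The change above is the core of the commit "Erster Ansatz Parallelisierung" (first attempt at parallelization): TypeUnify.unify now hands the constraints to a TypeUnifyTask, which extends RecursiveTask, and drives it through a ForkJoinPool instead of calling compute() sequentially. The following minimal sketch, not part of the patch, shows only this ForkJoinPool/RecursiveTask wiring in isolation; it assumes nothing beyond the JDK classes, and FibTask with its fields is an illustrative name, not something from the repository.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Illustrative only: a RecursiveTask driven by a ForkJoinPool, mirroring
// how TypeUnifyTask is invoked and joined in TypeUnify.unify above.
public class FibTask extends RecursiveTask<Integer> {
	private final int n;

	public FibTask(int n) { this.n = n; }

	@Override
	protected Integer compute() {
		if (n <= 1) {
			return n;                      // small subproblem: solve directly
		}
		FibTask left = new FibTask(n - 1);
		left.fork();                       // schedule asynchronously (cf. fork() in TypeUnifyTask)
		FibTask right = new FibTask(n - 2);
		int rightResult = right.compute(); // keep working in the current thread
		return left.join() + rightResult;  // join() waits for the forked subtask
	}

	public static void main(String[] args) {
		ForkJoinPool pool = new ForkJoinPool();
		FibTask task = new FibTask(10);
		pool.invoke(task);                 // invoke() runs the task to completion,
		Integer res = task.join();         // join() then returns its result, as in unify(...)
		System.out.println(res);           // prints 55
	}
}

The patch follows the same invoke-then-join shape; the parallel decomposition itself happens inside TypeUnifyTask.compute(), shown in the next file.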
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index a149be4c..c08ace4f 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -34,6 +34,7 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
 import de.dhbwstuttgart.typeinference.unify.model.Unifier;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
 import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.Pair;
 
@@ -55,6 +56,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	private boolean printtag = false;
 	Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/test/logFiles/log" geschrieben werden soll?
 
+	/*
+	 * Fuer die Threads
+	 */
+	private static int noOfThread = 0;
+	private int thNo;
+	protected boolean one = false;
+
 	public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
 	FileWriter logFile;
 
@@ -83,6 +91,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
 	protected boolean parallel;
 
+	int rekTiefeField;
+
 	Integer nOfUnify = 0;
 
 	Integer noUndefPair = 0;
@@ -97,6 +107,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		rules = new RuleSet();
 	}
 
+	/*
 	public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
 		this.eq = eq;
 		this.fc = fc;
@@ -105,18 +116,22 @@
 		this.logFile = logFile;
 		this.log = log;
 		rules = new RuleSet(logFile);
+		noOfThread++;
+		thNo = noOfThread;
 	}
+	*/
 
-	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
+	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe) {
 		this.eq = eq;
 		//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
-		this.oderConstraintsField = oderConstraints.stream().map(x -> {
+		this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
 			Set<Set<UnifyPair>> ret = new HashSet<>();
 			for (Constraint<UnifyPair> y : x) {
 				ret.add(new HashSet<>(y));
 			}
 			return ret;
 		}).collect(Collectors.toCollection(ArrayList::new));
+		*/
 
 		//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
 		this.fc = fc;
@@ -125,6 +140,9 @@
 		this.logFile = logFile;
 		this.log = log;
 		rules = new RuleSet(logFile);
+		this.rekTiefeField = rekTiefe;
+		noOfThread++;
+		thNo = noOfThread;
 	}
 
 	/**
@@ -161,6 +179,10 @@
 	 */
 
 	protected Set<Set<UnifyPair>> compute() {
+		if (one) {
+			System.out.println("two");
+		}
+		one = true;
 		Set<UnifyPair> neweq = new HashSet<>(eq);
 		/* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
 		oderConstraintsField.stream()
@@ -169,7 +191,7 @@
 		ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
 				.filter(x -> x.size()>1)
 				.collect(Collectors.toCollection(ArrayList::new));
-		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, 0);
+		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
 		if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
 		else return res;
 	}
@@ -439,7 +461,7 @@
 	 * @param fc The finite closure
 	 * @return The set of all principal type unifiers
 	 */
-	protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+	protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
 		//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
 		//		  ).collect(Collectors.toCollection(HashSet::new));
 		//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -613,10 +635,13 @@
 		 */
 		//writeLog("vor Subst: " + eqPrime);
 		Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
+		Set<Set<UnifyPair>> unifyres1 = null;
+		Set<Set<UnifyPair>> unifyres2 = null;
 		//writeLog("nach Subst: " + eqPrimePrime);
 		/*
 		 * Step 6 a) Restart (fork) for pairs where subst was applied
 		 */
+		/*
 		if(parallel) {
 			if (eqPrime.equals(eq) && !eqPrimePrime.isPresent() && oderConstraints.isEmpty()) //PL 2017-09-29
 				//(!eqPrimePrime.isPresent()) auskommentiert und durch
@@ -637,7 +662,8 @@
 				fork.fork();
 			}
 		}
-		else { // sequentiell (Step 6b is included)
+		else */
+		{// sequentiell (Step 6b is included)
 			if (printtag) System.out.println("nextStep: " + eqPrimePrime);
 			if (eqPrime.equals(eq) && !eqPrimePrime.isPresent() && oderConstraints.isEmpty()) { //PL 2017-09-29
 				//(!eqPrimePrime.isPresent()) auskommentiert und durch
@@ -656,12 +682,12 @@
 				eqPrimePrimeSet.add(eqPrime);
 			}
 			else if(eqPrimePrime.isPresent()) {
-				Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe);
+				Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), oderConstraints, fc, parallel, rekTiefe);
 				eqPrimePrimeSet.addAll(unifyres);
 			}
 			else {
-				Set<Set<UnifyPair>> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe);
+				Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, oderConstraints, fc, parallel, rekTiefe);
 				eqPrimePrimeSet.addAll(unifyres);
 
 
 
@@ -683,8 +709,14 @@
 		 * Step 7: Filter empty sets;
 		 */
 		eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
-		if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet))
+		if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
 			writeLog("Result1 " + eqPrimePrimeSet.toString());
+			Iterator<UnifyPair> upit = eqPrimePrimeSet.iterator().next().iterator();
+			if (upit.next().getLhsType() instanceof WildcardType
+				|| upit.next().getLhsType() instanceof WildcardType) {
+				System.out.println("");
+			}
+		}
 
 		return eqPrimePrimeSet;
 	}
@@ -758,9 +790,19 @@
 		Set<UnifyPair> a = null;
 		while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
 			Set<UnifyPair> a_last = a;
+			List<Set<UnifyPair>> nextSetasListRest= new ArrayList<>();
 			if (variance == 1) {
 				a = oup.max(nextSetasList.iterator());
 				nextSetasList.remove(a);
+				nextSetasListRest = new ArrayList<>(nextSetasList);
+				Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
+				while (nextSetasListItRest.hasNext()) {
+					Set<UnifyPair> a_next = nextSetasListItRest.next();
+					if (//a.equals(a_next) ||
+						(oup.compare(a, a_next) == 1)) {
+						nextSetasListRest.remove(a_next);
+					}
+				}
 			}
 			else if (variance == -1) {
 				a = oup.min(nextSetasList.iterator());
@@ -799,10 +841,56 @@
 			elems.add(a);
 			//if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
 				//writeLog("Vor unify2 Aufruf: " + elems.toString());
-				Set<Set<UnifyPair>> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
+				Set<Set<UnifyPair>> res = new HashSet<>();
+				Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
+				if(parallel && (variance == 1)) {
+					/*
+					elems.add(a);
+					TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
+					fork.fork();
+					res = fork.join();
+					*/
+
+					Set<TypeUnify2Task> forks = new HashSet<>();
+
+					//TypeUnify2Task fork1 = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
+
+
+					Set<UnifyPair> newEqOrig = new HashSet<>(eq);
+					Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
+					List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
+					newElemsOrig.add(a);
+					TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
+					forks.add(forkOrig);
+					forkOrig.fork();
+
+					while (!nextSetasListRest.isEmpty()) {
+						Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
+						Set<UnifyPair> newEq = new HashSet<>(eq);
+						Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
+						List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
+						newElems.add(nSaL);
+						TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe);
+						forks.add(fork);
+						fork.fork();
+					}
+					//res = unify2(elems, eq, fc, parallel);
+					res = forkOrig.join();
+					for(TypeUnifyTask fork : forks) {
+						Set<Set<UnifyPair>> fork_res = fork.join();
+						add_res.add(fork_res);
+					}
+				}
+				else {
+					elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 833
+					res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
+				}
 				if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
 					//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
 					result = res;
+					if (res.iterator().next() instanceof WildcardType) {
+						System.out.println("");
+					}
 				}
 				else {
 					if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
@@ -874,11 +962,13 @@
 							//result = result;
 						}}}}
 				else { if (variance == 0) {
+					writeLog("RESADD:" + result.toString() + " " + res.toString());
 					result.addAll(res);
 				}}}
 				//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
 			}
 			else {
+				writeLog("RESADD:" + result.toString() + " " + res.toString());
 				result.addAll(res);
 			}
 		}
@@ -901,6 +991,31 @@
 			System.out.print("");
 			Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
 			if (variance == 1) {
+				if (parallel) {
+					for (Set<Set<UnifyPair>> par_res : add_res) {
+						if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
+							//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
+							result = par_res;
+							if (par_res.iterator().next() instanceof WildcardType) {
+								System.out.println("");
+							}
+						}
+						else {
+							if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
+								|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
+								|| result.isEmpty()) {
+								//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
+								writeLog("RESADD:" + result.toString() + " " + par_res.toString());
+								result.addAll(par_res);
+							}
+						}
+					}
+					break;
+				}
+				/* nextSetasList = nextSetasListRest; */
+				/* wird bereits vor den unify2-Aufruf durchgefuehrt und nextSetasListRest zugeordnet
+				 */
+				System.out.println("");
 				writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
 				while (nextSetasListIt.hasNext()) {
 
@@ -1828,6 +1943,7 @@
 
 	void writeLog(String str) {
 		if (log) {
 			try {
+				logFile.write("Thread no.:" + thNo + "\n");
 				logFile.write(str+"\n\n");
 				logFile.flush();
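The largest hunk of the patch is the parallel && (variance == 1) branch above: the chosen maximal element a is submitted as forkOrig, every remaining alternative in nextSetasListRest is forked as its own TypeUnify2Task, and the joined results are collected in add_res and merged into result later, in the if (parallel) block of the variance == 1 case. The following compact sketch, not part of the patch, illustrates that fork-per-alternative scheme with simplified String alternatives; AlternativeTask, ForkPerAlternativeSketch and the merge step are hypothetical stand-ins for TypeUnify2Task, unify2 and the result handling in the patch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Illustrative only: one subtask per alternative, forked eagerly, then joined and merged.
class AlternativeTask extends RecursiveTask<Set<String>> {
	private final String alternative;

	AlternativeTask(String alternative) { this.alternative = alternative; }

	@Override
	protected Set<String> compute() {
		// stands in for unify2(elems, eq, ...) applied to one alternative
		Set<String> res = new HashSet<>();
		res.add("solved(" + alternative + ")");
		return res;
	}
}

public class ForkPerAlternativeSketch {
	public static void main(String[] args) {
		// "a1" plays the role of the maximal element a, the rest of nextSetasListRest
		List<String> rest = new ArrayList<>(Arrays.asList("a2", "a3", "a4"));

		Set<String> merged = new ForkJoinPool().invoke(new RecursiveTask<Set<String>>() {
			@Override
			protected Set<String> compute() {
				List<AlternativeTask> forks = new ArrayList<>();
				AlternativeTask forkOrig = new AlternativeTask("a1");
				forkOrig.fork();                          // like forkOrig.fork() in the patch
				while (!rest.isEmpty()) {                 // fork the remaining alternatives
					AlternativeTask fork = new AlternativeTask(rest.remove(0));
					forks.add(fork);
					fork.fork();
				}
				Set<String> result = new HashSet<>(forkOrig.join());
				for (AlternativeTask fork : forks) {      // join the rest, like the add_res loop
					result.addAll(fork.join());
				}
				return result;
			}
		});
		System.out.println(merged);
	}
}

In the patch the forked results are not consumed where they are produced: they are stored in add_res and only folded into result in the later if (parallel) block, after res = forkOrig.join() has been handled by the existing sequential result logic.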