From bfbce8140943d7b1419b8ae6e5f8b91ececc6fe7 Mon Sep 17 00:00:00 2001
From: "pl@gohorb.ba-horb.de"
Date: Fri, 10 May 2019 16:30:55 +0200
Subject: [PATCH] Added cancellation of all threads

	modified:   src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
	new file:   src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java
---
 .../de/dhbwstuttgart/core/JavaTXCompiler.java | 10 +--
 .../typeinference/unify/TypeUnify.java        | 18 ++---
 .../typeinference/unify/TypeUnify2Task.java   |  8 ++-
 .../typeinference/unify/TypeUnifyTask.java    | 69 +++++++++++++++----
 .../typeinference/unify/UnifyTaskModel.java   | 18 +++++
 5 files changed, 96 insertions(+), 27 deletions(-)
 create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java

diff --git a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
index 9f1d17eb..e24b43d2 100644
--- a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
+++ b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -39,6 +39,7 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
 import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
 import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
 import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
+import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -58,9 +59,10 @@ import org.apache.commons.io.output.NullOutputStream;
 public class JavaTXCompiler {
 
 	final CompilationEnvironment environment;
-	Boolean resultmodel = false;
+	Boolean resultmodel = true;
 	public final Map sourceFiles = new HashMap<>();
 	Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?
+	volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
 
 	/**
 	 * Äußerste Liste der Source-Files.
@@ -417,7 +419,7 @@ public class JavaTXCompiler {
 						}
 						return ret;
 					}).collect(Collectors.toCollection(ArrayList::new));
-			unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
+			unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
 		}
 		catch (IOException e) {
 			System.err.println("kein LogFile");
@@ -557,7 +559,7 @@ public class JavaTXCompiler {
 				UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
 				UnifyResultListenerImpl li = new UnifyResultListenerImpl();
 				urm.addUnifyResultListener(li);
-				unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
+				unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
 				System.out.println("RESULT Final: " + li.getResults());
 				logFile.write("RES_FINAL: " + li.getResults().toString()+"\n");
 				logFile.flush();
@@ -566,7 +568,7 @@ public class JavaTXCompiler {
 			/* UnifyResultModel End */
 			else {
 				//Set> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
-				Set> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
+				Set> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
 				System.out.println("RESULT: " + result);
 				logFile.write("RES: " + result.toString()+"\n");
 				logFile.flush();
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
index ecef0e14..62404b30 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -3,10 +3,12 @@ package de.dhbwstuttgart.typeinference.unify;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.Writer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
 
+import de.dhbwstuttgart.core.JavaTXCompiler;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
@@ -26,8 +28,8 @@ public class TypeUnify {
 	 * @param cons
 	 * @return
 	 */
-	public Set> unify(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+	public Set> unify(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
 		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		Set> res = unifyTask.join();
@@ -52,8 +54,8 @@ public class TypeUnify {
 	 * @param ret
 	 * @return
 	 */
-	public UnifyResultModel unifyAsync(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+	public UnifyResultModel unifyAsync(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
 		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		return ret;
@@ -70,8 +72,8 @@ public class TypeUnify {
 	 * @param ret
 	 * @return
 	 */
-	public UnifyResultModel unifyParallel(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+	public UnifyResultModel unifyParallel(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
 		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		Set> res = unifyTask.join();
@@ -103,8 +105,8 @@ public class TypeUnify {
 	 * @param cons
 	 * @return
 	 */
-	public Set> unifyOderConstraints(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
+	public Set> unifyOderConstraints(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
 		Set> res = unifyTask.compute();
 		try {
 			logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
index 315c01e7..46af7349 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -3,6 +3,7 @@ package de.dhbwstuttgart.typeinference.unify;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.Writer;
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -17,8 +18,8 @@ public class TypeUnify2Task extends TypeUnifyTask {
 
 	Set> setToFlatten;
 
-	public TypeUnify2Task(Set> setToFlatten, Set eq, List>> oderConstraints, Set nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
-		super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
+	public TypeUnify2Task(Set> setToFlatten, Set eq, List>> oderConstraints, Set nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
+		super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 		this.setToFlatten = setToFlatten;
 		this.nextSetElement = nextSetElement;
 	}
@@ -38,6 +39,9 @@ public class TypeUnify2Task extends TypeUnifyTask {
 			return new HashSet<>(); }
 		 else 
 		 */
+		if (this.isCancelled()) {
+			return new HashSet<>();
+		}
 		noOfThread--;
 		return res;
 	}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index e11c81bc..36a2bc43 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -122,6 +122,8 @@ public class TypeUnifyTask extends RecursiveTask>> {
 
 	static Integer noShortendElements = 0;
 
+	volatile UnifyTaskModel usedTasks;
+
 	public TypeUnifyTask() {
 		rules = new RuleSet();
 	}
@@ -141,7 +143,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
 	 */
 
 
-	public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
+	public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
 		synchronized (this) {
 			this.eq = eq;
 			//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -178,6 +180,8 @@ public class TypeUnifyTask extends RecursiveTask>> {
 			rules = new RuleSet(logFile);
 			this.rekTiefeField = rekTiefe;
 			this.urm = urm;
+			this.usedTasks = usedTasks;
+			this.usedTasks.add(this);
 		}
 	}
 
@@ -228,6 +232,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
 				.filter(x -> x.size()>1)
 				.collect(Collectors.toCollection(ArrayList::new));
 		Set> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
+		if (this.isCancelled()) {
+			return new HashSet<>();
+		}
 		noOfThread--;
 		try {
 			logFile.close();
@@ -272,6 +279,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
 		/*
 		 * Step 1: Repeated application of reduce, adapt, erase, swap
 		 */
+		if (totalnoOfThread > 10) {
+			usedTasks.cancel();
+		}
 		rekTiefe++;
 		nOfUnify++;
 		writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -430,7 +440,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
 			//	.stream().map(x -> new HashSet<>(x))
 			//	.collect(Collectors.toCollection(HashSet::new));
 		//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+
+
 
 		Set> eqPrimePrimeSet = new HashSet<>();
 
 		Set forks = new HashSet<>();
@@ -540,6 +552,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
 
 	Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
 		//ArrayList>> remainingSets = new ArrayList<>(topLevelSets);
+
 		fstElems.addAll(topLevelSets.stream()
 				.filter(x -> x.size()==1)
 				.map(y -> y.stream().findFirst().get())
@@ -804,9 +817,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 						newElemsOrig.add(a);
 
 						/* FORK ANFANG */
-						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 						//forks.add(forkOrig);
-						forkOrig.fork();
+						synchronized(usedTasks) {
+							if (this.isCancelled()) {
+								return new HashSet<>();
+							}
+							forkOrig.fork();
+						}
 						/* FORK ENDE */
 
 						synchronized (this) {
@@ -851,9 +869,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 							Set> newElems = new HashSet<>(elems);
 							List>> newOderConstraints = new ArrayList<>(oderConstraints);
 							newElems.add(nSaL);
-							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 							forks.add(fork);
-							fork.fork();
+							synchronized(usedTasks) {
+								if (this.isCancelled()) {
+									return new HashSet<>();
+								}
+								fork.fork();
+							}
 						}
 
 						//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
@@ -897,9 +920,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 						newElemsOrig.add(a);
 
 						/* FORK ANFANG */
-						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 						//forks.add(forkOrig);
-						forkOrig.fork();
+						synchronized(usedTasks) {
+							if (this.isCancelled()) {
+								return new HashSet<>();
+							}
+							forkOrig.fork();
+						}
 						/* FORK ENDE */
 
 						synchronized (this) {
@@ -944,9 +972,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 							Set> newElems = new HashSet<>(elems);
 							List>> newOderConstraints = new ArrayList<>(oderConstraints);
 							newElems.add(nSaL);
-							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 							forks.add(fork);
-							fork.fork();
+							synchronized(usedTasks) {
+								if (this.isCancelled()) {
+									return new HashSet<>();
+								}
+								fork.fork();
+							}
 						}
 
 						//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
@@ -991,9 +1024,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 						newElemsOrig.add(a);
 
 						/* FORK ANFANG */
-						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+						TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 						//forks.add(forkOrig);
-						forkOrig.fork();
+						synchronized(usedTasks) {
+							if (this.isCancelled()) {
+								return new HashSet<>();
+							}
+							forkOrig.fork();
+						}
 						/* FORK ENDE */
 
 						synchronized (this) {
@@ -1007,9 +1045,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
 							Set> newElems = new HashSet<>(elems);
 							List>> newOderConstraints = new ArrayList<>(oderConstraints);
 							newElems.add(nSaL);
-							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+							TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 							forks.add(fork);
-							fork.fork();
+							synchronized(usedTasks) {
+								if (this.isCancelled()) {
+									return new HashSet<>();
+								}
+								fork.fork();
+							}
 						}
 
 						//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java
new file mode 100644
index 00000000..2cc0d7d4
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java
@@ -0,0 +1,18 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+
+public class UnifyTaskModel {
+
+	ArrayList usedTasks = new ArrayList<>();
+
+	public void add(TypeUnifyTask t) {
+		usedTasks.add(t);
+	}
+
+	public void cancel() {
+		for(TypeUnifyTask t : usedTasks) {
+			t.cancel(true);
+		}
+	}
+}
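
Note on the cancellation scheme introduced by this patch: every TypeUnifyTask registers itself with the shared UnifyTaskModel in its constructor, unify() calls usedTasks.cancel() once totalnoOfThread exceeds 10, and each task both checks isCancelled() before returning its results and re-checks it inside synchronized(usedTasks) before forking further subtasks. The stand-alone sketch below mirrors that structure with plain ForkJoinTask cancellation; the names TaskRegistry, SearchTask and CancelDemo, the depth-based cancel trigger, and the two-children-per-node shape are illustrative stand-ins, not part of the patch.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Plays the role of UnifyTaskModel: every spawned task registers itself here,
// so a single call can cancel the whole computation.
class TaskRegistry {
    private final List<SearchTask> tasks = new ArrayList<>();

    public synchronized void add(SearchTask t) {
        tasks.add(t);
    }

    public synchronized void cancelAll() {
        for (SearchTask t : tasks) {
            t.cancel(true);              // ForkJoinTask.cancel(boolean)
        }
    }
}

// Plays the role of TypeUnifyTask/TypeUnify2Task: registers itself on construction,
// bails out with an empty result once it observes cancellation, and re-checks the
// flag under the registry lock before forking further subtasks.
class SearchTask extends RecursiveTask<Set<String>> {
    private final TaskRegistry registry;
    private final int depth;

    SearchTask(TaskRegistry registry, int depth) {
        this.registry = registry;
        this.depth = depth;
        registry.add(this);              // mirrors this.usedTasks.add(this) in the patch
    }

    @Override
    protected Set<String> compute() {
        if (depth > 3) {
            registry.cancelAll();        // illustrative stand-in for the totalnoOfThread > 10 trigger
        }
        if (isCancelled()) {
            return new HashSet<>();      // cancelled tasks contribute nothing
        }
        Set<String> result = new HashSet<>();
        List<SearchTask> forks = new ArrayList<>();
        for (int i = 0; i < 2; i++) {
            SearchTask child = new SearchTask(registry, depth + 1);
            synchronized (registry) {    // mirrors synchronized(usedTasks) around fork()
                if (isCancelled()) {
                    return result;       // do not fork once a cancel has been requested
                }
                child.fork();
            }
            forks.add(child);
        }
        for (SearchTask child : forks) {
            try {
                result.addAll(child.join());
            } catch (CancellationException e) {
                // a child cancelled via the registry simply contributes no results
            }
        }
        result.add("node@" + depth);
        return result;
    }
}

public class CancelDemo {
    public static void main(String[] args) {
        TaskRegistry registry = new TaskRegistry();
        try {
            Set<String> res = new ForkJoinPool().invoke(new SearchTask(registry, 0));
            System.out.println("collected " + res.size() + " partial results");
        } catch (CancellationException e) {
            // cancelAll() also hits the root task, so the whole invocation
            // can surface as cancelled instead of returning partial results
            System.out.println("search was cancelled");
        }
    }
}

One property worth keeping in mind when using this pattern: join() on a cancelled ForkJoinTask throws CancellationException, and because the root task registers itself as well, a registry-wide cancel can reach the caller as a cancelled computation rather than as the empty result set the tasks themselves return; the try/catch in the demo shows one way to absorb that.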