forked from JavaTX/JavaCompilerCore
Added cancellation of all threads
modified:  src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
modified:  src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
modified:  src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
modified:  src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
new file:  src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java
parent 985704c0b0
commit bfbce81409
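This commit lets the compiler abort all running unification threads: every TypeUnifyTask now registers itself in a shared UnifyTaskModel, compute() calls usedTasks.cancel() once totalnoOfThread exceeds 10, and each task polls isCancelled() before forking and after recursive unify calls, returning an empty result set when cancelled. Below is a minimal, self-contained sketch of that pattern using only JDK classes; the names TaskModel, CancellableTask and CancelDemo are illustrative and are not the compiler's own classes.

// Sketch of the cancellation pattern: tasks register in a shared model,
// cancel() marks every registered ForkJoinTask, and compute() polls isCancelled().
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

class TaskModel {
    private final ArrayList<RecursiveTask<Set<String>>> usedTasks = new ArrayList<>();

    synchronized void add(RecursiveTask<Set<String>> t) {
        usedTasks.add(t);
    }

    synchronized void cancel() {
        for (RecursiveTask<Set<String>> t : usedTasks) {
            // ForkJoinTask.cancel does not interrupt a running task; it only sets the
            // cancelled status, which is why compute() has to poll isCancelled().
            t.cancel(true);
        }
    }
}

class CancellableTask extends RecursiveTask<Set<String>> {
    CancellableTask(TaskModel model) {
        model.add(this); // register on construction, as the TypeUnifyTask constructor now does
    }

    @Override
    protected Set<String> compute() {
        if (isCancelled()) {
            return new HashSet<>(); // cancelled tasks give up and return an empty result
        }
        // ... a real task would fork subtasks here, re-checking isCancelled() before each fork ...
        return new HashSet<>();
    }
}

public class CancelDemo {
    public static void main(String[] args) {
        TaskModel usedTasks = new TaskModel();
        CancellableTask task = new CancellableTask(usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.execute(task);   // start asynchronously, roughly like unifyAsync
        usedTasks.cancel();   // abort: the task observes isCancelled() and returns early
        System.out.println("cancelled: " + task.isCancelled());
        pool.shutdown();
    }
}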
src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java

@@ -39,6 +39,7 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
+import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;

import java.io.File;
import java.io.FileOutputStream;

@@ -58,9 +59,10 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {

final CompilationEnvironment environment;
-Boolean resultmodel = false;
+Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?
+volatile UnifyTaskModel usedTasks = new UnifyTaskModel();

/**
* Äußerste Liste der Source-Files.

@@ -417,7 +419,7 @@ public class JavaTXCompiler {
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
-unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
+unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
}
catch (IOException e) {
System.err.println("kein LogFile");

@@ -557,7 +559,7 @@ public class JavaTXCompiler {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
-unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
+unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
System.out.println("RESULT Final: " + li.getResults());
logFile.write("RES_FINAL: " + li.getResults().toString()+"\n");
logFile.flush();

@@ -566,7 +568,7 @@ public class JavaTXCompiler {
/* UnifyResultModel End */
else {
//Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
-Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
+Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java

@@ -3,10 +3,12 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;

@@ -26,8 +28,8 @@ public class TypeUnify {
* @param cons
* @return
*/
-public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();

@@ -52,8 +54,8 @@ public class TypeUnify {
* @param ret
* @return
*/
-public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;

@@ -70,8 +72,8 @@ public class TypeUnify {
* @param ret
* @return
*/
-public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
+public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();

@@ -103,8 +105,8 @@ public class TypeUnify {
* @param cons
* @return
*/
-public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
+public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

@@ -17,8 +18,8 @@ public class TypeUnify2Task extends TypeUnifyTask {

Set<Set<UnifyPair>> setToFlatten;

-public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
-super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
+public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
+super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
}

@@ -38,6 +39,9 @@ public class TypeUnify2Task extends TypeUnifyTask {
return new HashSet<>(); }
else
*/
+if (this.isCancelled()) {
+return new HashSet<>();
+}
noOfThread--;
return res;
}
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -122,6 +122,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

static Integer noShortendElements = 0;

+volatile UnifyTaskModel usedTasks;

public TypeUnifyTask() {
rules = new RuleSet();
}

@@ -141,7 +143,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/


-public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
+public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
synchronized (this) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));

@@ -178,6 +180,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
this.urm = urm;
+this.usedTasks = usedTasks;
+this.usedTasks.add(this);
}
}

@@ -228,6 +232,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
+if (this.isCancelled()) {
+return new HashSet<>();
+}
noOfThread--;
try {
logFile.close();

@@ -272,6 +279,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
+if (totalnoOfThread > 10) {
+usedTasks.cancel();
+}
rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());

@@ -430,7 +440,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// .stream().map(x -> new HashSet<>(x))
// .collect(Collectors.toCollection(HashSet::new));
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE

Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();

Set<TypeUnifyTask> forks = new HashSet<>();

@@ -540,6 +552,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);

fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())

@@ -804,9 +817,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
-forkOrig.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+forkOrig.fork();
+}
/* FORK ENDE */

synchronized (this) {

@@ -851,9 +869,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
-fork.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+fork.fork();
+}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

@@ -897,9 +920,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
-forkOrig.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+forkOrig.fork();
+}
/* FORK ENDE */

synchronized (this) {

@@ -944,9 +972,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
-fork.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+fork.fork();
+}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

@@ -991,9 +1024,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
-forkOrig.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+forkOrig.fork();
+}
/* FORK ENDE */

synchronized (this) {

@@ -1007,9 +1045,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
-fork.fork();
+synchronized(usedTasks) {
+if (this.isCancelled()) {
+return new HashSet<>();
+}
+fork.fork();
+}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java (new file)

@@ -0,0 +1,18 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+
+public class UnifyTaskModel {
+
+ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();
+
+public void add(TypeUnifyTask t) {
+usedTasks.add(t);
+}
+
+public void cancel() {
+for(TypeUnifyTask t : usedTasks) {
+t.cancel(true);
+}
+}
+}