modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java

Implemented a custom cancel mechanism
Martin Plümicke 2019-05-10 21:02:24 +02:00
parent bfbce81409
commit a149b0c391
3 changed files with 45 additions and 21 deletions
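
In outline, the change drops the reliance on ForkJoinTask's built-in cancel()/isCancelled() and instead gives every TypeUnifyTask its own myIsCanceled flag, which UnifyTaskModel sets for all registered tasks at once; each task then polls the flag cooperatively at its cancellation points. Below is a minimal, self-contained sketch of that pattern, with simplified stand-in classes rather than the real JavaTX types:

import java.util.ArrayList;
import java.util.List;

// Simplified stand-ins for TypeUnifyTask and UnifyTaskModel, only to
// illustrate the flag-based cancel mechanism this commit introduces.
class TaskSketch {
    // The diff stores a plain Boolean and guards reads with
    // synchronized (usedTasks); volatile is used here only to keep the
    // sketch self-contained and still visible across threads.
    private volatile boolean myIsCanceled = false;

    void myCancel(boolean b) {        // the model calls this on every registered task
        myIsCanceled = true;
    }

    boolean myIsCancelled() {         // each task polls this at its cancellation points
        return myIsCanceled;
    }
}

class TaskModelSketch {
    private final List<TaskSketch> usedTasks = new ArrayList<>();

    synchronized void add(TaskSketch t) {   // registration; the real model keeps the running TypeUnifyTasks
        usedTasks.add(t);
    }

    synchronized void cancel() {            // mirrors UnifyTaskModel.cancel(): flip the flag on every task
        for (TaskSketch t : usedTasks) {
            t.myCancel(true);
        }
    }
}

One plausible reason for the switch, not stated in the commit, is that ForkJoinTask.cancel() makes join() throw a CancellationException, whereas the hand-rolled flag lets a cancelled task finish its own bookkeeping (decrementing noOfThread, closing its log file) and return an ordinary empty result set.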

../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

@@ -39,12 +39,17 @@ public class TypeUnify2Task extends TypeUnifyTask {
             return new HashSet<>(); }
         else
     */
-        if (this.isCancelled()) {
-            noOfThread--;
-            return new HashSet<>();
-        }
-        noOfThread--;
-        return res;
+        noOfThread--;
+        synchronized (usedTasks) {
+            if (this.myIsCancelled()) {
+                return new HashSet<>();
+            }
+            else {
+                return res;
+            }
+        }
     }
 
     public void closeLogFile() {
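
The tail of compute() above establishes the pattern the rest of the diff reuses: acquire the lock on the shared usedTasks model, poll the flag, and only then publish the result (or, later in TypeUnifyTask, fork a subtask). In isolation, and with hypothetical names for the surrounding pieces, the shape is roughly:

import java.util.HashSet;
import java.util.Set;

class CancelCheckSketch {
    private final Object usedTasks = new Object();    // stands in for the shared UnifyTaskModel
    private volatile boolean myIsCanceled = false;     // stands in for the task's cancel flag

    // Hypothetical helper mirroring the end of TypeUnify2Task.compute():
    // read the flag while holding the task-model lock and either discard
    // the computed result or hand it back unchanged.
    Set<String> publishOrDiscard(Set<String> res) {
        synchronized (usedTasks) {
            if (myIsCanceled) {
                return new HashSet<>();   // cancelled: return an empty result set
            }
            return res;                    // not cancelled: return the real result
        }
    }
}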

../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -122,6 +122,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     static Integer noShortendElements = 0;
 
+    Boolean myIsCanceled = false;
+
     volatile UnifyTaskModel usedTasks;
 
     public TypeUnifyTask() {
@@ -170,13 +172,19 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         thNo = totalnoOfThread;
         writeLog("thNo2 " + thNo);
         try {
-            this.logFile = new OutputStreamWriter(new NullOutputStream());
-                           //new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"Thread_"+thNo));
+            this.logFile = //new OutputStreamWriter(new NullOutputStream());
+                           new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"Thread_"+thNo));
             logFile.write("");
         }
         catch (IOException e) {
             System.err.println("log-File nicht vorhanden");
         }
+        if (thNo > 10) {
+            System.out.println("cancel");
+            usedTasks.cancel();
+            writeLog(nOfUnify.toString() + "cancel");
+            System.out.println("cancel");
+        }
         rules = new RuleSet(logFile);
         this.rekTiefeField = rekTiefe;
         this.urm = urm;
@@ -217,6 +225,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         }
     }
     */
+    void myCancel(Boolean b) {
+        myIsCanceled = true;
+    }
+
+    public boolean myIsCancelled() {
+        return myIsCanceled;
+    }
 
     protected Set<Set<UnifyPair>> compute() {
         if (one) {
@@ -232,9 +247,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 .filter(x -> x.size()>1)
                 .collect(Collectors.toCollection(ArrayList::new));
         Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
-        if (this.isCancelled()) {
-            return new HashSet<>();
-        }
         noOfThread--;
         try {
             logFile.close();
@@ -245,7 +257,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         if (isUndefinedPairSetSet(res)) {
             throw new TypeinferenceException("Unresolved constraints: " + res.toString(), new NullToken()); //return new HashSet<>();
         }
-        else return res;
+        else {
+            synchronized (usedTasks) {
+                if (this.myIsCancelled()) {
+                    return new HashSet<>();
+                }
+                else {
+                    return res;
+                }
+            }
+        }
     }
     /*
     @Override
@@ -279,9 +300,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         /*
          * Step 1: Repeated application of reduce, adapt, erase, swap
          */
-        if (totalnoOfThread > 10) {
-            usedTasks.cancel();
-        }
         rekTiefe++;
         nOfUnify++;
         writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -820,7 +839,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
             //forks.add(forkOrig);
             synchronized(usedTasks) {
-                if (this.isCancelled()) {
+                if (this.myIsCancelled()) {
                     return new HashSet<>();
                 }
                 forkOrig.fork();
@@ -872,7 +891,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
                 forks.add(fork);
                 synchronized(usedTasks) {
-                    if (this.isCancelled()) {
+                    if (this.myIsCancelled()) {
                         return new HashSet<>();
                     }
                     fork.fork();
@@ -923,7 +942,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
             //forks.add(forkOrig);
             synchronized(usedTasks) {
-                if (this.isCancelled()) {
+                if (this.myIsCancelled()) {
                     return new HashSet<>();
                 }
                 forkOrig.fork();
@@ -975,7 +994,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
                 forks.add(fork);
                 synchronized(usedTasks) {
-                    if (this.isCancelled()) {
+                    if (this.myIsCancelled()) {
                         return new HashSet<>();
                     }
                     fork.fork();
@@ -1027,7 +1046,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
             //forks.add(forkOrig);
             synchronized(usedTasks) {
-                if (this.isCancelled()) {
+                if (this.myIsCancelled()) {
                     return new HashSet<>();
                 }
                 forkOrig.fork();
@@ -1048,7 +1067,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
                 forks.add(fork);
                 synchronized(usedTasks) {
-                    if (this.isCancelled()) {
+                    if (this.myIsCancelled()) {
                         return new HashSet<>();
                     }
                     fork.fork();

../../../main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java

@@ -12,7 +12,7 @@ public class UnifyTaskModel {
     public void cancel() {
         for(TypeUnifyTask t : usedTasks) {
-            t.cancel(true);
+            t.myCancel(true);
         }
     }
 }
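
Taken together, a caller cancels everything by going through the task model, and each worker notices the flag at its next check. A hypothetical end-to-end usage sketch, using plain threads instead of the ForkJoinPool the real tasks run in:

import java.util.ArrayList;
import java.util.List;

public class CancelDemo {

    static class Worker implements Runnable {
        private volatile boolean myIsCanceled = false;

        void myCancel(boolean b) { myIsCanceled = true; }
        boolean myIsCancelled()  { return myIsCanceled; }

        @Override
        public void run() {
            long steps = 0;
            // Cooperative loop: keep working until the flag is observed,
            // the same way TypeUnifyTask checks myIsCancelled() before
            // forking a subtask or returning a result.
            while (!myIsCancelled()) {
                steps++;   // a unit of unification work would go here
            }
            System.out.println("worker cancelled after " + steps + " steps");
        }
    }

    public static void main(String[] args) throws InterruptedException {
        List<Worker> usedTasks = new ArrayList<>();
        Worker w = new Worker();
        usedTasks.add(w);

        Thread t = new Thread(w);
        t.start();
        Thread.sleep(50);                  // let the worker run briefly

        for (Worker task : usedTasks) {    // mirrors UnifyTaskModel.cancel()
            task.myCancel(true);
        }
        t.join();
    }
}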