modified: ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

Some changes that enable parallel processing
Martin Plümicke 2019-01-20 21:49:22 +01:00
parent cf951043ef
commit 188ea1f7f8
2 changed files with 101 additions and 67 deletions
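For orientation, a minimal sketch of the fork/join pattern this change moves the unification onto: each task forks one subtask per remaining alternative, removes the forked alternatives from its own worklist so they are not solved twice, and joins the partial results afterwards; a static noOfThread counter tracks live tasks, while totalnoOfThread only ever grows and therefore yields a unique task number (the role the patch gives it when assigning thNo). All names below (DemoUnifyTask, the String "alternatives") are hypothetical stand-ins, not the project's API; the real TypeUnifyTask/TypeUnify2Task operate on sets of UnifyPairs, oder-constraints and a finite closure.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.RecursiveTask;

    // Illustrative stand-in for TypeUnifyTask; all names here are hypothetical.
    class DemoUnifyTask extends RecursiveTask<List<String>> {

        // Mirrors the patch: noOfThread counts live tasks, totalnoOfThread only
        // ever grows, so it yields a unique number per task.
        private static int noOfThread = 0;
        private static int totalnoOfThread = 0;
        private final int thNo;

        private final List<String> alternatives;

        DemoUnifyTask(List<String> alternatives) {
            this.alternatives = new ArrayList<>(alternatives);
            // The counters are static, so the lock has to be shared as well.
            synchronized (DemoUnifyTask.class) {
                noOfThread++;
                totalnoOfThread++;
                thNo = totalnoOfThread;
            }
        }

        @Override
        protected List<String> compute() {
            List<String> result = new ArrayList<>();
            if (alternatives.size() <= 1) {
                for (String a : alternatives) {
                    result.add("task " + thNo + " solved " + a);
                }
                return result;
            }
            // Fork one subtask per alternative; the forked work is removed from
            // the local list so it is not processed again
            // (cf. the new nextSetasList.remove(nSaL) calls in the patch).
            List<DemoUnifyTask> forks = new ArrayList<>();
            while (!alternatives.isEmpty()) {
                String a = alternatives.remove(0);
                DemoUnifyTask fork = new DemoUnifyTask(Arrays.asList(a));
                forks.add(fork);
                fork.fork();
            }
            // Join all forks and collect their partial results (cf. add_res.add(fork_res)).
            for (DemoUnifyTask fork : forks) {
                result.addAll(fork.join());
                synchronized (DemoUnifyTask.class) {
                    noOfThread--;
                }
            }
            return result;
        }

        public static void main(String[] args) {
            List<String> work = Arrays.asList("a <. b", "b <. c", "c <. d");
            System.out.println(ForkJoinPool.commonPool().invoke(new DemoUnifyTask(work)));
        }
    }

One deliberate difference: the sketch locks on the class object because the counters are static; the patch synchronizes on this, which serializes work within one task instance but not counter updates coming from different instances.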


@@ -60,6 +60,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * Fuer die Threads
      */
     private static int noOfThread = 0;
+    private static int totalnoOfThread = 0;
     private int thNo;
     protected boolean one = false;
     Integer MaxNoOfThreads = 4;
@@ -123,29 +124,33 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      */
     public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe) {
-        this.eq = eq;
-        //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
-        this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
+        synchronized (this) {
+            this.eq = eq;
+            //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+            this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
             Set<Set<UnifyPair>> ret = new HashSet<>();
             for (Constraint<UnifyPair> y : x) {
                 ret.add(new HashSet<>(y));
             }
             return ret;
         }).collect(Collectors.toCollection(ArrayList::new));
         */
         //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
         this.fc = fc;
         this.oup = new OrderingUnifyPair(fc);
         this.parallel = parallel;
         this.logFile = logFile;
         this.log = log;
         rules = new RuleSet(logFile);
         this.rekTiefeField = rekTiefe;
-        synchronized (this) { noOfThread++; }
-        writeLog("thNo1 " + thNo);
-        thNo = noOfThread;
-        writeLog("thNo2 " + thNo);
+            noOfThread++;
+            totalnoOfThread++;
+            writeLog("thNo1 " + thNo);
+            thNo = totalnoOfThread;
+            writeLog("thNo2 " + thNo);
+        }
     }
     /**
@@ -703,7 +708,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         /*
          * Step 6 b) Build the union over everything.
          */
+        /*
+         * PL 2019-01-20: muss uebrprueft werden
+         */
         if(parallel)
             for(TypeUnifyTask fork : forks)
                 eqPrimePrimeSet.addAll(fork.join());
@@ -714,11 +721,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
         if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
             writeLog("Result1 " + eqPrimePrimeSet.toString());
-            Iterator<UnifyPair> upit = eqPrimePrimeSet.iterator().next().iterator();
-            if (upit.next().getLhsType() instanceof WildcardType
-                    || upit.next().getLhsType() instanceof WildcardType) {
-                System.out.println("");
-            }
         }
         return eqPrimePrimeSet;
     }
@@ -793,36 +795,40 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         Set<UnifyPair> a = null;
         while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
             Set<UnifyPair> a_last = a;
-            List<Set<UnifyPair>> nextSetasListRestMax = new ArrayList<>();
-            List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
-            List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
+            List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
+            //List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
+            //List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
             if (variance == 1) {
                 a = oup.max(nextSetasList.iterator());
                 nextSetasList.remove(a);
-                nextSetasListRestMax = new ArrayList<>(nextSetasList);
-                Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRestMax).iterator();
+                nextSetasListRest = new ArrayList<>(nextSetasList);
+                Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
                 while (nextSetasListItRest.hasNext()) {
                     Set<UnifyPair> a_next = nextSetasListItRest.next();
                     if (//a.equals(a_next) ||
                         (oup.compare(a, a_next) == 1)) {
-                        nextSetasListRestMax.remove(a_next);
+                        nextSetasListRest.remove(a_next);
                     }
                 }
             }
             else if (variance == -1) {
                 a = oup.min(nextSetasList.iterator());
                 nextSetasList.remove(a);
-                nextSetasListRestMin = new ArrayList<>(nextSetasList);
-                Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRestMin).iterator();
+                nextSetasListRest = new ArrayList<>(nextSetasList);
+                Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
                 while (nextSetasListItRest.hasNext()) {
                     Set<UnifyPair> a_next = nextSetasListItRest.next();
                     if (//a.equals(a_next) ||
                         (oup.compare(a, a_next) == -1)) {
-                        nextSetasListRestMin.remove(a_next);
+                        nextSetasListRest.remove(a_next);
                     }
                 }
             }
-            else if (variance == 0 || variance == 2) {
+            else if (variance == 2) {
+                a = nextSetasList.remove(0);
+                nextSetasListRest = new ArrayList<>(nextSetasList);
+            }
+            else if (variance == 0) {
                 a = nextSetasList.remove(0);
             }
             //writeLog("nextSet: " + nextSetasList.toString()+ "\n");
@@ -852,7 +858,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             i++;
             Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
             writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
-            elems.add(a);
+            //elems.add(a); PL 2019-01-20 Muss weg, weil das in jeweiligen Thread erfolgen muss. Fuer den sequentiellen Fall
+            //im else-Zweig
             //if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
             //writeLog("Vor unify2 Aufruf: " + elems.toString());
             Set<Set<UnifyPair>> res = new HashSet<>();
@@ -877,9 +884,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
                 forks.add(forkOrig);
                 forkOrig.fork();
-                while (!nextSetasListRestMax.isEmpty()) {
-                    Set<UnifyPair> nSaL = nextSetasListRestMax.remove(0);
+                synchronized (this) {
+                    writeLog("a in " + variance + " "+ a);
+                    writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+                }
+                while (!nextSetasListRest.isEmpty()) {
+                    Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
+                    synchronized (this) { nextSetasList.remove(nSaL);
+                        writeLog("1 RM" + nSaL.toString());
+                    }
                     Set<UnifyPair> newEq = new HashSet<>(eq);
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                     List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -891,9 +904,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 //res = unify2(elems, eq, fc, parallel);
                 res = forkOrig.join();
                 for(TypeUnifyTask fork : forks) {
-                    Set<Set<UnifyPair>> fork_res = fork.join();
-                    synchronized (this) { noOfThread--; };
-                    add_res.add(fork_res);
+                    synchronized (this) {
+                        Set<Set<UnifyPair>> fork_res = fork.join();
+                        writeLog("Join " + new Integer(fork.thNo).toString());
+                        noOfThread--;
+                        add_res.add(fork_res);
+                    };
                 }
             } else {
                 if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
@@ -905,9 +921,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
                     forks.add(forkOrig);
                     forkOrig.fork();
-                    while (!nextSetasListRestMin.isEmpty()) {
-                        Set<UnifyPair> nSaL = nextSetasListRestMin.remove(0);
+                    synchronized (this) {
+                        writeLog("a in " + variance + " "+ a);
+                        writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+                    }
+                    while (!nextSetasListRest.isEmpty()) {
+                        Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
+                        synchronized (this) { nextSetasList.remove(nSaL);
+                            writeLog("-1 RM" + nSaL.toString());
+                        }
                         Set<UnifyPair> newEq = new HashSet<>(eq);
                         Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                         List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -919,12 +941,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     //res = unify2(elems, eq, fc, parallel);
                     res = forkOrig.join();
                     for(TypeUnifyTask fork : forks) {
-                        Set<Set<UnifyPair>> fork_res = fork.join();
-                        synchronized (this) { noOfThread--; };
-                        add_res.add(fork_res);
+                        synchronized (this) {
+                            Set<Set<UnifyPair>> fork_res = fork.join();
+                            writeLog("Join " + new Integer(fork.thNo).toString());
+                            noOfThread--;
+                            add_res.add(fork_res);
+                        };
                     }
                 } else {
-                    if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
+                    if(parallel && (variance == 2) //&& noOfThread <= MaxNoOfThreads
+                            ) {
+                        writeLog("var2einstieg");
                         Set<TypeUnifyTask> forks = new HashSet<>();
                         Set<UnifyPair> newEqOrig = new HashSet<>(eq);
                         Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
@@ -933,9 +960,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                         TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
                         forks.add(forkOrig);
                         forkOrig.fork();
-                        while (!nextSetasListRestOder.isEmpty()) {
-                            Set<UnifyPair> nSaL = nextSetasListRestOder.remove(0);
+                        synchronized (this) {
+                            writeLog("a in " + variance + " "+ a);
+                            writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+                        }
+                        while (!nextSetasListRest.isEmpty()) {
+                            Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
+                            //nextSetasList.remove(nSaL);
                             Set<UnifyPair> newEq = new HashSet<>(eq);
                             Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                             List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -947,13 +978,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                         //res = unify2(elems, eq, fc, parallel);
                         res = forkOrig.join();
                         for(TypeUnifyTask fork : forks) {
-                            Set<Set<UnifyPair>> fork_res = fork.join();
-                            synchronized (this) { noOfThread--; };
-                            add_res.add(fork_res);
+                            synchronized (this) {
+                                Set<Set<UnifyPair>> fork_res = fork.join();
+                                writeLog("Join " + new Integer(fork.thNo).toString());
+                                noOfThread--;
+                                add_res.add(fork_res);
+                            };
                         }}
                     else {
-                        parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
-                        elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 833
+                        //parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
+                        elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
                         res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
                     }}}
             if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
@@ -1033,13 +1067,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 //result = result;
             }}}}
             else { if (variance == 0) {
-                writeLog("RESADD:" + result.toString() + " " + res.toString());
+                writeLog("RES var=1 ADD:" + result.toString() + " " + res.toString());
                 result.addAll(res);
             }}}
-            //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
             }
             else {
-                writeLog("RESADD:" + result.toString() + " " + res.toString());
+                //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
+                writeLog("RES Fst:" + result.toString() + " " + res.toString());
                 result.addAll(res);
             }
         }
@@ -1076,12 +1110,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
                     || result.isEmpty()) {
                     //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
-                    writeLog("RESADD:" + result.toString() + " " + par_res.toString());
+                    writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
                     result.addAll(par_res);
                 }
             }
         }
-        break;
+        //break;
     }
     /* nextSetasList = nextSetasListRest; */
     /* wird bereits vor den unify2-Aufruf durchgefuehrt und nextSetasListRest zugeordnet
@@ -1117,12 +1151,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
                     || result.isEmpty()) {
                     //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
-                    writeLog("RESADD:" + result.toString() + " " + par_res.toString());
+                    writeLog("RES var-1 ADD:" + result.toString() + " " + par_res.toString());
                     result.addAll(par_res);
                 }
             }
         }
-        break;
+        //break;
     }
     System.out.println("");
     writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
@@ -1157,12 +1191,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
                     || result.isEmpty()) {
                     //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
-                    writeLog("RESADD:" + result.toString() + " " + par_res.toString());
+                    writeLog("RES var2 ADD:" + result.toString() + " " + par_res.toString());
                     result.addAll(par_res);
                 }
             }
         }
-        break;
+        //break;
     }
     }}}
     writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
@@ -2069,6 +2103,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             System.err.println("kein LogFile");
         }
     }
     }
 }
 }
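A side note on the selection step above (hunk -793,36 +795,40): for variance == 1 the code picks a maximal element of nextSetasList via oup.max, copies the remainder into nextSetasListRest, and then drops from that copy every element that is strictly smaller than the chosen one, so only incomparable (or equal) alternatives are left for the forked tasks; variance == -1 is the mirror image with oup.min. The sketch below illustrates just this pruning, using a componentwise order on int pairs as a hypothetical stand-in for OrderingUnifyPair (compare returns 1/-1/0 like oup.compare):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical sketch of the variance == 1 selection: pick a maximal element
    // of the worklist, then prune from the rest-list everything strictly below it.
    class MaxSelectSketch {

        // Componentwise partial order on pairs: 1 if a > b, -1 if a < b,
        // 0 if equal or incomparable. Illustration only, not the real order.
        static int compare(int[] a, int[] b) {
            if (a[0] >= b[0] && a[1] >= b[1] && (a[0] > b[0] || a[1] > b[1])) return 1;
            if (a[0] <= b[0] && a[1] <= b[1] && (a[0] < b[0] || a[1] < b[1])) return -1;
            return 0;
        }

        public static void main(String[] args) {
            List<int[]> nextSetasList = new ArrayList<>(Arrays.asList(
                    new int[]{2, 2}, new int[]{1, 1}, new int[]{3, 1}, new int[]{1, 3}));

            // a = oup.max(nextSetasList.iterator()): take some maximal element.
            int[] a = nextSetasList.get(0);
            for (int[] cand : nextSetasList) {
                if (compare(cand, a) == 1) a = cand;
            }
            nextSetasList.remove(a);

            // nextSetasListRest = copy of the remainder, minus everything below a.
            List<int[]> nextSetasListRest = new ArrayList<>(nextSetasList);
            for (int[] aNext : new ArrayList<>(nextSetasListRest)) {
                if (compare(a, aNext) == 1) {
                    nextSetasListRest.remove(aNext);
                }
            }

            System.out.println("chosen: " + Arrays.toString(a));
            for (int[] rest : nextSetasListRest) {
                System.out.println("forked alternative: " + Arrays.toString(rest));
            }
        }
    }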


@@ -1,6 +1,6 @@
 import java.util.Vector;
 import java.lang.Integer;
-//import java.lang.Float;
+import java.lang.Float;
 //import java.lang.Byte;
 //import java.lang.Boolean;