modified: ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

Some changes that enable parallel processing
This commit is contained in:
Martin Plümicke 2019-01-20 21:49:22 +01:00
parent cf951043ef
commit 188ea1f7f8
2 changed files with 101 additions and 67 deletions
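For context: TypeUnifyTask extends RecursiveTask (see the hunk headers below), so the parallelisation added in this commit builds on Java's fork/join framework. A minimal, self-contained sketch of that pattern follows; ParallelSolveTask and the toy string "problems" are purely illustrative and not part of the project.

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Purely illustrative fork/join skeleton, not the project's classes.
class ParallelSolveTask extends RecursiveTask<Set<String>> {
    private final List<String> problems;

    ParallelSolveTask(List<String> problems) {
        this.problems = problems;
    }

    @Override
    protected Set<String> compute() {
        Set<String> result = new HashSet<>();
        if (problems.size() <= 1) {
            // base case: handle the remaining problem sequentially
            problems.forEach(p -> result.add(p.toUpperCase()));
            return result;
        }
        int mid = problems.size() / 2;
        ParallelSolveTask left = new ParallelSolveTask(problems.subList(0, mid));
        ParallelSolveTask right = new ParallelSolveTask(problems.subList(mid, problems.size()));
        left.fork();                     // run the left half asynchronously
        result.addAll(right.compute());  // work on the right half in this thread
        result.addAll(left.join());      // wait for the forked half and merge
        return result;
    }

    public static void main(String[] args) {
        Set<String> res = new ForkJoinPool().invoke(new ParallelSolveTask(List.of("a", "b", "c", "d")));
        System.out.println(res);
    }
}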

View File

@@ -60,6 +60,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* For the threads
*/
private static int noOfThread = 0;
private static int totalnoOfThread = 0;
private int thNo;
protected boolean one = false;
Integer MaxNoOfThreads = 4;
@@ -123,29 +124,33 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
synchronized (this) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
Set<Set<UnifyPair>> ret = new HashSet<>();
for (Constraint<UnifyPair> y : x) {
ret.add(new HashSet<>(y));
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
*/
}).collect(Collectors.toCollection(ArrayList::new));
*/
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
synchronized (this) { noOfThread++; }
writeLog("thNo1 " + thNo);
thNo = noOfThread;
writeLog("thNo2 " + thNo);
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
noOfThread++;
totalnoOfThread++;
writeLog("thNo1 " + thNo);
thNo = totalnoOfThread;
writeLog("thNo2 " + thNo);
}
}
/**
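The changed constructor increments two static counters inside a synchronized block: noOfThread counts tasks that are currently alive (and is decremented again after a join further below), while totalnoOfThread only ever grows and supplies the unique number thNo used in the log output. A hedged sketch of that bookkeeping with hypothetical names; the real code guards plain static int fields with synchronized (this), AtomicInteger is shown here only as one alternative with the same effect.

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical simplification of the counter bookkeeping in the constructor.
class ThreadBookkeeping {
    private static final AtomicInteger active = new AtomicInteger(0);   // ~ noOfThread: tasks currently alive
    private static final AtomicInteger created = new AtomicInteger(0);  // ~ totalnoOfThread: only ever grows
    final int thNo;                                                     // unique task number used for logging

    ThreadBookkeeping() {
        active.incrementAndGet();          // one more task alive
        thNo = created.incrementAndGet();  // fresh id, never reused even after tasks finish
    }

    void onJoined() {
        active.decrementAndGet();          // task finished, frees a slot below the thread limit
    }

    static boolean belowLimit(int maxNoOfThreads) {
        return active.get() <= maxNoOfThreads;
    }
}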
@@ -703,7 +708,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/*
* Step 6 b) Build the union over everything.
*/
/*
* PL 2019-01-20: needs to be reviewed
*/
if(parallel)
for(TypeUnifyTask fork : forks)
eqPrimePrimeSet.addAll(fork.join());
@@ -714,11 +721,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
writeLog("Result1 " + eqPrimePrimeSet.toString());
Iterator<UnifyPair> upit = eqPrimePrimeSet.iterator().next().iterator();
if (upit.next().getLhsType() instanceof WildcardType
|| upit.next().getLhsType() instanceof WildcardType) {
System.out.println("");
}
}
return eqPrimePrimeSet;
}
@@ -793,36 +795,40 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a_last = a;
List<Set<UnifyPair>> nextSetasListRestMax = new ArrayList<>();
List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
//List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
//List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
nextSetasListRestMax = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRestMax).iterator();
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasListRestMax.remove(a_next);
nextSetasListRest.remove(a_next);
}
}
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
nextSetasListRestMin = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRestMin).iterator();
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasListRestMin.remove(a_next);
nextSetasListRest.remove(a_next);
}
}
}
else if (variance == 0 || variance == 2) {
else if (variance == 2) {
a = nextSetasList.remove(0);
nextSetasListRest = new ArrayList<>(nextSetasList);
}
else if (variance == 0) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
@@ -852,7 +858,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
elems.add(a);
//elems.add(a); PL 2019-01-20 must be removed, because adding has to happen in the respective thread; for the sequential case
//it is done in the else branch
//if (remainingSets.isEmpty()) {//always holds, because only 1 element of the topLevelSets contains more than one element
//writeLog("Vor unify2 Aufruf: " + elems.toString());
Set<Set<UnifyPair>> res = new HashSet<>();
@@ -877,9 +884,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
forks.add(forkOrig);
forkOrig.fork();
while (!nextSetasListRestMax.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRestMax.remove(0);
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
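In the parallel variance == 1 branch an "original" TypeUnify2Task (forkOrig) is forked first, and then every remaining candidate from nextSetasListRest gets its own fork; each candidate is also removed from the shared nextSetasList under synchronized (this) so the enclosing loop does not process it again, and the removal is logged. A schematic version of that fork loop; makeTask, shared and lock are placeholders, not names from the project.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.function.Function;

// Schematic fork loop: one subtask per remaining candidate, removed from the
// shared work list under a lock so the sequential path skips it.
class ForkPerCandidate {
    static <T> List<RecursiveTask<Set<T>>> forkAll(List<T> rest,
                                                   List<T> shared,
                                                   Object lock,
                                                   Function<T, RecursiveTask<Set<T>>> makeTask) {
        List<RecursiveTask<Set<T>>> forks = new ArrayList<>();
        while (!rest.isEmpty()) {
            T candidate = rest.remove(0);
            synchronized (lock) {
                shared.remove(candidate);  // keep the enclosing loop from picking it up again
            }
            RecursiveTask<Set<T>> task = makeTask.apply(candidate);
            forks.add(task);
            task.fork();                   // start the subtask; results are joined later
        }
        return forks;
    }
}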
@@ -891,9 +904,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(elems, eq, fc, parallel);
res = forkOrig.join();
for(TypeUnifyTask fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (this) { noOfThread--; };
add_res.add(fork_res);
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
add_res.add(fork_res);
};
}
} else {
if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
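After forking, forkOrig is joined first and then every task in forks; in the new code the join, the "Join <thNo>" log line, the noOfThread decrement and the collection into add_res all sit inside one synchronized (this) block. The sketch below shows the same collection step with placeholder names; it takes the lock only around the shared-state update, a smaller critical section than in the committed code, shown as a design alternative rather than a copy of it.

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Placeholder version of the join phase: collect every fork's result and
// decrement the active-task counter. activeThreads[0] stands in for noOfThread.
class JoinForks {
    static <T> Set<Set<T>> joinAll(List<RecursiveTask<Set<Set<T>>>> forks,
                                   Object lock,
                                   int[] activeThreads) {
        Set<Set<T>> collected = new HashSet<>();
        for (RecursiveTask<Set<Set<T>>> fork : forks) {
            Set<Set<T>> forkRes = fork.join();  // blocks until the subtask has finished
            synchronized (lock) {
                activeThreads[0]--;             // one fewer task alive
                collected.addAll(forkRes);      // roughly corresponds to collecting fork_res into add_res
            }
        }
        return collected;
    }
}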
@@ -905,9 +921,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
forks.add(forkOrig);
forkOrig.fork();
while (!nextSetasListRestMin.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRestMin.remove(0);
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -919,12 +941,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(elems, eq, fc, parallel);
res = forkOrig.join();
for(TypeUnifyTask fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (this) { noOfThread--; };
add_res.add(fork_res);
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
add_res.add(fork_res);
};
}
} else {
if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
if(parallel && (variance == 2) //&& noOfThread <= MaxNoOfThreads
) {
writeLog("var2einstieg");
Set<TypeUnifyTask> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
@@ -933,9 +960,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
forks.add(forkOrig);
forkOrig.fork();
while (!nextSetasListRestOder.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRestOder.remove(0);
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
//nextSetasList.remove(nSaL);
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -947,13 +978,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(elems, eq, fc, parallel);
res = forkOrig.join();
for(TypeUnifyTask fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (this) { noOfThread--; };
add_res.add(fork_res);
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
add_res.add(fork_res);
};
}}
else {
parallel = false; //when MaxNoOfThreads is reached, continue working sequentially
elems.add(a); //PL 2019-01-16 is this really needed? It is already in line 833
//parallel = false; //when MaxNoOfThreads is reached, continue working sequentially
elems.add(a); //PL 2019-01-16 is this really needed? It is already in line 859 - yes, it is needed, see line 859
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
}}}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
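The final else branch above is the sequential fallback: the candidate a is added to elems in the current thread and unify2 is called directly (in this commit the parallel = false downgrade is commented out). A generic, self-contained sketch of the underlying "fork only while the thread budget allows, otherwise continue in the current thread" decision; MAX_THREADS and activeThreads are hypothetical stand-ins for MaxNoOfThreads and noOfThread, and the work itself is a toy.

import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;

// Generic sketch of budget-limited parallelism, not the project's code.
class BudgetedParallelism {
    static final int MAX_THREADS = 4;
    static volatile int activeThreads = 0;

    static Set<String> solve(List<String> problems, boolean parallel) {
        if (parallel && activeThreads <= MAX_THREADS) {
            return problems.parallelStream()              // stand-in for forking subtasks
                           .map(p -> p.toUpperCase(Locale.ROOT))
                           .collect(Collectors.toSet());
        }
        // budget exhausted or parallelism disabled: keep working in the current thread
        return problems.stream()
                       .map(p -> p.toUpperCase(Locale.ROOT))
                       .collect(Collectors.toSet());
    }
}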
@@ -1033,13 +1067,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//result = result;
}}}}
else { if (variance == 0) {
writeLog("RESADD:" + result.toString() + " " + res.toString());
writeLog("RES var=1 ADD:" + result.toString() + " " + res.toString());
result.addAll(res);
}}}
//add all error cases and all correct results in each case
}
else {
writeLog("RESADD:" + result.toString() + " " + res.toString());
//add all error cases and all correct results in each case
writeLog("RES Fst:" + result.toString() + " " + res.toString());
result.addAll(res);
}
}
@@ -1076,12 +1110,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//add all error cases and all correct results in each case
writeLog("RESADD:" + result.toString() + " " + par_res.toString());
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
result.addAll(par_res);
}
}
}
break;
//break;
}
/* nextSetasList = nextSetasListRest; */
/* already done before the unify2 call and assigned to nextSetasListRest
@@ -1117,12 +1151,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//add all error cases and all correct results in each case
writeLog("RESADD:" + result.toString() + " " + par_res.toString());
writeLog("RES var-1 ADD:" + result.toString() + " " + par_res.toString());
result.addAll(par_res);
}
}
}
break;
//break;
}
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
@@ -1157,12 +1191,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//add all error cases and all correct results in each case
writeLog("RESADD:" + result.toString() + " " + par_res.toString());
writeLog("RES var2 ADD:" + result.toString() + " " + par_res.toString());
result.addAll(par_res);
}
}
}
break;
//break;
}
}}}
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
@@ -2069,6 +2103,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
System.err.println("kein LogFile");
}
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
//import java.lang.Float;
import java.lang.Float;
//import java.lang.Byte;
//import java.lang.Boolean;