modified:   src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
First approach to parallelization
Martin Plümicke 2019-01-16 17:39:01 +01:00
parent 8c517e7f4a
commit aae2e5244b
3 changed files with 140 additions and 14 deletions

src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java

@@ -388,7 +388,15 @@ public class JavaTXCompiler {
//Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
List<Set<Set<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints().stream().map(x -> {
Set<Set<UnifyPair>> ret = new HashSet<>();
for (Constraint<UnifyPair> y : x) {
ret.add(new HashSet<>(y));
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
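
The new block above copies every Constraint<UnifyPair> of an Oder-constraint into a plain HashSet before the list reaches unify. The following is a minimal, self-contained sketch of that copy step; Constraint and UnifyPair are replaced by hypothetical stand-ins, so it only illustrates the shape of the stream pipeline, not the real classes.

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class OderConstraintFlattening {

    // Hypothetical stand-in for Constraint<A>; in this sketch it is just a set of pairs.
    static class Constraint<A> extends HashSet<A> {}

    // Hypothetical stand-in for UnifyPair; only object identity matters here.
    static class UnifyPair {}

    // Copies every Constraint<UnifyPair> into a plain HashSet, mirroring the
    // conversion done in JavaTXCompiler before the unify call.
    static List<Set<Set<UnifyPair>>> flatten(List<Set<Constraint<UnifyPair>>> oderConstraints) {
        return oderConstraints.stream().map(x -> {
            Set<Set<UnifyPair>> ret = new HashSet<>();
            for (Constraint<UnifyPair> y : x) {
                ret.add(new HashSet<>(y));
            }
            return ret;
        }).collect(Collectors.toCollection(ArrayList::new));
    }

    public static void main(String[] args) {
        Constraint<UnifyPair> c = new Constraint<>();
        c.add(new UnifyPair());
        Set<Constraint<UnifyPair>> alternative = new HashSet<>();
        alternative.add(c);
        List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>(List.of(alternative));
        System.out.println(flatten(oderConstraints)); // one alternative containing one pair set
    }
}
```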

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java

@@ -10,22 +10,24 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, true, logFile, log);
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
return res;
}
/*
public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile, log);
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;
}
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0);
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;
}
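
The reworked entry point submits the root TypeUnifyTask to a ForkJoinPool and then joins it. As an illustration of that invoke/join pattern, here is a self-contained sketch with a trivial RecursiveTask (a range sum) standing in for TypeUnifyTask; the splitting logic is purely illustrative and not part of the commit.

```java
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class ForkJoinSketch {

    // Trivial RecursiveTask standing in for TypeUnifyTask: sums an int range,
    // forking one half of the work once the range is large enough.
    static class SumTask extends RecursiveTask<Long> {
        final int from, to;
        SumTask(int from, int to) { this.from = from; this.to = to; }

        @Override
        protected Long compute() {
            if (to - from <= 1000) {               // small enough: solve sequentially
                long s = 0;
                for (int i = from; i < to; i++) s += i;
                return s;
            }
            int mid = (from + to) / 2;
            SumTask left = new SumTask(from, mid);
            left.fork();                            // run the left half asynchronously
            long right = new SumTask(mid, to).compute();
            return right + left.join();             // wait for the forked half
        }
    }

    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool();
        SumTask root = new SumTask(0, 1_000_000);
        pool.invoke(root);                          // same invoke/join shape as TypeUnify.unify
        System.out.println(root.join());
    }
}
```

Note that pool.invoke already waits for completion; the extra join() afterwards simply reads the finished result again.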

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -34,6 +34,7 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.Pair;
@@ -55,6 +56,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
private boolean printtag = false;
Boolean log = true; //indicates whether a log file should be written to System.getProperty("user.dir")+"/test/logFiles/log"
/*
* For the threads
*/
private static int noOfThread = 0;
private int thNo;
protected boolean one = false;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
FileWriter logFile;
@@ -83,6 +91,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
protected boolean parallel;
int rekTiefeField;
Integer nOfUnify = 0;
Integer noUndefPair = 0;
@@ -97,6 +107,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
rules = new RuleSet();
}
/*
public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
this.eq = eq;
this.fc = fc;
@@ -105,18 +116,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
noOfThread++;
thNo = noOfThread;
}
*/
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints.stream().map(x -> {
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
Set<Set<UnifyPair>> ret = new HashSet<>();
for (Constraint<UnifyPair> y : x) {
ret.add(new HashSet<>(y));
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
*/
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.fc = fc;
@@ -125,6 +140,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
noOfThread++;
thNo = noOfThread;
}
/**
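
The constructor now draws a thread number from the static counter noOfThread. Since the counter is a plain static int and tasks can be constructed from several worker threads, two tasks may observe the same value; a small sketch of a drop-in alternative using AtomicInteger, which is my own suggestion and not part of the commit, would look like this.

```java
import java.util.concurrent.atomic.AtomicInteger;

public class ThreadNumberingSketch {

    // Thread-safe counterpart of "private static int noOfThread": each new task
    // gets a unique number even when tasks are created concurrently.
    private static final AtomicInteger noOfThread = new AtomicInteger(0);

    private final int thNo;

    ThreadNumberingSketch() {
        this.thNo = noOfThread.incrementAndGet();   // atomic replacement for noOfThread++; thNo = noOfThread;
    }

    public static void main(String[] args) {
        System.out.println(new ThreadNumberingSketch().thNo); // 1
        System.out.println(new ThreadNumberingSketch().thNo); // 2
    }
}
```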
@@ -161,6 +179,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<UnifyPair> neweq = new HashSet<>(eq);
/* single-element Oder-constraints are converted into Und-constraints */
oderConstraintsField.stream()
@@ -169,7 +191,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, 0);
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
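
compute() now folds single-alternative Oder-constraints into the Und-constraints (as the comment above says) and keeps only the branching ones, while the first half of that stream pipeline is cut off by the hunk. A hedged, self-contained sketch of that partitioning, with the unification types replaced by strings, could be:

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class OderConstraintPartitioning {

    // Splits the Oder-constraints: single-alternative ones are merged into the
    // Und-constraints (there is nothing to branch on), the rest remain as
    // branching alternatives for the search in unify.
    static <P> List<Set<Set<P>>> partition(Set<P> undConstraints,
                                           List<Set<Set<P>>> oderConstraints) {
        oderConstraints.stream()
                .filter(x -> x.size() == 1)
                .forEach(x -> undConstraints.addAll(x.iterator().next()));
        return oderConstraints.stream()
                .filter(x -> x.size() > 1)
                .collect(Collectors.toCollection(ArrayList::new));
    }

    public static void main(String[] args) {
        Set<String> und = new HashSet<>(Set.of("a < b"));
        Set<Set<String>> single = Set.of(Set.of("c < d"));                      // one alternative
        Set<Set<String>> branching = Set.of(Set.of("e < f"), Set.of("e < g"));  // two alternatives
        List<Set<Set<String>>> remaining =
                partition(und, new ArrayList<>(List.of(single, branching)));
        System.out.println(und);        // now also contains "c < d"
        System.out.println(remaining);  // only the branching constraint is left
    }
}
```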
@@ -439,7 +461,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -613,10 +635,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
//writeLog("vor Subst: " + eqPrime);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
Set<Set<UnifyPair>> unifyres1 = null;
Set<Set<UnifyPair>> unifyres2 = null;
//writeLog("nach Subst: " + eqPrimePrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
/*
if(parallel) {
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
&& oderConstraints.isEmpty()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
@@ -637,7 +662,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
fork.fork();
}
}
else { // sequential (Step 6b is included)
else */
{// sequential (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
@@ -656,12 +682,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
eqPrimePrimeSet.add(eqPrime);
}
else if(eqPrimePrime.isPresent()) {
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe);
Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), oderConstraints, fc, parallel, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
}
else {
Set<Set<UnifyPair>> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe);
Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, oderConstraints, fc, parallel, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
@@ -683,8 +709,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* Step 7: Filter empty sets;
*/
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet))
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
writeLog("Result1 " + eqPrimePrimeSet.toString());
Iterator<UnifyPair> upit = eqPrimePrimeSet.iterator().next().iterator();
if (upit.next().getLhsType() instanceof WildcardType
|| upit.next().getLhsType() instanceof WildcardType) {
System.out.println("");
}
}
return eqPrimePrimeSet;
}
@@ -758,9 +790,19 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a_last = a;
List<Set<UnifyPair>> nextSetasListRest= new ArrayList<>();
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasListRest.remove(a_next);
}
}
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
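
For variance == 1 the loop now also prunes nextSetasListRest: every candidate that is strictly smaller than the chosen maximum a under oup is removed before any forks are created. Below is a sketch of that pruning with a plain total-order Comparator in place of OrderingUnifyPair (which is a partial order, hence the exact == 1 check in the commit); the candidate types are simplified to integers.

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

public class PruneSmallerCandidates {

    // Removes every candidate strictly smaller than 'max' under 'order',
    // mirroring the oup.compare(a, a_next) == 1 check on nextSetasListRest.
    static <T> List<T> prune(T max, List<T> candidates, Comparator<T> order) {
        List<T> rest = new ArrayList<>(candidates);
        Iterator<T> it = new ArrayList<>(rest).iterator();  // iterate over a copy while removing
        while (it.hasNext()) {
            T next = it.next();
            if (order.compare(max, next) > 0) {              // strictly smaller: drop it
                rest.remove(next);
            }
        }
        return rest;
    }

    public static void main(String[] args) {
        List<Integer> candidates = new ArrayList<>(List.of(3, 5)); // the maximum 7 was already taken out
        System.out.println(prune(7, candidates, Comparator.naturalOrder())); // [] -- all strictly smaller
    }
}
```

With a genuine partial order, incomparable candidates survive the pruning and are forked as well, which is exactly what happens in the commit.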
@@ -799,10 +841,56 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
elems.add(a);
//if (remainingSets.isEmpty()) {//must always hold, because only one element of topLevelSets contains more than one element
//writeLog("Vor unify2 Aufruf: " + elems.toString());
Set<Set<UnifyPair>> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
Set<Set<UnifyPair>> res = new HashSet<>();
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
if(parallel && (variance == 1)) {
/*
elems.add(a);
TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
fork.fork();
res = fork.join();
*/
Set<TypeUnifyTask> forks = new HashSet<>();
//TypeUnify2Task fork1 = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
forks.add(forkOrig);
forkOrig.fork();
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe);
forks.add(fork);
fork.fork();
}
//res = unify2(elems, eq, fc, parallel);
res = forkOrig.join();
for(TypeUnifyTask fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
add_res.add(fork_res);
}
}
else {
elems.add(a); //PL 2019-01-16 is this really needed? it is already done in line 833
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, discard all error cases
result = res;
if (res.iterator().next() instanceof WildcardType) {
System.out.println("");
}
}
else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
@@ -874,11 +962,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//result = result;
}}}}
else { if (variance == 0) {
writeLog("RESADD:" + result.toString() + " " + res.toString());
result.addAll(res);
}}}
//add all error cases and all correct results, respectively
}
else {
writeLog("RESADD:" + result.toString() + " " + res.toString());
result.addAll(res);
}
}
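
This is the core of the parallelization attempt: for variance == 1 the maximal candidate is handed to a forked TypeUnify2Task, every remaining candidate from nextSetasListRest gets its own fork, and the results are joined afterwards (in the commit the original fork's result becomes res while the other joins are collected into add_res). A generic, self-contained sketch of that fork-all/join-all shape, with the unification replaced by a dummy computation and all results joined into one set, is:

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class ForkAllJoinAllSketch {

    // Stand-in for TypeUnify2Task: "solves" one candidate; here it just wraps it.
    static class CandidateTask extends RecursiveTask<String> {
        final String candidate;
        CandidateTask(String candidate) { this.candidate = candidate; }
        @Override protected String compute() { return "solved(" + candidate + ")"; }
    }

    static class DriverTask extends RecursiveTask<Set<String>> {
        final String best;        // the maximal candidate (forkOrig in the commit)
        final List<String> rest;  // the remaining candidates (nextSetasListRest)
        DriverTask(String best, List<String> rest) { this.best = best; this.rest = rest; }

        @Override
        protected Set<String> compute() {
            Set<CandidateTask> forks = new HashSet<>();
            CandidateTask forkOrig = new CandidateTask(best);
            forks.add(forkOrig);
            forkOrig.fork();                          // fork the "original" candidate first
            while (!rest.isEmpty()) {                 // one fork per remaining candidate
                CandidateTask fork = new CandidateTask(rest.remove(0));
                forks.add(fork);
                fork.fork();
            }
            Set<String> results = new HashSet<>();
            for (CandidateTask fork : forks) {        // join everything, original included
                results.add(fork.join());
            }
            return results;
        }
    }

    public static void main(String[] args) {
        List<String> rest = new ArrayList<>(List.of("b", "c"));
        System.out.println(new ForkJoinPool().invoke(new DriverTask("a", rest)));
    }
}
```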
@@ -901,6 +991,31 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
if (variance == 1) {
if (parallel) {
for (Set<Set<UnifyPair>> par_res : add_res) {
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, discard all error cases
result = par_res;
if (par_res.iterator().next() instanceof WildcardType) {
System.out.println("");
}
}
else {
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//add all error cases and all correct results, respectively
writeLog("RESADD:" + result.toString() + " " + par_res.toString());
result.addAll(par_res);
}
}
}
break;
}
/* nextSetasList = nextSetasListRest; */
/* already done before the unify2 call and assigned to nextSetasListRest
*/
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
while (nextSetasListIt.hasNext()) {
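
After the sequential branch has filled result, the joined fork results in add_res are merged with the same policy as above: a defined result set replaces an accumulated set of error cases, and results of the same kind are added together. A small sketch of that merge rule, with isUndefinedPairSetSet replaced by a hypothetical isUndefined predicate, is:

```java
import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;

public class ResultMergeSketch {

    // Folds one partial result into the accumulated result: defined results win
    // over undefined ones, results of the same kind are accumulated.
    static <R> Set<R> merge(Set<R> result, Set<R> partial, Predicate<Set<R>> isUndefined) {
        if (!isUndefined.test(partial) && isUndefined.test(result)) {
            return new HashSet<>(partial);            // first defined result replaces the error cases
        }
        if ((isUndefined.test(partial) && isUndefined.test(result))
                || (!isUndefined.test(partial) && !isUndefined.test(result))
                || result.isEmpty()) {
            Set<R> merged = new HashSet<>(result);    // accumulate results of the same kind
            merged.addAll(partial);
            return merged;
        }
        return result;                                // defined result kept, undefined partial dropped
    }

    public static void main(String[] args) {
        Predicate<Set<String>> isUndefined = s -> s.contains("UNDEF");
        Set<String> result = new HashSet<>(Set.of("UNDEF"));
        result = merge(result, Set.of("x = Integer"), isUndefined);
        System.out.println(result); // [x = Integer] -- the defined result replaced the error case
    }
}
```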
@@ -1828,6 +1943,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
void writeLog(String str) {
if (log) {
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write(str+"\n\n");
logFile.flush();