forked from JavaTX/JavaCompilerCore
modified: ../../src/de/dhbwstuttgart/core/JavaTXCompiler.java
modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

Parallel-processing approach implemented. Still throws a ConcurrentModificationException. How the result of the parallel fork.join processing is used at line 530 still needs to be clarified.
This commit is contained in:
parent 496842a3bf
commit 9d37e8f637
@@ -175,8 +175,8 @@ public class JavaTXCompiler {
                 return y; } )
             .collect(Collectors.toCollection(HashSet::new));
         varianceInheritance(xConsSet);
-        Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
-        //Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure, logFile, log);
+        //Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
+        Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure, logFile, log);
         System.out.println("RESULT: " + result);
         logFile.write("RES: " + result.toString()+"\n");
         logFile.flush();
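The hunk above switches JavaTXCompiler from the sequential entry point unifySequential(...) to unify(...), which, per the commit message, drives a fork/join-based implementation. For orientation, here is a minimal, self-contained sketch of the java.util.concurrent fork/join pattern that TypeUnifyTask and TypeUnify2Task appear to build on: a RecursiveTask subclass whose compute() forks subtasks and unions their joined results. All names in the sketch are illustrative stand-ins, not taken from the repository.

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Illustrative stand-in for the TypeUnifyTask/TypeUnify2Task structure:
// fork one subtask per work item, then join them and build the union.
class UnionTask extends RecursiveTask<Set<Integer>> {
    private final List<Set<Integer>> work;

    UnionTask(List<Set<Integer>> work) { this.work = work; }

    @Override
    protected Set<Integer> compute() {
        Set<Integer> result = new HashSet<>();
        if (work.size() <= 1) {                  // small enough: handle sequentially
            work.forEach(result::addAll);
            return result;
        }
        Set<UnionTask> forks = new HashSet<>();
        for (Set<Integer> item : work) {         // one subtask per element, as in the last hunk below
            UnionTask fork = new UnionTask(List.of(item));
            forks.add(fork);
            fork.fork();                         // schedule asynchronously in the pool
        }
        for (UnionTask fork : forks) {
            result.addAll(fork.join());          // block until the subtask finishes
        }
        return result;
    }

    public static void main(String[] args) {
        Set<Integer> r = ForkJoinPool.commonPool()
                .invoke(new UnionTask(List.of(Set.of(1, 2), Set.of(3), Set.of(4, 5))));
        System.out.println(r);                   // union of all input sets
    }
}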
@@ -19,7 +19,10 @@ public class TypeUnify2Task extends TypeUnifyTask {
     @Override
     protected Set<Set<UnifyPair>> compute() {
         Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, fc, parallel);
-        if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
-        else return res;
+        /*if (isUndefinedPairSetSet(res)) {
+            return new HashSet<>(); }
+        else
+        */
+        return res;
     }
 }
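In the TypeUnify2Task hunk above, compute() no longer collapses an all-undefined result into an empty set; it now returns res unchanged, and the caller's existing isUndefinedPairSetSet check after the joins (visible as context in the last hunk) sees the unfiltered result. A minimal sketch of this "return raw, filter at the join site" shape, with hypothetical names that are not part of the project:

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Hypothetical task that returns its raw result; the caller decides after join()
// whether the result counts as "undefined" and should be discarded.
class RawResultTask extends RecursiveTask<Set<String>> {
    private final Set<String> input;

    RawResultTask(Set<String> input) { this.input = input; }

    @Override
    protected Set<String> compute() {
        return new HashSet<>(input);             // no filtering inside compute() any more
    }

    public static void main(String[] args) {
        RawResultTask fork = new RawResultTask(Set.of("UNDEFINED"));
        fork.fork();                             // runs in ForkJoinPool.commonPool()
        Set<String> res = fork.join();
        // Join-side filtering, analogous to checking isUndefinedPairSetSet on the joined result:
        Set<String> filtered = res.contains("UNDEFINED") ? new HashSet<>() : res;
        System.out.println(filtered);            // [] for this input
    }
}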
@@ -321,6 +321,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         /*
          * Step 6 a) Restart (fork) for pairs where subst was applied
          */
+        /*
         if(parallel) {
             if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) commented out and replaced by
                                                                  //PL 2017-09-29 this replaces //(!eqPrimePrime.isPresent())
@@ -340,7 +341,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 fork.fork();
             }
         }
-        else { // sequential (Step 6b is included)
+        else */
+        { // sequential (Step 6b is included)
             if (printtag) System.out.println("nextStep: " + eqPrimePrime);
             if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) commented out and replaced by
                                                                    //PL 2017-09-29 this replaces //(!eqPrimePrime.isPresent())
@@ -356,12 +358,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             eqPrimePrimeSet.add(eqPrime);
         }
         else if(eqPrimePrime.isPresent()) {
-            Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), fc, false);
+            Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), fc, parallel);
 
             eqPrimePrimeSet.addAll(unifyres);
         }
         else {
-            Set<Set<UnifyPair>> unifyres = unify(eqPrime, fc, false);
+            Set<Set<UnifyPair>> unifyres = unify(eqPrime, fc, parallel);
 
 
             eqPrimePrimeSet.addAll(unifyres);
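The two changes above replace a hard-coded false with the parallel flag in the recursive unify(...) calls, so the parallel/sequential decision is propagated down the recursion instead of being switched off below the first level. A minimal, self-contained sketch of that flag-propagation pattern, with illustrative names that are not the project's own:

import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Each recursion level either forks a subtask or recurses inline, and passes the
// same "parallel" flag down instead of hard-coding false for the nested calls.
class SumTask extends RecursiveTask<Long> {
    private final List<Long> values;
    private final boolean parallel;

    SumTask(List<Long> values, boolean parallel) {
        this.values = values;
        this.parallel = parallel;
    }

    @Override
    protected Long compute() {
        if (values.size() <= 2) {                                        // small base case
            return values.stream().mapToLong(Long::longValue).sum();
        }
        int mid = values.size() / 2;
        SumTask left = new SumTask(values.subList(0, mid), parallel);    // flag propagated
        SumTask right = new SumTask(values.subList(mid, values.size()), parallel);
        if (parallel) {
            left.fork();                                                 // left half runs asynchronously
            return right.compute() + left.join();
        }
        return left.compute() + right.compute();                         // fully sequential path
    }

    public static void main(String[] args) {
        List<Long> values = List.of(1L, 2L, 3L, 4L, 5L, 6L);
        System.out.println(ForkJoinPool.commonPool().invoke(new SumTask(values, true)));  // 21
    }
}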
@@ -374,10 +376,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         /*
          * Step 6 b) Build the union over everything.
          */
+        /*
         if(parallel)
             for(TypeUnifyTask fork : forks)
                 eqPrimePrimeSet.addAll(fork.join());
+        */
 
         /*
          * Step 7: Filter empty sets;
@@ -508,18 +511,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             //if (remainingSets.isEmpty()) {//must always hold, because only one element of topLevelSets contains more than one element
             //writeLog("Vor unify2 Aufruf: " + eq.toString());
             Set<Set<UnifyPair>> res = new HashSet<>();
-            if(parallel) {
+            if(parallel && (variance == 1)) {
+                /*
+                elems.add(a);
+                TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
+                fork.fork();
+                res = fork.join();
+                */
+
                 Set<TypeUnifyTask> forks = new HashSet<>();
                 for(Set<UnifyPair> nSaL : nextSetasListRest) {
-                    TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, true, logFile, log);
+                    elems.add(nSaL);
+                    TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
                     forks.add(fork);
                     fork.fork();
                 }
                 for(TypeUnifyTask fork : forks) {
                     res.addAll(fork.join());
+                    System.out.println("");
                 }
+
             }
             else {
+                elems.add(a);
                 res = unify2(elems, eq, fc, parallel);
             }
             if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
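The commit message notes that this version still throws a ConcurrentModificationException. One plausible source, assuming the forked TypeUnify2Task instances iterate over the set they were handed: in the loop above every fork receives the same elems reference, and the submitting thread keeps calling elems.add(nSaL) after earlier forks have already started running. The sketch below reproduces that hazard with placeholder names and marks the usual mitigation (give each task a defensive copy); it illustrates the failure mode and is not the project's fix.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// The submitting thread keeps adding to the same HashSet that already-forked tasks
// are iterating. HashSet is not thread-safe, so the iteration may fail with a
// ConcurrentModificationException (rethrown from join()) or silently misbehave.
class SharedSetTask extends RecursiveTask<Integer> {
    private final Set<Integer> elems;            // shared by reference with the caller

    SharedSetTask(Set<Integer> elems) { this.elems = elems; }

    @Override
    protected Integer compute() {
        int sum = 0;
        for (Integer e : elems) {                // iterates while the caller may still mutate
            sum += e;
        }
        return sum;
    }

    public static void main(String[] args) {
        Set<Integer> elems = new HashSet<>();
        Set<SharedSetTask> forks = new HashSet<>();
        for (int i = 0; i < 1_000; i++) {
            elems.add(i);                        // mutation after earlier forks have started
            SharedSetTask fork = new SharedSetTask(elems);   // same reference every time
            // A defensive copy avoids the race: new SharedSetTask(new HashSet<>(elems))
            forks.add(fork);
            fork.fork();
        }
        int total = 0;
        for (SharedSetTask fork : forks) {
            total += fork.join();                // may rethrow ConcurrentModificationException
        }
        System.out.println(total);               // reached only if no iteration saw a concurrent add
    }
}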