modified: src/de/dhbwstuttgart/core/JavaTXCompiler.java
modified: src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java: lines 606 - 623 inserted to filter the results computed in parallel correctly. TODO: the same has to be done for variance -1; from line 550 on, nextSetasListRest has to be filtered after every fork call.
parent 3e4e66d60d
commit 2c9f085cca
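As a reading aid, here is the filter rule described by the commit message, restated as a small self-contained helper. This is the editor's paraphrase, not code from the repository; the predicate parameter stands in for TypeUnifyTask.isUndefinedPairSetSet, i.e. the check whether a result set only encodes failed unification attempts.

    import java.util.HashSet;
    import java.util.Set;
    import java.util.function.Predicate;

    // Sketch of the merge rule: the first correct fork result replaces any
    // accumulated error results; results of the same kind are unioned.
    final class ForkResultMerge {
        static <R> Set<R> merge(Set<R> result, Set<R> parRes, Predicate<Set<R>> isUndefined) {
            if (!isUndefined.test(parRes) && isUndefined.test(result)) {
                // a correct result is found: drop the error cases collected so far
                return new HashSet<>(parRes);
            }
            if ((isUndefined.test(parRes) && isUndefined.test(result))
                    || (!isUndefined.test(parRes) && !isUndefined.test(result))
                    || result.isEmpty()) {
                // both sides are errors, or both are correct: keep the union
                result.addAll(parRes);
            }
            return result;
        }
    }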
src/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -39,7 +39,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 public class JavaTXCompiler {
-    final boolean parallel = false;
+    final boolean parallel = true;
     final CompilationEnvironment environment;
     public final Map<File, SourceFile> sourceFiles = new HashMap<>();
     Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/test/logFiles/log" geschrieben werden soll?
src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -526,7 +526,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         //if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
         //writeLog("Vor unify2 Aufruf: " + eq.toString());
         Set<Set<UnifyPair>> res = new HashSet<>();
-        Set<Set<UnifyPair>> add_res = new HashSet<>();
+        Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
         if(parallel && (variance == 1)) {
             /*
             elems.add(a);
@@ -560,7 +560,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             res = forkOrig.join();
             for(TypeUnifyTask fork : forks) {
                 Set<Set<UnifyPair>> fork_res = fork.join();
-                add_res.addAll(fork_res);
+                add_res.add(fork_res);
             }
 
         }
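Why add_res changed shape (Set<Set<UnifyPair>> to Set<Set<Set<UnifyPair>>>) and addAll became add: each fork's complete result is kept as one element, so the loop inserted in the next hunk can accept or reject a fork's result as a whole instead of seeing its solutions flattened together. A tiny self-contained illustration by the editor, with String standing in for UnifyPair:

    import java.util.HashSet;
    import java.util.Set;

    public class ForkResultNesting {
        public static void main(String[] args) {
            Set<Set<String>> forkRes = Set.of(Set.of("a"), Set.of("b")); // one fork's result

            Set<Set<String>> flattened = new HashSet<>();      // old shape of add_res
            flattened.addAll(forkRes);                         // fork boundary lost: two loose solutions

            Set<Set<Set<String>>> grouped = new HashSet<>();   // new shape of add_res
            grouped.add(forkRes);                              // fork boundary kept: one result with two solutions

            System.out.println(flattened.size() + " vs " + grouped.size()); // prints "2 vs 1"
        }
    }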
@@ -603,7 +603,24 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
         if (variance == 1) {
             if (parallel) {
-                result.addAll(add_res);
+                for (Set<Set<UnifyPair>> par_res : add_res) {
+                    if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
+                        //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
+                        result = par_res;
+                        if (par_res.iterator().next() instanceof WildcardType) {
+                            System.out.println("");
+                        }
+                    }
+                    else {
+                        if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
+                            || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
+                            || result.isEmpty()) {
+                            //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
+                            writeLog("RESADD:" + result.toString() + " " + par_res.toString());
+                            result.addAll(par_res);
+                        }
+                    }
+                }
                 break;
             }
             /* nextSetasList = nextSetasListRest; */
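The TODO in the commit message notes that the same filtering still has to be applied for variance -1. A rough sketch of what that branch could look like, mirroring the loop above; this is the editor's guess at the TODO, not code from this commit, and it assumes the existing TypeUnifyTask helpers isUndefinedPairSetSet and writeLog as well as the surrounding result/add_res variables:

    // Editor's sketch of the TODO (variance == -1), not part of this commit.
    if (variance == -1) {
        if (parallel) {
            for (Set<Set<UnifyPair>> par_res : add_res) {
                if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
                    // a correct result replaces the accumulated error cases
                    result = par_res;
                }
                else if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
                        || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
                        || result.isEmpty()) {
                    // results of the same kind are collected together
                    writeLog("RESADD:" + result.toString() + " " + par_res.toString());
                    result.addAll(par_res);
                }
            }
            break;
        }
    }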