Merge branch 'unify-test' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into unify-PartialOrderSet

Conflicts:
	src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
	modified:   ../../resources/bytecode/javFiles/Matrix.jav
	modified:   ../../resources/bytecode/javFiles/MatrixOP.jav
Martin Plümicke 2019-02-11 14:41:28 +01:00
commit 0c03c2fbc8
4 changed files with 76 additions and 43 deletions

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

@@ -16,9 +16,14 @@ public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
- public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
+ public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
this.setToFlatten = setToFlatten;
+ this.nextSetElement = nextSetElement;
}
+ Set<UnifyPair> getNextSetElement() {
+ return nextSetElement;
+ }
@Override
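
The hunk above gives TypeUnify2Task a nextSetElement field plus a package-private getter, so that after join() the parent task can still see which nextSet element a fork was started with. Below is a minimal, self-contained sketch of that fork/join pattern with simplified stand-in types (DemoTask and its String payload are invented for illustration and are not the project's classes):

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Simplified stand-in for TypeUnify2Task: the fork remembers the nextSet
// element it was created for and exposes it through a getter.
class DemoTask extends RecursiveTask<Set<String>> {
    private final String nextSetElement;

    DemoTask(String nextSetElement) {
        this.nextSetElement = nextSetElement;
    }

    String getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<String> compute() {
        Set<String> result = new HashSet<>();
        result.add("processed " + nextSetElement);
        return result;
    }
}

public class NextSetElementSketch {
    public static void main(String[] args) {
        DemoTask fork = new DemoTask("someElement");
        fork.fork();                        // runs asynchronously on the common ForkJoinPool
        Set<String> forkRes = fork.join();  // wait for and fetch the fork's result
        // After join() the parent can relate the result back to the element the
        // fork handled, which is what aParDef is built from in TypeUnifyTask.
        System.out.println(fork.getNextSetElement() + " -> " + forkRes);
    }
}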

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -59,6 +59,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
private boolean printtag = false;
Boolean log = true; //indicates whether a log file should be written to System.getProperty("user.dir")+"/test/logFiles/log"
+ /**
+ * Element from the nextSet that was added to this thread's equations
+ */
+ Set<UnifyPair> nextSetElement;
/**
* For the threads
*/
@@ -142,6 +147,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+ this.nextSetElement = nextSetElement;
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
@@ -686,15 +692,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//PL 2017-09-29 this replaces //(!eqPrimePrime.isPresent())
//PL 2018-05-18 both conditions must hold, since eqPrime represents changes in everything except subst
//and eqPrimePrime represents changes in subst.
- try {
+ //try {
if (isSolvedForm(eqPrime)) {
- logFile.write("eqPrime:" + eqPrime.toString()+"\n");
- logFile.flush();
+ writeLog("eqPrime:" + eqPrime.toString()+"\n");
}
- }
- catch (IOException e) {
+ //}
+ //catch (IOException e) {
System.err.println("log-File nicht vorhanden");
- }
+ //}
eqPrimePrimeSet.add(eqPrime);
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
@@ -920,15 +925,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//writeLog("Vor unify2 Aufruf: " + elems.toString());
Set<Set<UnifyPair>> res = new HashSet<>();
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
+ Set<Set<UnifyPair>> aParDef = new HashSet<>();
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
- Set<TypeUnifyTask> forks = new HashSet<>();
+ Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, cons);
//forks.add(forkOrig);
forkOrig.fork();
/* FORK END */
@@ -946,7 +952,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
fork.fork();
}
@@ -962,24 +968,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
};
/* FORK END */
- for(TypeUnifyTask fork : forks) {
+ for(TypeUnify2Task fork : forks) {
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; moved to the end of compute
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
+ if (!isUndefinedPairSetSet(fork_res)) {
+ aParDef.add(fork.getNextSetElement());
+ }
};
}
} else {
if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
- Set<TypeUnifyTask> forks = new HashSet<>();
+ Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, cons);
//forks.add(forkOrig);
forkOrig.fork();
/* FORK END */
@@ -997,7 +1008,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
fork.fork();
}
@@ -1013,12 +1024,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
};
/* FORK END */
- for(TypeUnifyTask fork : forks) {
+ for(TypeUnify2Task fork : forks) {
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; moved to the end of compute
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
+ if (!isUndefinedPairSetSet(fork_res)) {
+ aParDef.add(fork.getNextSetElement());
+ }
};
}
} else {
@@ -1031,7 +1047,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK BEGIN */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, cons);
//forks.add(forkOrig);
forkOrig.fork();
/* FORK END */
@@ -1047,7 +1063,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, cons);
forks.add(fork);
fork.fork();
}
@@ -1198,7 +1214,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* commented out to switch off all max and min consideration BEGIN */
- if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
+ if (!result.isEmpty() && (!isUndefinedPairSetSet(res) || !aParDef.isEmpty())) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
@@ -1232,10 +1248,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
+ writeLog("aParDef: " + aParDef.toString());
+ aParDef.add(a);
+ Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
+ while(aParDefIt.hasNext()) {
+ Set<UnifyPair> a_new = aParDefIt.next();
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
- if (a.equals(a_next) ||
- (oup.compare(a, a_next) == 1)) {
+ if (a_new.equals(a_next) ||
+ (oup.compare(a_new, a_next) == 1)) {
writeLog("Removed: " + a_next.toString());
nextSetasList.remove(a_next);
}
@@ -1245,6 +1266,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}
+ }
else { if (variance == -1) {
/* moved up before the if
if (parallel) {
@@ -1274,10 +1296,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
+ writeLog("aParDef: " + aParDef.toString());
+ aParDef.add(a);
+ Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
+ while(aParDefIt.hasNext()) {
+ Set<UnifyPair> a_new = aParDefIt.next();
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
- if (a.equals(a_next) ||
- (oup.compare(a, a_next) == -1)) {
+ if (a_new.equals(a_next) ||
+ (oup.compare(a_new, a_next) == -1)) {
writeLog("Removed: " + a_next.toString());
nextSetasList.remove(a_next); //PL changed 2019-01-09
}
@@ -1287,6 +1314,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}
+ }
else { if (variance == 0) {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
break; }
@@ -1320,7 +1348,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* commented out to switch off all max and min consideration END */
- if (isUndefinedPairSetSet(res)) {
+ if (isUndefinedPairSetSet(res) && aParDef.isEmpty()) {
int nofstred= 0;
Set<UnifyPair> abhSubst = res.stream()
.map(b ->
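
Taken together, these TypeUnifyTask hunks change how nextSetasList is pruned: each parallel fork now reports the nextSet element it was started with, the elements whose forks returned a defined result are collected in aParDef, and the removal loop then filters the remaining candidates against every element in aParDef (plus the sequentially processed element a) instead of against a alone. Below is a rough, self-contained sketch of that pruning step, with plain integers and an ordinary integer comparison standing in for UnifyPair sets and oup.compare; all names here are illustrative, not the project's API:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

public class PruneSketch {
    // Stand-in for oup.compare(...): 1 means the left element is greater than
    // the right one in the partial order, -1 smaller, 0 equal.
    static int compare(int left, int right) {
        return Integer.compare(left, right);
    }

    public static void main(String[] args) {
        List<Integer> nextSetasList = new ArrayList<>(List.of(1, 2, 3, 4, 5));
        int a = 3;                                        // element handled in this iteration
        Set<Integer> aParDef = new HashSet<>(Set.of(4));  // elements solved by parallel forks

        // As in the variance == 1 branch: a itself joins aParDef, and every remaining
        // candidate that is equal to or dominated by one of these elements is dropped.
        aParDef.add(a);
        for (Integer aNew : aParDef) {
            Iterator<Integer> it = nextSetasList.iterator();
            while (it.hasNext()) {
                Integer aNext = it.next();
                if (aNew.equals(aNext) || compare(aNew, aNext) == 1) {
                    it.remove();                          // corresponds to nextSetasList.remove(a_next)
                }
            }
        }
        System.out.println("remaining candidates: " + nextSetasList);  // prints [5]
    }
}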

resources/bytecode/javFiles/Matrix.jav

@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
- import java.lang.Float;
+ //import java.lang.Float;
//import java.lang.Byte;
//import java.lang.Boolean;

resources/bytecode/javFiles/MatrixOP.jav

@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
- //import java.lang.Byte;
+ import java.lang.Byte;
import java.lang.Boolean;
public class MatrixOP extends Vector<Vector<Integer>> {