modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
Threads mostly done
Martin Plümicke 2019-01-29 14:51:19 +01:00
parent 26634bb038
commit 9e1d58f4b1
4 changed files with 54 additions and 38 deletions

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

@@ -29,6 +29,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
return new HashSet<>(); }
else
*/
noOfThread--;
return res;
}
}
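The decrement above is the point of this commit: each task now decrements the shared thread counter exactly once, at the end of its own compute(), instead of the parent decrementing it after join() (see the commented-out decrements in TypeUnifyTask below). A minimal sketch of that pattern, with a hypothetical solve() standing in for the unification work:

import java.util.concurrent.RecursiveTask;

//Hypothetical, simplified task class; only the counter handling mirrors the commit.
abstract class CountingTask<T> extends RecursiveTask<T> {
    protected static int noOfThread = 0;   // shared with subclasses, as in TypeUnifyTask

    @Override
    protected T compute() {
        T res = solve();                   // the actual unification work
        noOfThread--;                      // decremented exactly once, at the end of compute
        return res;
    }

    protected abstract T solve();
}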

src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

@@ -59,11 +59,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* For the threads
*/
private static int noOfThread = 0;
protected static int noOfThread = 0;
private static int totalnoOfThread = 0;
int thNo;
protected boolean one = false;
Integer MaxNoOfThreads = 4;
Integer MaxNoOfThreads = 8;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
FileWriter logFile;
@@ -200,6 +200,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
noOfThread--;
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
@@ -891,7 +892,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
//elems.add(a); PL 2019-01-20 Has to go, because this must happen in the respective thread. For the sequential case
//in the else branch
//if (remainingSets.isEmpty()) {//must always hold, because only 1 element of the topLevelSets contains more than one element
@@ -899,28 +900,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> res = new HashSet<>();
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
/*
elems.add(a);
TypeUnify2Task fork = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
fork.fork();
res = fork.join();
*/
Set<TypeUnifyTask> forks = new HashSet<>();
//TypeUnify2Task fork1 = new TypeUnify2Task(elems, eq, fc, parallel, logFile, log);
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
//forks.add(forkOrig);
forkOrig.fork();
FORK END */
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
@@ -939,23 +929,23 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forks.add(fork);
fork.fork();
}
res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN
/* FORK BEGIN */
synchronized (this) {
res = forkOrig.join();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
//add_res.add(fork_res);
};
FORK END */
/* FORK END */
for(TypeUnifyTask fork : forks) {
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
add_res.add(fork_res);
};
}
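This branch, like the variance == -1 and variance == 2 branches below, follows one fork/join shape: fork a TypeUnify2Task per alternative, keep one share of the work in the current task (forkOrig/unify2), then join the forks and collect their results into add_res. A small, self-contained sketch of that shape; the element names are made up and the String results stand in for Set<Set<UnifyPair>>:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

class ElemTask extends RecursiveTask<Set<String>> {
    private final String elem;
    ElemTask(String elem) { this.elem = elem; }
    @Override protected Set<String> compute() {
        Set<String> res = new HashSet<>();
        res.add("solved:" + elem);                        // stands in for unify2(...) on one alternative
        return res;
    }
}

public class ForkJoinShape {
    public static void main(String[] args) {
        List<ElemTask> forks = new ArrayList<>();
        for (String a : Arrays.asList("b", "c", "d")) {   // the remaining alternatives
            ElemTask fork = new ElemTask(a);
            forks.add(fork);
            fork.fork();                                  // runs asynchronously in the common pool
        }
        Set<Set<String>> add_res = new HashSet<>();
        Set<String> res = new ElemTask("a").compute();    // current thread keeps one share itself
        for (ElemTask fork : forks) {
            add_res.add(fork.join());                     // collect the forked results
        }
        System.out.println(res + " " + add_res);
    }
}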
@@ -967,11 +957,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
//forks.add(forkOrig);
forkOrig.fork();
FORK END */
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
@@ -990,23 +980,23 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forks.add(fork);
fork.fork();
}
res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN
/* FORK BEGIN */
synchronized (this) {
res = forkOrig.join();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
//add_res.add(fork_res);
};
FORK END */
/* FORK END */
for(TypeUnifyTask fork : forks) {
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
add_res.add(fork_res);
};
}
@@ -1019,11 +1009,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<Set<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, logFile, log, rekTiefe);
//forks.add(forkOrig);
forkOrig.fork();
FORK END */
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
@@ -1040,23 +1030,23 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forks.add(fork);
fork.fork();
}
res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN
/* FORK BEGIN */
synchronized (this) {
res = forkOrig.join();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
//add_res.add(fork_res); //probably wrong
};
FORK END */
/* FORK END */
for(TypeUnifyTask fork : forks) {
synchronized (this) {
Set<Set<UnifyPair>> fork_res = fork.join();
writeLog("Join " + new Integer(fork.thNo).toString());
noOfThread--;
//noOfThread--; moved to the end of compute
add_res.add(fork_res);
};
}}

src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java

@@ -110,6 +110,18 @@ implements IFiniteClosure {
}
}
void testSmaller() {
UnifyType tq1, tq2, tq3;
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder());
List<UnifyType> l1 = new ArrayList<>();
List<UnifyType> l2 = new ArrayList<>();
l1.add(tq1);
tq2 = new ReferenceType("java.util.Vector", new TypeParams(l1));
l2.add(tq2);
tq3 = new ReferenceType("java.util.Vector", new TypeParams(l2));
Set<UnifyType> smaller = smaller(tq3, new HashSet<>());
}
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
@@ -641,8 +653,7 @@ implements IFiniteClosure {
*/
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
if ((left instanceof ExtendsType && right instanceof ReferenceType)
|| (right instanceof ExtendsType && left instanceof ReferenceType))
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
/*
List<UnifyType> al = new ArrayList<>();
@@ -693,6 +704,13 @@ implements IFiniteClosure {
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
long smallerLen = smallerRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)).count();
if (smallerLen == 0) return -1;
@@ -702,6 +720,13 @@ implements IFiniteClosure {
hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
long greaterLen = greaterRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)).count();
if (greaterLen == 0) return 1;
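The two logging blocks added above differ only in their label. A hypothetical helper, not part of this commit, could factor them out; it assumes FiniteClosure's existing logFile writer and a java.io.IOException import, exactly as used in the added lines:

//Hypothetical helper for FiniteClosure; gates output on the "Matrix" test case like the diff above.
private void logForMatrix(UnifyType left, UnifyType right, String label, Object value) {
    if (!(left.getName().equals("Matrix") || right.getName().equals("Matrix"))) return;
    try {
        logFile.write("\n" + label + ": " + value);
        logFile.flush();
    }
    catch (IOException e) {
        System.err.println("no LogFile");
    }
}

The two call sites would then shrink to logForMatrix(left, right, "smallerRes", smallerRes) and logForMatrix(left, right, "greaterRes", greaterRes).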

src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java

@@ -229,7 +229,7 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
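//The reduce above collapses the pairwise fc.compare(...) results into one ordering value:
//if every mapped pair agrees, that common value survives; any disagreement turns the
//accumulator into 0, i.e. the two sets count as incomparable. For example, mapped results
//(-1, -1, -1) reduce to -1, while (-1, 1) reduce to 0. Note that the x == y test on the
//boxed Integers only behaves like value equality because -1, 0 and 1 lie in the Integer
//cache; comparing with equals (or intValue()) would make that explicit.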
//Case 4
else {