
Compare commits


23 Commits

Author SHA1 Message Date
NoName11234 7818722c96 fixed missing oderConstraints 2024-05-23 10:50:44 +02:00
NoName11234 4dec73cada implemented scalark3 test 2024-05-22 20:48:26 +02:00
NoName11234 7d3ef78ccd small changes for compatibility of matrixk2 test 2024-05-17 14:56:04 +02:00
NoName11234 d8f744a774 added matrixk2 test 2024-05-17 14:47:30 +02:00
NoName11234 48293b3243 reworked test for changes to multithreading 2024-05-15 15:50:34 +02:00
NoName11234 bab3511259 implemented scalark2test 2024-05-15 15:49:18 +02:00
NoName11234 46eb7b12e6 added matrix test 2024-05-10 20:25:11 +02:00
NoName11234 80c3e5d02c fixed issues from merging 2024-05-10 19:15:22 +02:00
NoName11234 cb9798dd5a Merge branch 'performanceTestRecursionOptimization' into newMain 2024-05-10 18:55:58 +02:00
NoName11234 0cdbc7a19a Merge branch 'performanceTestUnifyResultModel' into newMain 2024-05-10 18:46:13 +02:00
NoName11234 3bb7f38805 implemented ConstraintSetRepository 2024-05-01 20:05:28 +02:00
NoName11234 6a92b0f194 added class ConstraintSetRepository 2024-05-01 19:42:24 +02:00
NoName11234 7e5e63dc5f removed class UnifyResultModel 2024-04-29 22:18:25 +02:00
NoName11234 69402677f7 implemented UnifyTaskModelParallel 2024-04-29 22:14:24 +02:00
NoName11234 2ffa86e821 added class UnifyTaskModelParallel 2024-04-29 21:43:30 +02:00
NoName11234 f75d646a73 implemented UnifyResultModelParallel 2024-04-29 19:53:25 +02:00
NoName11234 ce2401f3eb added class UnifyResultModelParallel 2024-04-29 16:53:23 +02:00
NoName11234 996ef1a735 Merge branch 'performanceTestPlaceholderGenerator' into performanceTestBase 2024-04-26 12:51:00 +02:00
NoName11234 2bb2f1a898 Merge branch 'performanceTestThreadCounter' into performanceTestBase 2024-04-26 12:50:44 +02:00
NoName11234 cf8653567c changed arraylist to concurrent set and removed snychronized 2024-04-25 19:17:12 +02:00
NoName11234 72908f6fb4 removed variance from typeunifytask 2024-04-25 19:08:26 +02:00
NoName11234 dd62d8d6b9 removed thread counter variables from typeunifytask 2024-04-25 16:50:14 +02:00
NoName11234 dc803bd6b8 changed parameters of forkjoinpools 2024-04-25 16:49:39 +02:00
13 changed files with 2238 additions and 589 deletions

View File: ConstraintSetRepository.java (new)

@@ -0,0 +1,21 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.HashSet;
import java.util.Set;
public class ConstraintSetRepository {
private Set<Integer> sets = new HashSet<>();
public boolean containsSet(Set<Set<UnifyPair>> set){
Integer hash = Integer.valueOf(set.hashCode());
if(sets.contains(hash)){
return true;
}else{
sets.add(hash);
return false;
}
}
}
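The new repository deduplicates whole constraint sets by recording only their hash codes: containsSet both tests and records in a single call (so the name is slightly misleading), and two distinct sets whose hashCode values happen to collide would be conflated. A minimal usage sketch, hypothetical and not part of this diff:

// Hypothetical usage sketch for ConstraintSetRepository (not part of the diff).
import java.util.HashSet;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.ConstraintSetRepository;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class ConstraintSetRepositorySketch {
public static void main(String[] args) {
ConstraintSetRepository repo = new ConstraintSetRepository();
Set<Set<UnifyPair>> constraints = new HashSet<>();
// First call: the hash is unknown, it gets recorded, and false is returned.
System.out.println(repo.containsSet(constraints)); // false
// Second call with an equal set: the hash is already recorded, true is returned.
System.out.println(repo.containsSet(constraints)); // true
}
}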

View File: IUnifyResultListener.java (new)

@@ -0,0 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Set;
public interface IUnifyResultListener {
void onNewTypeResultFound(Set<Set<UnifyPair>> evt);
}
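A minimal implementation of this listener interface might look like the following sketch (the class name RecordingListener is an assumption; it mirrors the UnifyResultListenerImpl that is removed further down):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
// Hypothetical listener that simply collects every result set it is notified about.
public class RecordingListener implements IUnifyResultListener {
private final List<Set<Set<UnifyPair>>> results = new ArrayList<>();
public synchronized void onNewTypeResultFound(Set<Set<UnifyPair>> evt) {
results.add(evt); // may be called from several pool threads, hence synchronized
}
public synchronized List<Set<Set<UnifyPair>>> getResults() {
return new ArrayList<>(results);
}
}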

View File: TypeUnify.java

@@ -3,16 +3,12 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
@@ -28,9 +24,12 @@ public class TypeUnify {
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
usedTasks.setPool(pool);
ret.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@@ -54,9 +53,12 @@ public class TypeUnify {
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
usedTasks.setPool(pool);
ret.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository);
pool.invoke(unifyTask);
return ret;
}
@@ -72,10 +74,12 @@ public class TypeUnify {
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, statistics);
ForkJoinPool pool = new ForkJoinPool();
public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
usedTasks.setPool(pool);
ret.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository, statistics);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@@ -108,8 +112,9 @@ public class TypeUnify {
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, constraintSetRepository);
unifyTask.statistics = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
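All three parallel entry points now share the same setup: a ForkJoinPool with parallelism equal to Runtime.getRuntime().availableProcessors() and asyncMode = true (FIFO scheduling for forked tasks that are never joined), which is handed to both UnifyTaskModelParallel and UnifyResultModelParallel before the root TypeUnifyTask is invoked. A hypothetical call-site sketch, assuming cons, fc, undConstraints and oderConstraints come from the surrounding type inference:

// Hypothetical call-site sketch; the variable names are assumptions.
UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, fc);
UnifyTaskModelParallel tasks = new UnifyTaskModelParallel();
Writer logFile = new StringWriter(); // any java.io.Writer works; logging is disabled below
Set<Set<UnifyPair>> solutions = new TypeUnify().unify(undConstraints, oderConstraints, fc, logFile, false, urm, tasks);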

View File: TypeUnify2Task.java

@@ -15,62 +15,58 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
//statistics
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe );
}
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
//statistics
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm,
Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraintUebergabe, constraintSetRepository);
}
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, constraintSetRepository);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = new HashSet<>();
if (!constraintSetRepository.containsSet(setToFlatten)) {
res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
}
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
*/
//writeLog("xxx");
//noOfThread--;
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
*/
//writeLog("xxx");
//noOfThread--;
return res;
}
public void closeLogFile() {
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
public void closeLogFile() {
try {
logFile.close();
} catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
}

View File: TypeUnifyTask.java

@@ -73,12 +73,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Fuer die Threads
*/
UnifyResultModel urm;
protected static int noOfThread = 0;
UnifyResultModelParallel urm;
private static int totalnoOfThread = 0;
int thNo;
protected boolean one = false;
Integer MaxNoOfThreads = 128;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
Writer logFile;
@@ -127,11 +125,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
static Integer noShortendElements = 0;
Boolean myIsCanceled = false;
volatile UnifyTaskModel usedTasks;
static Writer statistics;
protected ConstraintSetRepository constraintSetRepository;
public TypeUnifyTask() {
rules = new RuleSet();
@@ -152,11 +147,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
//statistics
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Writer statistics) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ConstraintSetRepository constraintSetRepository, Writer statistics) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, constraintSetRepository);
this.statistics = statistics;
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ConstraintSetRepository constraintSetRepository) {
synchronized (this) {
if(statistics==null){
statistics = new NullWriter();
@@ -179,8 +174,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
noOfThread++;
totalnoOfThread++;
//writeLog("thNo1 " + thNo);
thNo = totalnoOfThread;
@@ -210,8 +204,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
this.urm = urm;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
this.constraintSetRepository = constraintSetRepository;
}
}
@@ -247,13 +240,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
*/
void myCancel(Boolean b) {
myIsCanceled = true;
}
public boolean myIsCancelled() {
return myIsCanceled;
}
protected Set<Set<UnifyPair>> compute() {
if (one) {
@@ -269,7 +255,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
noOfThread--;
try {
logFile.close();
}
@@ -283,14 +268,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
throw new DebugException("Unresolved constraints: " + res.toString()); //return new HashSet<>();
}
else {
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
return res;
}
}
/*
@@ -323,12 +301,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//.collect(Collectors.toCollection(HashSet::new)));
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -510,12 +482,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// .collect(Collectors.toCollection(HashSet::new));
//Must be commented out when computeCartesianRecursive END
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<TypeUnifyTask> forks = new HashSet<>();
@@ -689,15 +655,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
System.out.print("");
}
*/
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
/* Variance determination start
* oder-constraint if there is either no base pair or different base pairs => oderConstraint = true;
* variance = 1 => argument variable
* variance = -1 => return variable
* variance = 0 => unclear
* variance = 2 => operator oder-constraints */
Set<Set<UnifyPair>> result = new HashSet<>();
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
Boolean oderConstraint = false;
@@ -715,9 +674,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
}
else {
oderConstraint = true;
@@ -726,29 +682,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else {
oderConstraint = true;
}
//Variance determination for oder-constraints
if (oderConstraint) {
if (printtag) System.out.println("nextSetasList " + nextSetasList);
Optional<Integer> optVariance =
nextSetasList.iterator()
.next()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
! (x.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT)
.map(x ->
((PlaceholderType)x.getGroundBasePair().getLhsType()).getVariance())
.reduce((n,m) -> { if ((n == 0) && (m==0)) return 0;
else if (n !=0) return n; //at least one variance must be != 0
else return m;
});
//For operator calls, variance is set to 2,
//since no receiver exists, i.e. no x.getGroundBasePair().getLhsType() instanceof PlaceholderType.
//With variance = 2, all elements of the Cartesian product are processed.
variance = optVariance.isPresent() ? optVariance.get() : 2;
}
/* Variance determination end */
//writeLog("nextSetasList: " + nextSetasList.toString());
Set<UnifyPair> nextSetElem = nextSetasList.get(0);
@@ -815,80 +748,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
/* statistics: next-var choice based on variance determination commented out, START
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasListRest.remove(a_next);
}
}
//Determine all maximal elements in nextSetasListRest;
//parallel computation is started only for these.
nextSetasListRest = oup.maxElements(nextSetasListRest);
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
writeLog("Min: a in " + variance + " "+ a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetasList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetasList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
if (//a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasListRest.remove(a_next);
}
}
//Determine all minimal elements in nextSetasListRest;
//parallel computation is started only for these.
nextSetasListRest = oup.minElements(nextSetasListRest);
}
else if (variance == 2) {
a = nextSetasList.remove(0);
//Parallel computation is started for all elements.
nextSetasListRest = new ArrayList<>(nextSetasList);
}
else if (variance == 0) {
//if a <. theta, then a maximal element is very likely
//if theta <. a, then a minimal element is very likely
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = oup.max(nextSetasList.iterator());
}
else {
a = oup.min(nextSetasList.iterator());
}
nextSetasList.remove(a);
}
else {
if (oderConstraint) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
}
else {
a = nextSetasList.remove(0);
}
}
}
next-var choice based on variance determination commented out, END */
a = nextSetasList.remove(0); //statisticsList
//writeStatistics(a.toString());
@@ -899,7 +759,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(oneElems);
writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
//Result variable for the current thread
Set<Set<UnifyPair>> res = new HashSet<>();
@@ -924,292 +783,50 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* When working in parallel, a new thread is started
* depending on the variance, which continues the work in parallel.
*/
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
if(parallel){
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, constraintSetRepository);
//Check whether the set has already been computed
forkOrig.fork();
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
if (!oderConstraint) {
/* statistics: sameEq is not considered, START
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics: sameEq is not considered, END */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
Set<UnifyPair> nSaL = nextSetasList.removeFirst();
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), constraintSetRepository);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
fork.fork();
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK START */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; moved to the end of compute
//add_res.add(fork_res);
};
/* FORK END */
res = forkOrig.join();
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; moved to the end of compute
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
};
}
//noOfThread++;
} else {
if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
if (!oderConstraint) {
/* statistics: sameEq is not considered, START
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics: sameEq is not considered, END */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = new HashSet<>();
fork_res = fork.join();
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK START */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; moved to the end of compute
//add_res.add(fork_res);
};
/* FORK END */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; moved to the end of compute
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
};
}else{
elems.add(a);
//Check whether the set has already been computed
if(!constraintSetRepository.containsSet(elems)){
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
}
//noOfThread++;
} else {
if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
writeLog("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
//nextSetasList.remove(nSaL); //PL commented back in 20-02-03
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK START */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; moved to the end of compute
//add_res.add(fork_res); //presumably wrong
};
/* FORK END */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; moved to the end of compute
add_res.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
};
}
//noOfThread++;
} else {//parallel == false or MaxNoOfThreads reached, continue sequentially
elems.add(a); //PL 2019-01-16 is this really needed? It is already done in line 859 - yes, it is needed, see line 859
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
}}}
}
//From here on, all parallel computations are merged again.
//if (hilf == 1)
@@ -2612,7 +2229,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (log && finalresult) {
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
logFile.write(str+"\n\n");
logFile.flush();
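Taken together, these hunks collapse the three variance-specific parallel branches (variance 1, -1 and 2) into a single parallel branch: fork one task for the chosen element, fork one task per remaining element, join them all, and let the sequential fallback skip any set the ConstraintSetRepository has already seen. A condensed sketch of the resulting control flow (newTask is a hypothetical shorthand for the TypeUnify2Task constructor call; logging and oder-constraint bookkeeping are omitted):

// Condensed sketch of the new parallel branch; not a verbatim excerpt of the diff.
if (parallel) {
Set<TypeUnify2Task> forks = new HashSet<>();
TypeUnify2Task forkOrig = newTask(elems, a); // task for the chosen element a
forkOrig.fork();
while (!nextSetasList.isEmpty()) { // one task per remaining element
Set<UnifyPair> nSaL = nextSetasList.removeFirst();
TypeUnify2Task fork = newTask(elems, nSaL);
forks.add(fork);
fork.fork();
}
res = forkOrig.join();
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
}
} else {
elems.add(a);
if (!constraintSetRepository.containsSet(elems)) { // skip sets that were already computed
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
}
}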

View File: UnifyResultListener.java (deleted)

@@ -1,7 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
public interface UnifyResultListener {
void onNewTypeResultFound(UnifyResultEvent evt);
}

View File: UnifyResultListenerImpl.java (deleted)

@@ -1,21 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifyResultListenerImpl implements UnifyResultListener {
List<ResultSet> results = new ArrayList<>();
public synchronized void onNewTypeResultFound(UnifyResultEvent evt) {
results.addAll(evt.getNewTypeResult());
}
public List<ResultSet> getResults() {
return results;
}
}

View File: UnifyResultModel.java (deleted)

@@ -1,41 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifyResultModel {
ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;
IFiniteClosure fc;
public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
IFiniteClosure fc) {
this.cons = cons;
this.fc = fc;
}
private List<UnifyResultListener> listeners = new ArrayList<>();
public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
listeners.add(listenerToAdd);
}
public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
}
}

View File: UnifyResultModelParallel.java (new)

@@ -0,0 +1,51 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.*;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;
public class UnifyResultModelParallel {
private ForkJoinPool pool;
private ConstraintSet<Pair> cons;
private IFiniteClosure fc;
private List<IUnifyResultListener> listeners = new ArrayList<>();
public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc){
this.cons = cons;
this.fc = fc;
}
public void setPool(ForkJoinPool pool){
this.pool = pool;
}
public void addUnifyResultListener(IUnifyResultListener listenerToAdd) {
listeners.add(listenerToAdd);
}
public void removeUnifyResultListener(IUnifyResultListener listenerToRemove) {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
pool.execute(()->{
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//if subst returns a result, something was changed
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
}
else return x; //if nothing was changed, x is returned
}).collect(Collectors.toCollection(HashSet::new));
for (IUnifyResultListener listener : listeners) {
listener.onNewTypeResultFound(eqPrimePrimeSetRet);
}
});
}
}
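notify now offloads the substitution step and the listener callbacks onto the ForkJoinPool, so results are delivered asynchronously; note that setPool must have been called first (as TypeUnify does), otherwise pool is null and notify would fail. A hypothetical wiring sketch, reusing the RecordingListener sketched earlier:

// Hypothetical wiring sketch (not part of this diff).
UnifyResultModelParallel model = new UnifyResultModelParallel(cons, fc);
model.setPool(pool); // must happen before notify() is called
model.addUnifyResultListener(new RecordingListener());
model.notify(eqPrimePrimeSet); // substitution and callbacks run on a pool thread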

View File: UnifyTaskModel.java (deleted)

@@ -1,18 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
public class UnifyTaskModel {
ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();
public synchronized void add(TypeUnifyTask t) {
usedTasks.add(t);
}
public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) {
t.myCancel(true);
}
}
}

View File: UnifyTaskModelParallel.java (new)

@@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.concurrent.ForkJoinPool;
public class UnifyTaskModelParallel {
private ForkJoinPool pool;
public void setPool(ForkJoinPool pool){
this.pool = pool;
}
public void cancel(){
if(this.pool != null){
this.pool.shutdown();
}
}
}
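Cancellation thus moves from flagging individual tasks (the removed UnifyTaskModel/myCancel mechanism) to shutting down the shared pool. As an observation rather than something the diff states: ForkJoinPool.shutdown() only stops new task submissions, while already-running tasks run to completion; shutdownNow() would additionally attempt to cancel queued tasks. A minimal usage sketch:

// Minimal sketch: cancelling a running unification via the pool.
UnifyTaskModelParallel tasks = new UnifyTaskModelParallel();
tasks.setPool(pool); // the pool created by TypeUnify
tasks.cancel(); // pool.shutdown(): no new tasks accepted, in-flight tasks finish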

View File: PlaceholderType.java

@@ -1,13 +1,10 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -23,7 +20,7 @@ public final class PlaceholderType extends UnifyType{
* Static list containing the names of all existing placeholders.
* Used for generating fresh placeholders.
*/
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
public static final Set<String> EXISTING_PLACEHOLDERS = ConcurrentHashMap.newKeySet();
/**
* Prefix of auto-generated placeholder names.
@@ -96,7 +93,7 @@ public final class PlaceholderType extends UnifyType{
* A user could later instantiate a type using the same name that is equivalent to this type.
* @return A fresh placeholder type.
*/
public synchronized static PlaceholderType freshPlaceholder() {
public static PlaceholderType freshPlaceholder() {
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'v'
// Add random chars while the name is in use.
while(EXISTING_PLACEHOLDERS.contains(name)) {
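With the synchronized modifier removed, thread safety rests entirely on EXISTING_PLACEHOLDERS being a concurrent set; the contains-then-add sequence remains a check-then-act, so two threads could in principle claim the same name between the check and the subsequent add. A hedged sketch of an atomic alternative that uses the return value of Set.add() (the PlaceholderType constructor call is an assumption about the surrounding code):

// Sketch (assumption): claim the name atomically instead of contains() followed by add().
public static PlaceholderType freshPlaceholder() {
String name = nextName;
do {
name = name + (char) (rnd.nextInt(22) + 97); // appends a random char between 'a' and 'v'
} while (!EXISTING_PLACEHOLDERS.add(name)); // add() returns false if the name is already claimed
return new PlaceholderType(name, true); // hypothetical constructor signature
}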

File diff suppressed because it is too large