forked from i21017/JavaTypeUnify

Compare commits: performanc...main (15 commits)

| SHA1 |
|---|
| 7818722c96 |
| 4dec73cada |
| 7d3ef78ccd |
| d8f744a774 |
| 48293b3243 |
| bab3511259 |
| 46eb7b12e6 |
| 80c3e5d02c |
| cb9798dd5a |
| 0cdbc7a19a |
| 7e5e63dc5f |
| 69402677f7 |
| 2ffa86e821 |
| f75d646a73 |
| ce2401f3eb |
IUnifyResultListener.java (new file):

@@ -0,0 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.Set;

public interface IUnifyResultListener {

    void onNewTypeResultFound(Set<Set<UnifyPair>> evt);

}
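For orientation, a minimal implementation of the new listener interface could look like the following sketch. The class ResultCollector is hypothetical and not part of this changeset; it only illustrates how a consumer might collect results delivered by the parallel unification.

import java.util.HashSet;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.IUnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

// Hypothetical listener: accumulates every result set it is notified about.
public class ResultCollector implements IUnifyResultListener {

    private final Set<Set<UnifyPair>> results = new HashSet<>();

    @Override
    public synchronized void onNewTypeResultFound(Set<Set<UnifyPair>> evt) {
        // Notifications may arrive from ForkJoinPool worker threads, hence the synchronization.
        results.addAll(evt);
    }

    public synchronized Set<Set<UnifyPair>> getResults() {
        return new HashSet<>(results);
    }
}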
TypeUnify.java:

@@ -3,16 +3,12 @@ package de.dhbwstuttgart.typeinference.unify;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify {
@@ -28,9 +24,12 @@ public class TypeUnify {
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
        ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
        ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
        usedTasks.setPool(pool);
        ret.setPool(pool);
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository);
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
        try {
@@ -54,9 +53,12 @@ public class TypeUnify {
     * @param ret
     * @return
     */
    public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
    public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
        ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
        ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
        usedTasks.setPool(pool);
        ret.setPool(pool);
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository);
        pool.invoke(unifyTask);
        return ret;
    }
@@ -72,10 +74,12 @@ public class TypeUnify {
     * @param ret
     * @return
     */
    public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
                new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository(), statistics);
    public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
        ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
        ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
        usedTasks.setPool(pool);
        ret.setPool(pool);
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, constraintSetRepository, statistics);
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
        try {
@@ -108,8 +112,9 @@ public class TypeUnify {
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel ret, UnifyTaskModelParallel usedTasks) {
        ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, constraintSetRepository);
        unifyTask.statistics = statistics;
        Set<Set<UnifyPair>> res = unifyTask.compute();
        try {
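Across these hunks the TypeUnify entry points switch from UnifyResultModel/UnifyTaskModel to the parallel variants, and the ForkJoinPool is now created inside TypeUnify and handed to both models via setPool. A hedged sketch of the new calling convention follows; the wrapper class UnifyCaller, its parameter names, and the assumption that TypeUnify has a default constructor are not taken from the diff.

import java.io.Writer;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.*;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyCaller {

    // Sketch: the constraints, finite closure and log writer are assumed to be built elsewhere.
    public static Set<Set<UnifyPair>> run(Set<UnifyPair> undConstraints,
                                          List<Set<Constraint<UnifyPair>>> oderConstraints,
                                          ConstraintSet<Pair> cons,
                                          IFiniteClosure fc,
                                          Writer logFile) {
        UnifyResultModelParallel ret = new UnifyResultModelParallel(cons, fc);
        UnifyTaskModelParallel usedTasks = new UnifyTaskModelParallel();

        // The caller only wires up the models; the ForkJoinPool is created inside unify
        // and injected into both models via setPool.
        return new TypeUnify().unify(undConstraints, oderConstraints, fc, logFile, false, ret, usedTasks);
    }
}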
TypeUnify2Task.java:

@@ -15,66 +15,58 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify2Task extends TypeUnifyTask {

    Set<Set<UnifyPair>> setToFlatten;
    Set<UnifyPair> methodSignatureConstraintUebergabe;

    //statistics
    TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            Set<UnifyPair> nextSetElement,
            IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
            Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository, Writer statistics) {
        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe, constraintSetRepository);

    }

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, constraintSetRepository);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
        this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
    }

    Set<UnifyPair> getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<Set<UnifyPair>> compute() {
        if (one) {
            System.out.println("two");
        }
        one = true;
        Set<Set<UnifyPair>> res = new HashSet<>();
    Set<Set<UnifyPair>> setToFlatten;
    Set<UnifyPair> methodSignatureConstraintUebergabe;

        if(!constraintSetRepository.containsSet(setToFlatten)){
            res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
        }
    //statistics
    TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            Set<UnifyPair> nextSetElement,
            IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm,
            Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository, Writer statistics) {
        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraintUebergabe, constraintSetRepository);

    }

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, constraintSetRepository);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
        this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
    }

    Set<UnifyPair> getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<Set<UnifyPair>> compute() {
        if (one) {
            System.out.println("two");
        }
        one = true;
        Set<Set<UnifyPair>> res = new HashSet<>();

        if (!constraintSetRepository.containsSet(setToFlatten)) {
            res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
        }
        /*if (isUndefinedPairSetSet(res)) {
            return new HashSet<>(); }
        else
        */
        //writeLog("xxx");
        //noOfThread--;
        synchronized (usedTasks) {
            if (this.myIsCancelled()) {
                return new HashSet<>();
            }
            else {
                return res;
            }
        }
    }

    public void closeLogFile() {
        */
        //writeLog("xxx");
        //noOfThread--;
        return res;
    }

        try {
            logFile.close();
        }
        catch (IOException ioE) {
            System.err.println("no log-File" + thNo);
        }

    }
    public void closeLogFile() {

        try {
            logFile.close();
        } catch (IOException ioE) {
            System.err.println("no log-File" + thNo);
        }

    }
}
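ConstraintSetRepository itself is not part of this diff. Judging only from how it is called here (containsSet is checked before unify2 is attempted, and the same instance is shared across forked tasks), it acts as a deduplication store for constraint sets that have already been processed. A purely hypothetical minimal version, named differently to make clear it is a sketch and may not match the real class:

import java.util.HashSet;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

// Hypothetical sketch: remembers which constraint sets were already handled so that
// concurrently forked tasks can skip work another task has begun. The real class may
// differ, e.g. by storing hashes instead of the sets themselves.
public class ConstraintSetRepositorySketch {

    private final Set<Set<Set<UnifyPair>>> seen = new HashSet<>();

    // Returns true if the set was already recorded; records it otherwise.
    public synchronized boolean containsSet(Set<Set<UnifyPair>> setToFlatten) {
        return !seen.add(setToFlatten);
    }
}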
TypeUnifyTask.java:

@@ -73,7 +73,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
    /**
     * Fuer die Threads
     */
    UnifyResultModel urm;
    UnifyResultModelParallel urm;
    private static int totalnoOfThread = 0;
    int thNo;
    protected boolean one = false;
@@ -125,10 +125,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

    static Integer noShortendElements = 0;

    Boolean myIsCanceled = false;

    volatile UnifyTaskModel usedTasks;

    static Writer statistics;
    protected ConstraintSetRepository constraintSetRepository;
@@ -151,11 +147,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     */

    //statistics
    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ConstraintSetRepository constraintSetRepository, Writer statistics) {
        this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, constraintSetRepository);
    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ConstraintSetRepository constraintSetRepository, Writer statistics) {
        this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, constraintSetRepository);
        this.statistics = statistics;
    }
    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ConstraintSetRepository constraintSetRepository) {
    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ConstraintSetRepository constraintSetRepository) {
        synchronized (this) {
            if(statistics==null){
                statistics = new NullWriter();
@@ -208,8 +204,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
            rules = new RuleSet(logFile);
            this.rekTiefeField = rekTiefe;
            this.urm = urm;
            this.usedTasks = usedTasks;
            this.usedTasks.add(this);
            this.constraintSetRepository = constraintSetRepository;
        }
    }
@@ -246,13 +240,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
        }
    }
    */
    void myCancel(Boolean b) {
        myIsCanceled = true;
    }

    public boolean myIsCancelled() {
        return myIsCanceled;
    }

    protected Set<Set<UnifyPair>> compute() {
        if (one) {
@@ -281,14 +268,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                throw new DebugException("Unresolved constraints: " + res.toString()); //return new HashSet<>();
            }
            else {
                synchronized (usedTasks) {
                    if (this.myIsCancelled()) {
                        return new HashSet<>();
                    }
                    else {
                        return res;
                    }
                }
                return res;
            }
        }
        /*
@@ -321,12 +301,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

        //.collect(Collectors.toCollection(HashSet::new)));

        synchronized (usedTasks) {
            if (this.myIsCancelled()) {
                return new HashSet<>();
            }
        }

        rekTiefe++;
        nOfUnify++;
        writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -508,12 +482,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
        // .collect(Collectors.toCollection(HashSet::new));
        //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE

        synchronized (usedTasks) {
            if (this.myIsCancelled()) {
                return new HashSet<>();
            }
        }

        Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();

        Set<TypeUnifyTask> forks = new HashSet<>();
@@ -821,7 +789,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
        Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
        List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
        newElemsOrig.add(a);
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, constraintSetRepository);
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, constraintSetRepository);
        //Überprüfen, ob das Set bereits berechnet wurde
        forkOrig.fork();

        while (!nextSetasList.isEmpty()) {
@@ -835,7 +804,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
            Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
            newElems.add(nSaL);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), constraintSetRepository);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), constraintSetRepository);
            forks.add(fork);
            fork.fork();
        }
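The forkOrig.fork() and fork.fork() calls above follow the standard ForkJoinPool pattern: fork() schedules a subtask asynchronously and join() later waits for its result. The following self-contained illustration of that pattern is generic and unrelated to the type-unification code itself:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Generic fork/join illustration: split the work, fork one half, compute the other, join.
public class SumTask extends RecursiveTask<Long> {
    private final long[] values;
    private final int from, to;

    public SumTask(long[] values, int from, int to) {
        this.values = values;
        this.from = from;
        this.to = to;
    }

    @Override
    protected Long compute() {
        if (to - from <= 1_000) {           // small enough: compute directly
            long sum = 0;
            for (int i = from; i < to; i++) sum += values[i];
            return sum;
        }
        int mid = (from + to) / 2;
        SumTask left = new SumTask(values, from, mid);
        left.fork();                         // run the left half on another worker
        long right = new SumTask(values, mid, to).compute();
        return right + left.join();          // wait for the forked half
    }

    public static void main(String[] args) {
        long[] data = new long[100_000];
        java.util.Arrays.fill(data, 1);
        long total = new ForkJoinPool().invoke(new SumTask(data, 0, data.length));
        System.out.println(total);           // prints 100000
    }
}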
UnifyResultListener.java (deleted file):

@@ -1,7 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

public interface UnifyResultListener {

    void onNewTypeResultFound(UnifyResultEvent evt);

}
UnifyResultListenerImpl.java (deleted file):

@@ -1,21 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultListenerImpl implements UnifyResultListener {

    List<ResultSet> results = new ArrayList<>();

    public synchronized void onNewTypeResultFound(UnifyResultEvent evt) {
        results.addAll(evt.getNewTypeResult());
    }

    public List<ResultSet> getResults() {
        return results;
    }

}
UnifyResultModel.java (deleted file):

@@ -1,41 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultModel {

    ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;

    IFiniteClosure fc;

    public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
            IFiniteClosure fc) {
        this.cons = cons;
        this.fc = fc;
    }

    private List<UnifyResultListener> listeners = new ArrayList<>();

    public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
        listeners.add(listenerToAdd);
    }

    public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
        listeners.remove(listenerToRemove);
    }

    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {

    }
}
UnifyResultModelParallel.java (new file):

@@ -0,0 +1,51 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.*;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;

public class UnifyResultModelParallel {
    private ForkJoinPool pool;
    private ConstraintSet<Pair> cons;
    private IFiniteClosure fc;
    private List<IUnifyResultListener> listeners = new ArrayList<>();

    public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc){
        this.cons = cons;
        this.fc = fc;
    }

    public void setPool(ForkJoinPool pool){
        this.pool = pool;
    }
    public void addUnifyResultListener(IUnifyResultListener listenerToAdd) {
        listeners.add(listenerToAdd);
    }
    public void removeUnifyResultListener(IUnifyResultListener listenerToRemove) {
        listeners.remove(listenerToRemove);
    }
    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
        pool.execute(()->{
            Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
                Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
                    if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
                    return y; //alle Paare a <.? b erden durch a =. b ersetzt
                }).collect(Collectors.toCollection(HashSet::new)));
                if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
                    return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
                }
                else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
            }).collect(Collectors.toCollection(HashSet::new));

            for (IUnifyResultListener listener : listeners) {
                listener.onNewTypeResultFound(eqPrimePrimeSetRet);
            }
        });
    }
}
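Since notify() runs the result post-processing on the injected pool and then fans out to the registered listeners, wiring the model up is just a matter of setting a pool and adding listeners. A small, hypothetical usage sketch (the helper class ResultModelWiring and its parameters are assumptions, not part of the diff; IUnifyResultListener has a single abstract method, so a lambda suffices):

import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;

public class ResultModelWiring {

    // Hypothetical helper: attach a pool and a lambda listener to the new result model.
    public static UnifyResultModelParallel wire(ConstraintSet<Pair> cons, IFiniteClosure fc) {
        UnifyResultModelParallel model = new UnifyResultModelParallel(cons, fc);
        model.setPool(new ForkJoinPool());
        model.addUnifyResultListener(resultSets ->
                System.out.println("got " + resultSets.size() + " result set(s)"));
        return model;
    }
}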
UnifyTaskModel.java (deleted file):

@@ -1,18 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;

public class UnifyTaskModel {

    ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();

    public synchronized void add(TypeUnifyTask t) {
        usedTasks.add(t);
    }

    public synchronized void cancel() {
        for(TypeUnifyTask t : usedTasks) {
            t.myCancel(true);
        }
    }
}
UnifyTaskModelParallel.java (new file):

@@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;


import java.util.concurrent.ForkJoinPool;

public class UnifyTaskModelParallel {
    private ForkJoinPool pool;

    public void setPool(ForkJoinPool pool){
        this.pool = pool;
    }

    public void cancel(){
        if(this.pool != null){
            this.pool.shutdown();
        }
    }
}
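Unlike the removed UnifyTaskModel, which registered every TypeUnifyTask and cancelled them one by one, the parallel variant only holds the shared ForkJoinPool and shuts it down; individual tasks no longer need to register themselves. Note that ForkJoinPool.shutdown() only stops new task submissions, it does not interrupt tasks that are already running. A tiny, hypothetical usage sketch:

import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.unify.UnifyTaskModelParallel;

public class CancellationExample {
    public static void main(String[] args) {
        UnifyTaskModelParallel tasks = new UnifyTaskModelParallel();
        tasks.setPool(new ForkJoinPool());
        // Later, e.g. from another thread: stop accepting new work on the shared pool.
        tasks.cancel(); // internally calls ForkJoinPool.shutdown() if a pool was set
    }
}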
File diff suppressed because it is too large