5 changed files with 498 additions and 14 deletions

de/dhbwstuttgart/typeinference/unify/TypeUnify.java

@@ -3,7 +3,10 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.Writer;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -12,6 +15,20 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
public static Writer statistics;
public Set<Set<UnifyPair>> unifyParallelVirtualThreads(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Boolean log, UnifyResultModelParallelES resultModel, UnifyTaskModelParallelES taskModel){
ExecutorService executorService = Executors.newVirtualThreadPerTaskExecutor();
taskModel.setExecutorService(executorService);
resultModel.setExecutorService(executorService);
TypeUnifyInitialTask initialTask = new TypeUnifyInitialTask(undConstrains, oderConstraints, fc, true, log, resultModel, executorService, new ConstraintSetRepository());
executorService.submit(initialTask);
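// awaitTermination returns only after the executor has been shut down (presumably by the
// task model once all subtasks are finished); shutting down right after the initial submit
// is not an option here, because running tasks submit further subtasks to this executor.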
try {
executorService.awaitTermination(9999, TimeUnit.DAYS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return resultModel.getResults();
}
/**
* unify parallel without result model
* @param undConstrains
@@ -25,7 +42,7 @@ public class TypeUnify {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool, new ConstraintSetRepository());
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
return res;
@@ -44,7 +61,7 @@ public class TypeUnify {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool, new ConstraintSetRepository());
pool.invoke(unifyTask);
return resultModel;
}
@@ -63,7 +80,7 @@ public class TypeUnify {
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool, statistics);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool, new ConstraintSetRepository(), statistics);
pool.invoke(unifyTask);
unifyTask.join();
@@ -90,7 +107,7 @@ public class TypeUnify {
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallelFJP resultModel, UnifyTaskModelParallelFJP taskModel) {
resultModel.setPool(ForkJoinPool.commonPool());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObjectFJP(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObjectFJP(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool(), new ConstraintSetRepository());
unifyTask.statisticsFile = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;

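For orientation, a minimal call into the new virtual-thread entry point; cons, finiteClosure, undConstraints and oderConstraints are assumed to be built as in the test added at the end of this changeset:

    TypeUnify unifyAlgo = new TypeUnify();
    UnifyResultModelParallelES urm = new UnifyResultModelParallelES(cons, finiteClosure);
    UnifyTaskModelParallelES tasks = new UnifyTaskModelParallelES();
    Set<Set<UnifyPair>> solution = unifyAlgo.unifyParallelVirtualThreads(undConstraints, oderConstraints, finiteClosure, false, urm, tasks);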
de/dhbwstuttgart/typeinference/unify/TypeUnifyInitialTask.java

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
@@ -7,12 +8,10 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.function.BiFunction;
@@ -50,19 +49,20 @@ public class TypeUnifyInitialTask implements Runnable{
protected boolean parallel;
//Attributes for the recursion optimization -> do not recompute paths that have already been computed in another thread
protected volatile ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
protected volatile ConstraintSetRepository constraintSetRepository;
public TypeUnifyInitialTask(){
this.rules = new RuleSetFJP();
}
public TypeUnifyInitialTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Boolean log, UnifyResultModelParallelES urm, ExecutorService executorService) {
public TypeUnifyInitialTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Boolean log, UnifyResultModelParallelES urm, ExecutorService executorService, ConstraintSetRepository constraintSetRepository) {
this.eq = eq;
this.oderConstraintsField = oderConstraints;
this.fc = fc;
this.parallel = parallel;
this.log = log;
this.executorService = executorService;
this.constraintSetRepository = constraintSetRepository;
try {
if (log) {
@@ -78,7 +78,423 @@ public class TypeUnifyInitialTask implements Runnable{
}
@Override
public void run() {
Set<UnifyPair> neweq = new HashSet<>(eq);
/* Single-element or-constraints are converted into and-constraints */
oderConstraintsField.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).forEach(neweq::addAll);
ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size() > 1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, new HashSet<>());
if (parallel) {
logFile.close();
} else {
logFile.closeNonThreaded();
}
if (isUndefinedPairSetSet(res)) {
//for debugging purposes
throw new DebugException("Unresolved constraints: " + res);
}
}
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint){
/*
* Filter out ? extends ? extends Theta
*/
Set<UnifyPair> doubleExt = eq.stream().filter(UnifyPair::wrongWildcard).peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
if (!doubleExt.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(doubleExt);
return ret;
}
/*
* Perform the occurs check
*/
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> {
UnifyType lhs, rhs;
return (lhs = x.getLhsType()) instanceof PlaceholderType
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
})
.peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
if (!ocurrPairs.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs);
return ret;
}
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0;
Set<UnifyPair> eq0Prime;
Optional<Set<UnifyPair>> eqSubst = Optional.of(eq);
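// Fixed point: the loop ends as soon as subst can no longer substitute anything (eqSubst empty).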
do {
eq0Prime = eqSubst.get();
eq0 = applyTypeUnificationRules(eq0Prime, fc);
eqSubst = rules.subst(eq0, oderConstraints);
} while (eqSubst.isPresent());
eq0.forEach(UnifyPair::disableCondWildcards);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel sets. 8 of 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<? extends Set<UnifyPair>>> topLevelSets = new ArrayList<>();
//System.out.println(eq2s);
if (!eq1s.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
//TODO: apply the occurs check here and identify failures as errors
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if (!bufferSet.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<Set<? extends Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
//PL 2017-09-20: In calculatePairSets, O .< java.lang.Integer is possibly
//not evaluated, Faculty example in step 1
//PL 2017-10-03 solved; should still be tested with FCs with smaller
//types.
// If pairs occurred that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if (!undefinedPairs.isEmpty()) {
Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair).collect(Collectors.toCollection(HashSet::new));
error.add(undefinedPairs);
undefinedPairs.forEach(x -> writeLog("AllSubst: " + x.getAllSubstitutions().toString()));
return error;
}
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
//Alternative: do NOT build the cartesian product of the second-level sets
for (Set<Set<? extends Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
topLevelSets.addAll(secondLevelSet);
}
//Call of computeCartesianRecursive START
return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, methodSignatureConstraint);
}
protected Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint){
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<UnifyPair> eqPrime = new HashSet<>();
setToFlatten.stream().forEach(eqPrime::addAll);
/*
* Step 5: Substitution
*/
writeLog("vor Subst: " + oderConstraints);
String ocString = oderConstraints.toString();
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime, newOderConstraints);
if (!ocString.equals(newOderConstraints.toString())) {
writeLog("nach Subst: " + newOderConstraints);
}
{// sequential (Step 6b is included)
if (eqPrime.equals(eq) && eqPrimePrime.isEmpty()
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) commented out and
//PL 2017-09-29 replaced by this //(!eqPrimePrime.isPresent())
//PL 2018-05-18 both conditions must hold, since eqPrime represents changes in everything except subst
//and eqPrimePrime represents changes in subst.
eqPrimePrimeSet.add(eqPrime);
if (isSolvedForm(eqPrime)) {
writeLog("eqPrime:" + eqPrime + "\n");
/* method constraint sets are added to the result
* Start
*/
eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint));
//substitutions in the method constraint sets are applied
eqPrimePrimeSet = eqPrimePrimeSet.stream().map(
x -> {
Optional<Set<UnifyPair>> help = rules.subst(x);
return help.isPresent() ?
help.get() :
x;
}).collect(Collectors.toSet());
/*
* End
*/
urm.notify(eqPrimePrimeSet);
}
} else if (eqPrimePrime.isPresent()) {
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, methodSignatureConstraint);
eqPrimePrimeSet.addAll(unifyres);
} else {
Set<Set<UnifyPair>> unifyres = unify(eqPrime, newOderConstraints, fc, parallel, methodSignatureConstraint);
eqPrimePrimeSet.addAll(unifyres);
}
}
/*
* Step 7: Filter empty sets.
*/
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
writeLog("Result1 " + eqPrimePrimeSet);
}
return eqPrimePrimeSet;
}
protected Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
//oneElems: all one-element sets that contain just one pair
//a <. theta, theta <. a, or a =. theta
Set<Set<UnifyPair>> oneElems = topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
//optNextSet: a set with more than one element, if present
Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size() > 1).findAny();
if (optNextSet.isEmpty()) {//all elements are one-element sets
return unify2(oneElems, eq, oderConstraints, fc, parallel, methodSignatureConstraint);
}
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
List<Set<UnifyPair>> nextSetasList = new ArrayList<>(nextSet);
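// Each element of nextSetasList is one alternative of the multi-element set; the loop below
// explores them one after another, in parallel mode as submitted subtasks.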
Set<Set<UnifyPair>> result = new HashSet<>();
/* Variance determination start
* oderConstraint: if there is either no base pair or there are different base pairs => oderConstraint = true;
* variance = 1 => argument variable
* variance = -1 => return variable
* variance = 0 => unclear
* variance = 2 => operator oderConstraints */
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.getFirst());
UnifyPair fstBasePair = zeroNextElem.removeFirst().getBasePair();
boolean oderConstraint = false;
/*if (fstBasePair != null) {
boolean sameBase = true;
for (UnifyPair ele : nextSetasList.getFirst()) {//check whether a <. ty is the base pair or whether it is overloading
sameBase = sameBase && ele.getBasePair() != null && ele.getBasePair().equals(fstBasePair);
}
if (sameBase) { //added PL 2020-02-30
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a == b) return a;
else return 0;
})) //2 occurs in particular with or-constraints
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
} else {
oderConstraint = true;
}
} else {
oderConstraint = true;
}
/* Variance determination end */
Set<UnifyPair> a;
while (!nextSetasList.isEmpty()) {
/* List of the cases in which the receiver contains "? extends" or does not contain it.
* As a rule this is exactly one element.
* This element is later deleted from nextSetasList if the respective other element
* led to success.
*/
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
a = nextSetasList.removeFirst(); //statisticsList
//writeStatistics(a.toString());
if (oderConstraint) {//method constraints are stored for the bytecode generation of method calls
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
}
Set<Set<UnifyPair>> elems = new HashSet<>(oneElems);
//result variable for the current thread
Set<Set<UnifyPair>> res = new HashSet<>();
//set of results of the forked threads
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
Set<Set<UnifyPair>> aParDef = new HashSet<>();
/* When working in parallel, depending on the variance a new thread
* is started that continues the work in parallel.
*/
if(parallel){
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
TypeUnifySubTask subTask = new TypeUnifySubTask(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, log, urm, methodSignatureConstraint, this.executorService, this.constraintSetRepository);
//check whether the set has already been computed
if(!constraintSetRepository.containsSet(newElemsOrig)){
this.executorService.submit(subTask);
}
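// If the repository already contains the set, another task has computed (or is computing) it,
// so this subtask is dropped; its results arrive through the shared result model urm.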
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.removeFirst();
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
subTask = new TypeUnifySubTask(newElems, newEq, newOderConstraints, nSaL, fc, parallel, log, urm, new HashSet<>(methodSignatureConstraint), this.executorService, this.constraintSetRepository);
//check whether the set has already been computed
if(!constraintSetRepository.containsSet(newElems)){
this.executorService.submit(subTask);
}
}
/*forks.forEach(x -> writeLog("wait: " + x.thNo));
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = new HashSet<>();
if(fork.hasBeenCalculated){
fork_res = fork.join();
add_res.add(fork_res);
}
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
}*/
}else{
elems.add(a);
//check whether the set has already been computed
if(!constraintSetRepository.containsSet(elems)){
res = unify2(elems, eq, oderConstraints, fc, parallel, new HashSet<>(methodSignatureConstraint));
}
}
//From here on, all parallel computations are merged again.
if (oderConstraint) {//if another element is taken from nextSetasList, the previous method signature must be deleted
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, delete all error cases
result = res;
} else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
result.addAll(res);
}
}
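// Merge policy: a defined result replaces an undefined one; results of equal definedness accumulate.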
if (parallel) {
for (Set<Set<UnifyPair>> par_res : add_res) {
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, delete all error cases
result = par_res;
} else {
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
result.addAll(par_res);
}
}
}
}
if (isUndefinedPairSetSet(res) && aParDef.isEmpty()) {
int nofstred;
Set<UnifyPair> abhSubst = res.stream()
.map(b ->
b.stream()
.map(x -> x.getAllSubstitutions())
.reduce((y, z) -> {
y.addAll(z);
return y;
}).get())
.reduce((y, z) -> {
y.addAll(z);
return y;
}).get();
abhSubst.addAll(
res.stream()
.map(b ->
b.stream()
.map(x -> x.getThisAndAllBases()) //getAllBases replaced by getThisAndAllBases, because an error can already lie in the UnifyPair itself.
.reduce((y, z) -> {
y.addAll(z);
return y;
}).get())
.reduce((y, z) -> {
y.addAll(z);
return y;
}).get()
);
Set<UnifyPair> b = a;//effectively final copy of a
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
.collect(Collectors.toCollection(HashSet::new));
int len = nextSetasList.size();
nofstred = nextSetasList.size();
writeLog("res (undef): " + res.toString() + "\n" +
"abhSubst: " + abhSubst.toString() + "\n" +
"Durchschnitt: " + durchschnitt.toString() + "\n" +
"nextSet: " + nextSet.toString() + "\n" +
"nextSetasList: " + nextSetasList.toString() + "\n" +
"Number first erased Elements (undef): " + (len - nofstred) + "\n" +
"Number second erased Elements (undef): " + (nofstred - nextSetasList.size()) + "\n" +
"Number erased Elements (undef): " + (len - nextSetasList.size()));
}
}
return result;
}
protected boolean isUndefinedPairSet(Set<UnifyPair> s) {

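ConstraintSetRepository itself is not part of this diff. For the containsSet calls above to prevent duplicate work across threads, the membership test and the insertion have to happen atomically; a minimal sketch under that assumption (hypothetical, not the project's actual class):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch of the repository used above.
    public class ConstraintSetRepository {
        // Hashes of constraint sets that some task has already taken on.
        private final Set<Integer> seen = ConcurrentHashMap.newKeySet();

        // Returns true if the set was seen before; otherwise records it and returns false.
        // add() is atomic, so two racing tasks cannot both get false for the same hash.
        // (Keying on hashCode() keeps the sketch short; collisions would need extra handling.)
        public boolean containsSet(Set<Set<UnifyPair>> sets) {
            return !seen.add(sets.hashCode());
        }
    }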
de/dhbwstuttgart/typeinference/unify/TypeUnifySubTask.java

@@ -13,7 +13,7 @@ public class TypeUnifySubTask extends TypeUnifyInitialTask{
protected Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnifySubTask(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Boolean log, UnifyResultModelParallelES urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ExecutorService executorService, ConstraintSetRepository constraintSetRepository) {
super(eq, oderConstraints, fc, parallel, log, urm, executorService);
super(eq, oderConstraints, fc, parallel, log, urm, executorService, constraintSetRepository);
this.setToFlatten = setToFlatten;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
this.constraintSetRepository = constraintSetRepository;

de/dhbwstuttgart/typeinference/unify/UnifyResultModelParallelES.java

@@ -17,13 +17,14 @@ public class UnifyResultModelParallelES {
private ConstraintSet<Pair> cons;
private IFiniteClosure fc;
private List<IUnifyResultListener> listeners = new ArrayList<>();
private Set<Set<UnifyPair>> results = new HashSet<>();
public UnifyResultModelParallelES(ConstraintSet<Pair> cons, IFiniteClosure fc){
this.cons = cons;
this.fc = fc;
}
public void setPool(ExecutorService executorService){
public void setExecutorService(ExecutorService executorService){
this.executorService = executorService;
}
public void addUnifyResultListener(IUnifyResultListener listenerToAdd) {
@@ -32,6 +33,9 @@ public class UnifyResultModelParallelES {
public void removeUnifyResultListener(IUnifyResultListener listenerToRemove) {
listeners.remove(listenerToRemove);
}
public Set<Set<UnifyPair>> getResults() {
return results;
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
executorService.submit(()->{
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
@@ -48,6 +52,7 @@ public class UnifyResultModelParallelES {
for (IUnifyResultListener listener : listeners) {
listener.onNewTypeResultFound(eqPrimePrimeSetRet);
}
results.addAll(eqPrimePrimeSetRet);
});
}
}
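A caveat on the new results set: notify runs its collecting work on executor threads, so several tasks can call results.addAll concurrently, and a plain HashSet is not safe under concurrent mutation. A minimal alternative, assuming concurrent appends are indeed possible here:

    // thread-safe replacement for the plain HashSet field above
    private final Set<Set<UnifyPair>> results = ConcurrentHashMap.newKeySet();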

typeinference/UnifyTest.java

@@ -3,9 +3,7 @@ package typeinference;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallelFJP;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModelParallelFJP;
import de.dhbwstuttgart.typeinference.unify.*;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.apache.commons.io.output.NullWriter;
@@ -96,7 +94,7 @@ public class UnifyTest {
return pair1;
}
@Test
public void unifyTest(){
public void unifyTestForkJoinPool(){
UnifyType type1;
UnifyType type2;
@@ -142,5 +140,53 @@ public class UnifyTest {
System.out.println(solution.size());
System.out.println(solution);
}
@Test
public void unifyTestVirtualThreadExecutor(){
UnifyType type1;
UnifyType type2;
Set<UnifyPair> undConstraints = new HashSet<>();
undConstraints.add(genPairListOfInteger("a"));
undConstraints.add(genPairListOfString("a"));
undConstraints.add(genPairListOfInteger("b"));
undConstraints.add(genPairListOfString("b"));
undConstraints.add(genPairListOfInteger("c"));
undConstraints.add(genPairListOfString("c"));
undConstraints.add(genPairListOfInteger("d"));
undConstraints.add(genPairListOfString("d"));
undConstraints.add(genPairListOfInteger("e"));
undConstraints.add(genPairListOfString("e"));
undConstraints.add(genPairListOfInteger("e1"));
undConstraints.add(genPairListOfString("e1"));
undConstraints.add(genPairListOfInteger("e2"));
undConstraints.add(genPairListOfString("e2"));
undConstraints.add(genPairListOfInteger("e3"));
undConstraints.add(genPairListOfString("e3"));
List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();
Set<UnifyPair> constraints = new HashSet<>();
type1 = new ReferenceType("Object");
type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("X")));
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("Integer");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("String");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());
TypeUnify unifyAlgo = new TypeUnify();
ConstraintSet<Pair> cons = new ConstraintSet<>();
UnifyResultModelParallelES urm = new UnifyResultModelParallelES(cons, finiteClosure);
UnifyTaskModelParallelES tasks = new UnifyTaskModelParallelES();
Set<Set<UnifyPair>> solution = unifyAlgo.unifyParallelVirtualThreads(undConstraints, oderConstraints, finiteClosure, false, urm, tasks);
System.out.println(solution.size());
System.out.println(solution);
}
}