2 Commits

Author            SHA1        Message                                                      Date
Fabian Holzwarth  a0c11b60e8  Remove unnecessary parameter and fix some parallelization   2025-06-07 16:11:34 +02:00
Fabian Holzwarth  4cddf73e6d  feat: small fixes for correct parameters                     2025-06-07 14:38:18 +02:00
9 changed files with 56 additions and 484 deletions

View File

@@ -9,8 +9,8 @@ mkdir $TDIR
cd $TDIR
git clone $REPO .
git checkout feat/master-unify-webservice-dev
git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
git checkout feat/separate-variance
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
@@ -24,7 +24,7 @@ date "+%Y.%m.%d %H:%M:%S"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
mvn clean && mvn test
mvn clean compile -X && mvn test
echo -e "\Cleanup... "

View File

@@ -16,7 +16,7 @@ public class TypeUnify {
* parallel unify without result model
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(true), 0);
ForkJoinPool pool = this.createThreadPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
@@ -44,7 +44,7 @@ public class TypeUnify {
* parallel unify that returns a UnifyResultModel after all results have been collected
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(true), 0);
ForkJoinPool pool = this.createThreadPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
@@ -70,7 +70,7 @@ public class TypeUnify {
* sequential unify with oder-constraints
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
unifyContext.logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
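All three entry points now differ only in the parallel flag baked into the UnifyContext and in whether a ForkJoinPool drives the root task. A minimal sketch of that shared shape, assuming the project types behave as shown in the diff (the helper name runTask is hypothetical):

import java.util.Set;
import java.util.concurrent.ForkJoinPool;

// Hypothetical condensation of unify/unifyParallel/unifyOderConstraints:
// the parallel variants hand the root task to a ForkJoinPool, the
// sequential variant computes on the calling thread.
Set<Set<UnifyPair>> runTask(TypeUnifyTask unifyTask, boolean parallel) {
    if (parallel) {
        ForkJoinPool pool = this.createThreadPool();
        pool.invoke(unifyTask);
        return unifyTask.join();
    }
    return unifyTask.compute();
}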

View File

@@ -4,6 +4,7 @@ package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.cartesianproduct.VarianceCase;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
@@ -43,6 +44,7 @@ import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
@@ -640,8 +642,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
Logger.print("Start computeCartesianRecursive with " + topLevelSets.size() + " topLevelSets");
Set<Set<UnifyPair>> singleElementSets = TypeUnifyTaskHelper.getSingleElementSets(topLevelSets);
singleElementSets.forEach(x -> {
if (x instanceof Constraint)
@@ -722,82 +722,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> a = null;
Set<UnifyPair> a_last = null;
while (!nextSetAsList.isEmpty()) {
Set<UnifyPair> a_last = a;
VarianceCase varianceCase = VarianceCase.createFromVariance(variance, oderConstraint, this, context);
/* List of cases for parallel processing.
* Contains elements that are not in relation to the current case in
* variable a. These must be processed in any case,
* which is why their computation is started in parallel.
*/
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
/* List of cases in which the receiver does or does not contain "? extends".
* As a rule this is exactly one element.
* This element is later removed from nextSetasList once the respective other element
* has led to success.
*/
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetAsList.toString());
if (variance == 1) {
a = oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Determine all maximal elements in nextSetasListRest;
//parallel computation is started only for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = oup.maxElements(
nextSetAsList.stream().filter(a_next -> oup.compare(finalA, a_next) != 1).toList()
);
} else if (variance == -1) {
a = oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements in nextSetasListRest;
//parallel computation is started only for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = oup.minElements(
nextSetAsList.stream().filter(a_next -> oup.compare(finalA, a_next) != -1).toList()
);
} else if (variance == 2) {
a = nextSetAsList.removeFirst();
//Parallel computation is started for all elements.
nextSetasListRest = new ArrayList<>(nextSetAsList);
} else if (variance == 0) {
//if a <. theta, then a maximal element is very likely
//if theta <. a, then a minimal element is very likely
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = oup.max(nextSetAsList.iterator());
} else {
a = oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (oderConstraint) {
a = oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
}
varianceCase.selectNextData(this, nextSetAsList, optOrigPair);
if (oderConstraint) {//method constraints are stored for the bytecode generation of method calls
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
//System.out.println("a: " +a);
@@ -807,7 +742,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
i++;
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + a.toString() + "\n");
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
//result variable for the current thread
Set<Set<UnifyPair>> currentThreadResult;
@@ -820,8 +755,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* If (a \in theta) \in a leads to a contradiction in oneElems,
* a is discarded and we move on to the next element of nextSetasList
*/
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
a = null;
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(varianceCase.a, sameEqSet, result)) {
a_last = null;
noShortendElements++;
continue;
}
@@ -829,226 +764,26 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* When working in parallel, depending on the variance a new thread
* is started that continues the work in parallel.
*/
if (parallel && (variance == 1)) {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context.newWithParallel(parallel), rekTiefe, methodSignatureConstraint);
//forks.add(forkOrig);
if (this.myIsCancelled()) {
return new HashSet<>();
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
if (!oderConstraint) {
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context.newWithParallel(parallel), rekTiefe, new HashSet<>(methodSignatureConstraint));
forks.add(fork);
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN */
currentThreadResult = forkOrig.compute();
if (this.myIsCancelled()) {
return new HashSet<>();
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
//forkResults.add(fork_res);;
/* FORK END */
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
if (this.myIsCancelled()) {
return new HashSet<>();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((isUndefinedPairSetSet(fork_res))).toString());
forkResults.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
}
//noOfThread++;
} else if (parallel && (variance == -1)) {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context.newWithParallel(parallel), rekTiefe, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
if (this.myIsCancelled()) {
return new HashSet<>();
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
if (!oderConstraint) {
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context.newWithParallel(parallel), rekTiefe, new HashSet<>(methodSignatureConstraint));
forks.add(fork);
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN */
currentThreadResult = forkOrig.compute();
if (this.myIsCancelled()) {
return new HashSet<>();
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
//forkResults.add(fork_res);
/* FORK END */
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
if (this.myIsCancelled()) {
return new HashSet<>();
}
//noOfThread++;
//noOfThread--; moved to the end of compute
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((isUndefinedPairSetSet(fork_res))).toString());
forkResults.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
}
//noOfThread++;
} else if (parallel && (variance == 2)) {
writeLog("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK BEGIN */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context.newWithParallel(parallel), rekTiefe, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
if (this.myIsCancelled()) {
return new HashSet<>();
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
//For the parallel computation of the oder-constraints, methodSignature is copied
//and the methodSignature of a or nSaL, respectively, is removed again if it is not a solution.
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
Set<UnifyPair> nSaL = a;
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); //PL commented back in 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context.newWithParallel(parallel), rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
forks.add(fork);
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK BEGIN */
currentThreadResult = forkOrig.compute();
if (this.myIsCancelled()) {
return new HashSet<>();
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
//forkResults.add(fork_res); //probably wrong
/* FORK END */
for (TypeUnify2Task fork : forks) {
Set<Set<UnifyPair>> fork_res = fork.join();
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkResults.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
}
//noOfThread++;
} else {
//parallel == false or MaxNoOfThreads has been reached, continue working sequentially
elems.add(a); //PL 2019-01-16 is this really needed? it is already set in line 859 -- yes, it is needed, see line 859
currentThreadResult = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
/*
* When working in parallel, depending on the variance a new thread
* is started that continues the work in parallel.
*/
if (parallel) {
currentThreadResult = varianceCase.computeParallel(
forkResults, elems, eq, oderConstraints, fc, rekTiefe, methodSignatureConstraint,
nextSetAsList, sameEqSet, result, aParDef
);
}
else {
// same as variance = 0
elems.add(varianceCase.a); //PL 2019-01-16 is this really needed? it is already set in line 859 -- yes, it is needed, see line 859
currentThreadResult = this.unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
}
//From here on, all parallel computations are merged again.
if (oderConstraint) {//when the next element of nextSetasList is taken, the previous method signature must be removed
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
//System.out.println("REMOVE: " +methodSignatureConstraint);
}
if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) {
@@ -1060,7 +795,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| result.isEmpty()) {
if ((!result.isEmpty() && !currentThreadResult.isEmpty() && !isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result)) //correct solutions from und-constraints
&& (a.stream().map(x -> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //do not execute for oder-constraints
&& (varianceCase.a.stream().map(x -> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //do not execute for oder-constraints
{
//TODO: PL 2019-01-15: Bug 129: at the moment only the maximum and the minimum of the current element are considered.
//The sets to be unified can contain several elements; this has not been taken into account so far
@@ -1068,7 +803,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Determine all variables that were not added in a
//PL 2018-12-28: there was a ClassCastException here, was not reproducible
// System.out.println("");
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a);
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(varianceCase.a);
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
@@ -1085,49 +820,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> fstElemResult = result.iterator().next();
Set<UnifyPair> compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
if (variance == 1) {
writeLog("a_last:" + a_last + " a: " + a);
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
result = currentThreadResult;
} else {
if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} //else {
if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
} else {
if (variance == -1) {
writeLog("a_last:" + a_last + " a: " + a);
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
result = currentThreadResult;
} else {
if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else {
if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
}
} else {
if (variance == 0) {
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
}
}
varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes);
} catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString());
}
@@ -1175,135 +868,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
if (variance == 1) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (oderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be inserted PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
} else if (variance == -1) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (oderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetAsList);
//a_new must be added if it is not inherited; it is then removed again later
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
//if x is not inherited, the next overloading begins at the next greater element
notInherited.forEach(x -> {
notErased.addAll(oup.greaterEqThan(x, greaterSetasList));
});
//the smallest element is the element from which a_new inherited
//and must therefore be removed
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
if (notErasedIt.hasNext()) {
Set<UnifyPair> min = oup.min(notErasedIt);
notErased.remove(min);
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
}
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be inserted PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = oup.greaterEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
} else if (variance == 0) {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
if (!oderConstraint) {
break;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
if (shouldBreak) {
break;
}
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
}
/* commented out to disable all max and min handling END */
@@ -1315,7 +885,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
);
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(a::contains)
.filter(varianceCase.a::contains)
//.filter(y -> abhSubst.contains(y))
.collect(Collectors.toCollection(HashSet::new));
@@ -1358,7 +928,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("a2: " + rekTiefe + " " + varianceCase.a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetAsList.toString());
@@ -1379,12 +949,19 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("currentThreadResult: " + currentThreadResult.toString());
a_last = varianceCase.a;
}
//2020-02-02: if (variance == 2) insert the call to filterOverriding here
writeLog("Return computeCR: " + result.toString());
Logger.print("Finished " + i2.incrementAndGet());
return result;
}
private static final AtomicInteger i2 = new AtomicInteger(0);
/**
* checks whether there is a contradiction in sameEqSet for some (a = ty) \in a
*

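The three deleted parallel blocks (variance 1, -1 and 2) all followed one fork/join pattern that presumably now lives behind VarianceCase.computeParallel: fork a task for every unrelated case, compute the chosen case on the current thread, then join. A self-contained sketch of that pattern, with illustrative types standing in for the project's task classes:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Illustrative reduction of the deleted fork/join blocks: the primary
// case runs inline while the unrelated cases are forked and joined.
class CaseTask extends RecursiveTask<Set<String>> {
    private final String caseElement;
    CaseTask(String caseElement) { this.caseElement = caseElement; }

    @Override
    protected Set<String> compute() {
        Set<String> out = new HashSet<>();
        out.add("solved:" + caseElement); // stand-in for the real unify step
        return out;
    }

    static Set<String> solveAll(String primary, List<String> unrelated) {
        List<CaseTask> forks = new ArrayList<>();
        for (String c : unrelated) {
            CaseTask fork = new CaseTask(c);
            forks.add(fork);
            fork.fork(); // start unrelated cases in parallel (forkOrig stays inline)
        }
        // compute the chosen case on the current thread, like forkOrig.compute() above
        Set<String> results = new CaseTask(primary).compute();
        for (CaseTask fork : forks) {
            results.addAll(fork.join()); // gather the forked results
        }
        return results;
    }
}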
View File

@@ -4,10 +4,10 @@ import java.io.Writer;
public class UnifyContext {
Writer logFile;
Boolean log;
Boolean parallel;
UnifyResultModel resultModel;
final Writer logFile;
final Boolean log;
final Boolean parallel;
final UnifyResultModel resultModel;
volatile UnifyTaskModel usedTasks;
public UnifyContext(
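With these fields final, toggling parallelism means copying the context instead of mutating it. The newWithParallel calls in the TypeUnify diff above then plausibly map to a copy method along these lines (the constructor parameter order is an assumption; only the field set and the method name come from the diffs):

// Sketch of a copy-style setter on the now-immutable context.
public UnifyContext newWithParallel(Boolean parallel) {
    UnifyContext copy = new UnifyContext(this.logFile, this.log, parallel, this.resultModel);
    copy.usedTasks = this.usedTasks; // shared task registry stays volatile and mutable
    return copy;
}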

View File

@@ -25,20 +25,19 @@ public class Variance0Case extends VarianceCase {
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
boolean oderConstraint,
Optional<UnifyPair> optOrigPair
) {
//if a <. theta, then a maximal element is very likely
//if theta <. a, then a minimal element is very likely
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (oderConstraint) {
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());

View File

@@ -27,13 +27,12 @@ public class Variance1Case extends VarianceCase {
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
boolean oderConstraint,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (oderConstraint) {
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);

View File

@@ -25,7 +25,6 @@ public class Variance2Case extends VarianceCase {
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
boolean oderConstraint,
Optional<UnifyPair> optOrigPair
) {

View File

@@ -63,7 +63,6 @@ public abstract class VarianceCase {
public abstract void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
boolean oderConstraint,
Optional<UnifyPair> optOrigPair
);
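Dropping the oderConstraint parameter works because the flag now lives on the strategy instance itself (isOderConstraint, as used in the subclasses above). The factory seen earlier in TypeUnifyTask, VarianceCase.createFromVariance(variance, oderConstraint, this, context), then presumably dispatches on the variance value; a sketch with assumed constructor signatures:

// Sketch: the class names come from the diffs, the switch body and the
// constructor parameters are assumptions.
public static VarianceCase createFromVariance(int variance, boolean oderConstraint,
        TypeUnifyTask task, UnifyContext context) {
    switch (variance) {
        case 1:  return new Variance1Case(oderConstraint, context);
        case -1: return new VarianceM1Case(oderConstraint, context);
        case 2:  return new Variance2Case(oderConstraint, context);
        default: return new Variance0Case(oderConstraint, context);
    }
}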

View File

@@ -27,12 +27,11 @@ public class VarianceM1Case extends VarianceCase {
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
boolean oderConstraint,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
if (oderConstraint) {
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);