forked from i21017/JavaCompilerCore
Compare commits
1 commit
feat/unify
...
dcdecd4b83
@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;

/**
 * A runtime exception that is used when a unification run is cancelled.
 * Throwing an exception makes it possible to detect the cancellation from
 * within nested method calls as well; previously only a plain "return X;"
 * would take place.
 */
public class UnifyCancelException extends RuntimeException {

}
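A minimal usage sketch of the pattern this class enables (the demo class and the cancelled flag below are hypothetical; only UnifyCancelException comes from this commit): deeply nested call sites throw, the top level catches, and no return value has to be threaded upwards.

import de.dhbwstuttgart.exceptions.UnifyCancelException;

public class UnifyCancelExample {
    // Hypothetical stand-ins for the real task state.
    static volatile boolean cancelled = false;

    static void deeplyNestedUnifyStep() {
        if (cancelled) {
            throw new UnifyCancelException(); // abort from any call depth
        }
        // ... actual unification work ...
    }

    public static void main(String[] args) {
        cancelled = true;
        try {
            deeplyNestedUnifyStep();
        } catch (UnifyCancelException e) {
            System.out.println("unification cancelled");
        }
    }
}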
@@ -0,0 +1,774 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.util.Triple;

import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.stream.Collectors;

class CartesianRecursiveHelper extends RecursiveTask<Set<Set<UnifyPair>>> {

    // Inherited values from TypeUnifyTask
    protected Set<? extends Set<UnifyPair>> nextSet;
    protected List<Set<UnifyPair>> nextSetAsList;
    protected boolean parallel;
    protected boolean oderConstraint;
    protected int variance;
    protected Optional<UnifyPair> optOrigPair;
    protected Set<UnifyPair> methodSignatureConstraint;
    protected Set<Set<UnifyPair>> singleElementSets;
    protected int rekTiefe;
    protected Set<UnifyPair> sameEqSet;
    protected List<Set<Constraint<UnifyPair>>> oderConstraints;
    protected Set<UnifyPair> eq;
    protected IFiniteClosure fc;
    protected Writer logFile;
    protected boolean log;

    // Custom properties
    protected TypeUnifyTask typeUnifyTask;
    protected Set<Set<UnifyPair>> result;

    public CartesianRecursiveHelper(TypeUnifyTask typeUnifyTask) {
        // TODO receive arguments
    }

    protected Set<Set<UnifyPair>> run() {
        result = new HashSet<>();

        Set<UnifyPair> a_last = null;
        while (!nextSetAsList.isEmpty()) {
            // determine value, then start as fork

            // select the next case to work on from nextSetAsList (removing it)
            var nextCase = this.selectNextCases();
            Set<UnifyPair> a = nextCase.getValue1();
            List<Set<UnifyPair>> nextSetasListRest = nextCase.getValue2();
            List<Set<UnifyPair>> nextSetasListOderConstraints = nextCase.getValue3();

            writeLog("nextSet: " + nextSet.toString());
            writeLog("nextSetasList: " + nextSetAsList.toString());

            if (oderConstraint) {
                // Method constraints are stored for the bytecode generation of method calls
                methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
                writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
                //System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
                //System.out.println("a: " +a);
                //System.out.println("eq: " +eq);
                //System.out.println();
            }

            /* If (a \in theta) \in a leads to a contradiction in oneElems,
             * a is discarded and we move on to the next element of nextSetasList
             */
            if (!oderConstraint && !sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(a, sameEqSet, result)) {
                TypeUnifyTask.noShortendElements++;
                continue;
            }

            var innerLoopResult = innerLoop(a, a_last, nextSetasListRest, nextSetasListOderConstraints);
            if (innerLoopResult == null) {
                break;
            }

            a_last = a;
        }

        return result;
    }

    /**
     * Selects values for the next iteration in the run method:
     * - a : the element selected to be worked on in this iteration
     * - nextSetasListRest: the list of cases that have no relation to the selected a and still have to be worked on
     * - nextSetasListOderConstraints: the list of cases whose receiver contains "? extends", typically one element
     *
     * @return The new a, the new nextSetasListRest, the new nextSetasListOderConstraints
     */
    protected Triple<Set<UnifyPair>, List<Set<UnifyPair>>, List<Set<UnifyPair>>> selectNextCases() {
        Set<UnifyPair> a;
        /* List of cases for parallel processing.
         * Contains elements that are not in relation to the current case in
         * the variable a. These have to be processed in any case, which is
         * why their computation is started in parallel.
         */
        List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();

        /* List of cases whose receiver contains "? extends" (or does not contain it, respectively).
         * Usually this is exactly one element.
         * This element is later removed from nextSetasList once the respective
         * other element has led to success.
         */
        List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();

        if (variance == 1) {
            a = typeUnifyTask.oup.max(nextSetAsList.iterator());
            writeLog("Max: a in " + variance + " " + a);
            nextSetAsList.remove(a);
            if (oderConstraint) {
                nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
            }
            writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);

            // Determine all maximal elements in nextSetasListRest;
            // parallel computation is started only for these.
            Set<UnifyPair> finalA = a;
            nextSetasListRest = typeUnifyTask.oup.maxElements(
                    nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
            );
        } else if (variance == -1) {
            a = typeUnifyTask.oup.min(nextSetAsList.iterator());
            writeLog("Min: a in " + variance + " " + a);
            if (oderConstraint) {
                nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
            }
            writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
            nextSetAsList.remove(a);

            // Determine all minimal elements in nextSetasListRest;
            // parallel computation is started only for these.
            Set<UnifyPair> finalA = a;
            nextSetasListRest = typeUnifyTask.oup.minElements(
                    nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
            );
        } else if (variance == 2) {
            a = nextSetAsList.removeFirst();

            // Parallel computation is started for all elements.
            nextSetasListRest = new ArrayList<>(nextSetAsList);
        } else if (variance == 0) {
            // if a <. theta, then a maximal element is very likely;
            // if theta <. a, then a minimal element is very likely
            if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
                if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
                    a = typeUnifyTask.oup.max(nextSetAsList.iterator());
                } else {
                    a = typeUnifyTask.oup.min(nextSetAsList.iterator());
                }
                nextSetAsList.remove(a);
            } else if (oderConstraint) {
                a = typeUnifyTask.oup.max(nextSetAsList.iterator());
                nextSetAsList.remove(a);
                nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
            } else {
                a = nextSetAsList.removeFirst();
            }
        } else {
            throw new RuntimeException("Invalid variance in cartesian product calculation: " + variance);
        }

        return new Triple<>(a, nextSetasListRest, nextSetasListOderConstraints);
    }
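    /* Illustrative walkthrough (added; the constraint sets are made up):
     * with variance == 1 and nextSetAsList ordered by oup as
     *   { A <. Object } > { A <. Number } > { A <. Integer },
     * selectNextCases picks the maximum { A <. Object } as a, removes it from
     * nextSetAsList, and returns in nextSetasListRest the maximal elements
     * among the remaining cases that a does not dominate, so that they can be
     * forked in parallel.
     */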

    @Override
    protected Set<Set<UnifyPair>> compute() {


        return result;
    }
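    // NOTE: compute() only returns the result field, which is populated by run();
    // presumably compute() is intended to delegate to run() once the constructor
    // receives its arguments (see the TODO above).

    /**
     * One iteration step of the cartesian-product loop in run().
     * Returns null to signal that run() should stop iterating;
     * any non-null value (currently 1) means: continue with the next case.
     */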
    private Object innerLoop(
            Set<UnifyPair> a,
            Set<UnifyPair> a_last,
            List<Set<UnifyPair>> nextSetasListRest,
            List<Set<UnifyPair>> nextSetasListOderConstraints
    ) {
        Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
        writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + a.toString() + "\n");

        // Result variable for the current thread
        Set<Set<UnifyPair>> currentThreadResult;

        // Set of the results of the forked threads
        Set<Set<Set<UnifyPair>>> forkResults = new HashSet<>();

        Set<Set<UnifyPair>> aParDef = new HashSet<>();

        /* When working in parallel, a new thread is started (depending on the
         * variance) that continues the work in parallel.
         */
        if (parallel && (variance == 1)) {
            Set<TypeUnify2Task> forks = new HashSet<>();
            Set<UnifyPair> newEqOrig = new HashSet<>(eq);
            Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
            List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
            newElemsOrig.add(a);

            /* FORK BEGIN */
            TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, methodSignatureConstraint);
            //forks.add(forkOrig);
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            /* FORK END */

            writeLog("a in " + variance + " " + a);
            writeLog("nextSetasListRest: " + nextSetasListRest.toString());
            while (!nextSetasListRest.isEmpty()) {
                Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
                nextSetAsList.remove(nSaL);
                writeLog("1 RM" + nSaL.toString());

                if (!oderConstraint) {
                    // check whether a =. ty \in nSaL contradicts sameEqSet
                    if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
                        nSaL = null;
                        typeUnifyTask.noShortendElements++;
                        continue;
                    }
                } else {
                    nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
                }
                Set<UnifyPair> newEq = new HashSet<>(eq);
                Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                newElems.add(nSaL);
                TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
                forks.add(fork);
                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                fork.fork();
            }
            //currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

            /* FORK BEGIN */
            currentThreadResult = forkOrig.compute();
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            //noOfThread++;
            forkOrig.writeLog("final Orig 1");
            forkOrig.closeLogFile();
            //Set<Set<UnifyPair>> fork_res = forkOrig.join();
            //forkResults.add(fork_res);
            /* FORK END */

            for (TypeUnify2Task fork : forks) {
                Set<Set<UnifyPair>> fork_res = fork.join();
                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                writeLog("fork_res: " + fork_res.toString());
                writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
                forkResults.add(fork_res);
                if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
                    aParDef.add(fork.getNextSetElement());
                }
                fork.writeLog("final 1");
                fork.closeLogFile();
            }
            //noOfThread++;
        } else if (parallel && (variance == -1)) {
            Set<TypeUnify2Task> forks = new HashSet<>();
            Set<UnifyPair> newEqOrig = new HashSet<>(eq);
            Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
            List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
            newElemsOrig.add(a);

            /* FORK BEGIN */
            TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
            //forks.add(forkOrig);
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            /* FORK END */

            writeLog("a in " + variance + " " + a);
            writeLog("nextSetasListRest: " + nextSetasListRest.toString());

            while (!nextSetasListRest.isEmpty()) {
                Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
                nextSetAsList.remove(nSaL);
                writeLog("-1 RM" + nSaL.toString());

                if (!oderConstraint) {
                    // check whether a =. ty \in nSaL contradicts sameEqSet
                    if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
                        nSaL = null;
                        typeUnifyTask.noShortendElements++;
                        continue;
                    }
                } else {
                    nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
                }
                Set<UnifyPair> newEq = new HashSet<>(eq);
                Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                newElems.add(nSaL);
                TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
                forks.add(fork);

                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                fork.fork();
            }
            //currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

            /* FORK BEGIN */
            currentThreadResult = forkOrig.compute();
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            //noOfThread++;
            forkOrig.writeLog("final Orig -1");
            forkOrig.closeLogFile();
            //Set<Set<UnifyPair>> fork_res = forkOrig.join();
            //forkResults.add(fork_res);
            /* FORK END */

            for (TypeUnify2Task fork : forks) {
                Set<Set<UnifyPair>> fork_res = fork.join();
                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                //noOfThread++;
                //noOfThread--; moved to the end of compute
                writeLog("fork_res: " + fork_res.toString());
                writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
                forkResults.add(fork_res);
                if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
                    aParDef.add(fork.getNextSetElement());
                }
                fork.writeLog("final -1");
                fork.closeLogFile();
            }
            //noOfThread++;
        } else if (parallel && (variance == 2)) {
            writeLog("var2einstieg");
            Set<TypeUnify2Task> forks = new HashSet<>();
            Set<UnifyPair> newEqOrig = new HashSet<>(eq);
            Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
            List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
            newElemsOrig.add(a);

            /* FORK BEGIN */
            TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
            //forks.add(forkOrig);
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            /* FORK END */

            writeLog("a in " + variance + " " + a);
            writeLog("nextSetasListRest: " + nextSetasListRest.toString());

            // For the parallel computation of the or-constraints, methodSignature is copied
            // and the methodSignature of a resp. nSaL is removed again if it is not a solution.
            Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
            Set<UnifyPair> nSaL = a;

            while (!nextSetasListRest.isEmpty()) {
                methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
                nSaL = nextSetasListRest.removeFirst();
                nextSetAsList.remove(nSaL); //PL re-enabled 20-02-03
                methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
                Set<UnifyPair> newEq = new HashSet<>(eq);
                Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                newElems.add(nSaL);
                TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraintForParallel));
                forks.add(fork);
                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                fork.fork();
            }
            //currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

            /* FORK BEGIN */
            currentThreadResult = forkOrig.compute();
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            //noOfThread++;
            forkOrig.writeLog("final Orig 2");
            forkOrig.closeLogFile();
            //Set<Set<UnifyPair>> fork_res = forkOrig.join();
            //forkResults.add(fork_res); // probably wrong
            /* FORK END */
            for (TypeUnify2Task fork : forks) {
                Set<Set<UnifyPair>> fork_res = fork.join();
                if (typeUnifyTask.myIsCancelled()) {
                    throw new UnifyCancelException();
                }
                forkResults.add(fork_res);
                fork.writeLog("final 2");
                fork.closeLogFile();
            }
            //noOfThread++;
        } else {
            // parallel == false or MaxNoOfThreads has been reached; continue sequentially
            elems.add(a); //PL 2019-01-16 is this really needed? It is already set in line 859 -- yes, it is needed, see line 859
            currentThreadResult = typeUnifyTask.unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
        }
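        /* Merge policy (summary): a defined (correct) currentThreadResult replaces
         * an undefined (error) result; results of the same kind are accumulated,
         * and for comparable and-constraint solutions the oup comparison below
         * decides which one survives.
         */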
        // From here on, all parallel computations are merged again.
        if (oderConstraint) { // if another element of nextSetasList is taken, the previous method signature has to be removed
            methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
            //System.out.println("REMOVE: " +methodSignatureConstraint);
        }
        if (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && typeUnifyTask.isUndefinedPairSetSet(result)) {
            // if a correct result has been found, delete all error cases
            synchronized (result) {
                result.clear();
                result.addAll(currentThreadResult);
            }
        } else {
            if ((typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && typeUnifyTask.isUndefinedPairSetSet(result))
                    || (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && !typeUnifyTask.isUndefinedPairSetSet(result))
                    || result.isEmpty()) {

                if ((!result.isEmpty() && !currentThreadResult.isEmpty() && !typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && !typeUnifyTask.isUndefinedPairSetSet(result)) // correct solutions from and-constraints
                        && (a.stream().map(x -> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) // do not execute for or-constraints
                {
                    //TODO: PL 2019-01-15: Bug 129: At the moment only the maximum and the minimum of the current element are considered.
                    //The sets to be unified can contain several elements; this has not been taken into account so far.

                    // Determine all variables that were not added in a
                    //PL 2018-12-28: a ClassCastException occurred here; it was not reproducible
                    // System.out.println("");
                    List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a);
                    Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
                    Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));

                    // Determine all variables that were not added in a_last
                    //System.out.println(a_last);

                    try { //PL added 2019-03-06 because map kept producing null pointers
                        a_last.forEach(x -> {
                            writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());
                        }); //PL 2019-05-13 moved into the try block; the NullPointerException showed up in this line.
                        List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
                        //[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] CAN OCCUR
                        // the first element suffices, since vars must always be mapped to the same elements
                        Set<UnifyPair> fstElemResult = result.iterator().next();
                        Set<UnifyPair> compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));

                        if (variance == 1) {
                            writeLog("a_last:" + a_last + " a: " + a);
                            writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
                            writeLog("compResult:" + compResult + " compRes: " + compRes);
                            int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
                            if (resOfCompare == -1) {
                                writeLog("Geloescht result: " + result);
                                synchronized (result) {
                                    result.clear();
                                    result.addAll(currentThreadResult);
                                }
                            } else {
                                if (resOfCompare == 0) {
                                    result.addAll(currentThreadResult);
                                } //else {
                                if (resOfCompare == 1) {
                                    writeLog("Geloescht currentThreadResult: " + currentThreadResult);
                                    //result = result;
                                }
                            }
                        } else {
                            if (variance == -1) {
                                writeLog("a_last:" + a_last + " a: " + a);
                                writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
                                writeLog("compResult:" + compResult + " compRes: " + compRes);
                                int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
                                if (resOfCompare == 1) {
                                    writeLog("Geloescht result: " + result);
                                    synchronized (result) {
                                        result.clear();
                                        result.addAll(currentThreadResult);
                                    }
                                } else {
                                    if (resOfCompare == 0) {
                                        result.addAll(currentThreadResult);
                                    } else {
                                        if (resOfCompare == -1) {
                                            writeLog("Geloescht currentThreadResult: " + currentThreadResult);
                                            //result = result;
                                        }
                                    }
                                }
                            } else {
                                if (variance == 0) {
                                    writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
                                    result.addAll(currentThreadResult);
                                }
                            }
                        }
                    } catch (NullPointerException e) {
                        writeLog("NullPointerException: " + a_last.toString());
                    }
                } else {
                    // add all error cases and all correct results respectively
                    writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
                    result.addAll(currentThreadResult);
                }
            }
            //else {
            // if correct results exist and error cases come in, ignore the error cases
            //  if (isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result)) {
            //      result = result;
            //  }
            //}
        }

        if (parallel) {
            for (Set<Set<UnifyPair>> par_res : forkResults) {
                if (!typeUnifyTask.isUndefinedPairSetSet(par_res) && typeUnifyTask.isUndefinedPairSetSet(result)) {
                    // if a correct result has been found, delete all error cases
                    synchronized (result) {
                        result.clear();
                        result.addAll(par_res);
                    }
                    if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
                        // System.out.println();
                    }
                } else {
                    if ((typeUnifyTask.isUndefinedPairSetSet(par_res) && typeUnifyTask.isUndefinedPairSetSet(result))
                            || (!typeUnifyTask.isUndefinedPairSetSet(par_res) && !typeUnifyTask.isUndefinedPairSetSet(result))
                            || result.isEmpty()) {
                        // add all error cases and all correct results respectively
                        writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
                        result.addAll(par_res);
                    }
                }
            }
            //break;
        }

        /* commented out to disable all max and min handling BEGIN */
        if (!result.isEmpty() && (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) || !aParDef.isEmpty())) {
            if (nextSetAsList.iterator().hasNext()
                    && nextSetAsList.getFirst().stream().anyMatch(x -> x.getLhsType().getName().equals("B"))
                    && nextSetAsList.size() > 1) {
                // System.out.print("");
            }

            // Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
            if (variance == 1) {
                // System.out.println("");
                writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
                writeLog("aParDef: " + aParDef.toString());
                aParDef.add(a);
                Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
                if (oderConstraint) {
                    nextSetAsList.removeAll(nextSetasListOderConstraints);
                    nextSetasListOderConstraints = new ArrayList<>();
                    writeLog("Removed: " + nextSetasListOderConstraints);
                    while (aParDefIt.hasNext()) {
                        Set<UnifyPair> a_new = aParDefIt.next();
                        List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
                        writeLog("smallerSetasList: " + smallerSetasList);
                        List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
                                .filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
                                .collect(Collectors.toCollection(ArrayList::new));
                        writeLog("notInherited: " + notInherited + "\n");
                        List<Set<UnifyPair>> notErased = new ArrayList<>();
                        notInherited.forEach(x -> {
                            notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
                        });
                        List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
                        writeLog("notErased: " + notErased + "\n");
                        erased.removeAll(notErased);
                        nextSetAsList.removeAll(erased);

                        writeLog("Removed: " + erased);
                        writeLog("Not Removed: " + nextSetAsList);
                    }
                } else {
                    while (aParDefIt.hasNext()) {
                        //nextSetasListIt = nextSetasList.iterator(); should be added PL 2020-04-28
                        Set<UnifyPair> a_new = aParDefIt.next();
                        List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
                        nextSetAsList.removeAll(erased);

                        writeLog("Removed: " + erased);
                        writeLog("Not Removed: " + nextSetAsList);
                    }
                }
            } else if (variance == -1) {
                // System.out.println("");
                writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
                writeLog("aParDef: " + aParDef.toString());
                aParDef.add(a);
                Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
                if (oderConstraint) {
                    nextSetAsList.removeAll(nextSetasListOderConstraints);
                    writeLog("Removed: " + nextSetasListOderConstraints);
                    nextSetasListOderConstraints = new ArrayList<>();
                    while (aParDefIt.hasNext()) {
                        Set<UnifyPair> a_new = aParDefIt.next();
                        List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);

                        // a_new has to be added if it is not inherited; it is removed again later
                        if (!((Constraint<UnifyPair>) a_new).isInherited()) {
                            greaterSetasList.add(a_new);
                        }
                        List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
                                .filter(x -> !((Constraint<UnifyPair>) x).isInherited())
                                .collect(Collectors.toCollection(ArrayList::new));
                        List<Set<UnifyPair>> notErased = new ArrayList<>();

                        // if x is not inherited, the next overload starts at the next larger element
                        notInherited.forEach(x -> {
                            notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
                        });

                        // the smallest element is the element from which a_new inherited
                        // and therefore has to be removed
                        Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
                        if (notErasedIt.hasNext()) {
                            Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
                            notErased.remove(min);
                            notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
                        }

                        List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
                        erased.removeAll(notErased);
                        nextSetAsList.removeAll(erased);

                        writeLog("Removed: " + erased);
                        writeLog("Not Removed: " + nextSetAsList);
                    }
                } else {
                    while (aParDefIt.hasNext()) {
                        //nextSetasListIt = nextSetasList.iterator(); should be added PL 2020-04-28
                        Set<UnifyPair> a_new = aParDefIt.next();
                        List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);

                        nextSetAsList.removeAll(erased);

                        writeLog("Removed: " + erased);
                        writeLog("Not Removed: " + nextSetAsList);
                    }
                }
            } else if (variance == 0) {
                writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
                if (!oderConstraint) {
                    return null;
                } else {
                    nextSetAsList.removeAll(nextSetasListOderConstraints);
                    nextSetasListOderConstraints = new ArrayList<>();
                    writeLog("Removed: " + nextSetasListOderConstraints);
                    List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
                    List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
                            .filter(x -> !((Constraint<UnifyPair>) x).isInherited())
                            .collect(Collectors.toCollection(ArrayList::new));
                    List<Set<UnifyPair>> notErased = new ArrayList<>();
                    notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
                    List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
                    erased.removeAll(notErased);
                    nextSetAsList.removeAll(erased);

                    writeLog("Removed: " + erased);
                    writeLog("Not Removed: " + nextSetAsList);
                }
            }

            writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
        }
        /* commented out to disable all max and min handling END */
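        // Backtracking for undefined results: collect the substitutions involved
        // in the failure (abhSubst), intersect them with the current case a
        // (durchschnitt), and prune every remaining case from nextSetAsList that
        // contains this intersection, since it would fail for the same reason.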
        if (typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) {
            int nofstred = 0;
            Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions);
            abhSubst.addAll(
                    TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases)
            );

            Set<UnifyPair> durchschnitt = abhSubst.stream()
                    .filter(a::contains)
                    //.filter(y -> abhSubst.contains(y))
                    .collect(Collectors.toCollection(HashSet::new));

            //Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
            int len = nextSetAsList.size();
            Set<UnifyPair> undefRes = currentThreadResult.stream().reduce((y, z) -> {
                y.addAll(z);
                return y;
            }).get(); // flatten all undef results

            /*
            Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
                    .map(x -> {
                        Set<UnifyPair> su = x.getAllSubstitutions(); // all substitutions used
                        su.add(x.getGroundBasePair()); // original pair
                        su.removeAll(durchschnitt); // remove all currently changed pairs
                        return new Pair<>(su, x.getGroundBasePair());
                    })
                    .collect(Collectors.toCollection(HashSet::new));
            */

            if (currentThreadResult.size() > 1) {
                // System.out.println();
            }
            writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
            if (!oderConstraint) { //PL 2023-02-08 added: for or-constraints, substitutions are not to be regarded as substitutions in this sense
                nextSetAsList = nextSetAsList.stream().filter(x -> {
                    //Boolean ret = false;
                    //for (PlaceholderType var : vars) {
                    //    ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
                    //}
                    return (!x.containsAll(durchschnitt));
                }) //.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) // commented out for test purposes to determine nofstred PL 2018-10-10
                        .collect(Collectors.toCollection(ArrayList::new));
            }
            writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
            nofstred = nextSetAsList.size();
            //NOT YET correct PL 2018-10-12
            //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
            //        .collect(Collectors.toCollection(ArrayList::new));
            writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
            writeLog("abhSubst: " + abhSubst.toString());
            writeLog("a2: " + rekTiefe + " " + a.toString());
            writeLog("Durchschnitt: " + durchschnitt.toString());
            writeLog("nextSet: " + nextSet.toString());
            writeLog("nextSetasList: " + nextSetAsList.toString());
            writeLog("Number first erased Elements (undef): " + (len - nofstred));
            writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
            writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
            typeUnifyTask.noAllErasedElements += (len - nextSetAsList.size());
            writeLog("Number of all erased Elements (undef): " + typeUnifyTask.noAllErasedElements.toString());
            typeUnifyTask.noBacktracking++;
            writeLog("Number of Backtracking: " + typeUnifyTask.noBacktracking);
            // System.out.println("");
        }
        //if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
        //    return result;
        //}
        //else {
        //    result.removeIf(y -> isUndefinedPairSet(y));
        //}
        //else result.stream().filter(y -> !isUndefinedPairSet(y));
        writeLog("currentThreadResult: " + currentThreadResult.toString());

        return 1;
    }

    protected void writeLog(String s) {

    }
}
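Since CartesianRecursiveHelper is a RecursiveTask, it would be driven through a ForkJoinPool once its constructor actually copies the state out of TypeUnifyTask (still a TODO above). A minimal sketch of that wiring under this assumption; the demo class is hypothetical and nothing in it besides CartesianRecursiveHelper itself comes from the commit:

import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

class CartesianRecursiveHelperDemo {
    // Sketch only: the constructor does not yet copy the task state,
    // so this cannot run as-is.
    static Set<Set<UnifyPair>> runHelper(TypeUnifyTask typeUnifyTask) {
        CartesianRecursiveHelper helper = new CartesianRecursiveHelper(typeUnifyTask);
        // A RecursiveTask is scheduled on a ForkJoinPool; compute() supplies the result.
        return ForkJoinPool.commonPool().invoke(helper);
    }
}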
@@ -2,6 +2,7 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -726,666 +727,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
}
|
||||
|
||||
|
||||
Set<Set<UnifyPair>> result = new HashSet<>();
|
||||
|
||||
Set<UnifyPair> a = null;
|
||||
while (!nextSetAsList.isEmpty()) {
|
||||
Set<UnifyPair> a_last = a;
|
||||
|
||||
/* Liste der Faelle für die parallele Verarbeitung
|
||||
* Enthaelt Elemente, die nicht in Relation zu aktuellem Fall in der
|
||||
* Variablen a stehen. Diese muesse auf alle Faelle bearbeitet werden,
|
||||
* Deshalb wird ihre Berechnung parallel angestossen.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
|
||||
|
||||
/* Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
|
||||
* In der Regel ist dies genau ein Element
|
||||
* Dieses Element wird später aus nextSetasList geloescht, wenn das jeweils andere Element zum Erfolg
|
||||
* gefuehrt hat.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
|
||||
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
writeLog("nextSetasList: " + nextSetAsList.toString());
|
||||
if (variance == 1) {
|
||||
a = oup.max(nextSetAsList.iterator());
|
||||
writeLog("Max: a in " + variance + " " + a);
|
||||
nextSetAsList.remove(a);
|
||||
if (oderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||
|
||||
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else if (variance == -1) {
|
||||
a = oup.min(nextSetAsList.iterator());
|
||||
writeLog("Min: a in " + variance + " " + a);
|
||||
if (oderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
|
||||
nextSetAsList.remove(a);
|
||||
|
||||
//Alle minimalen Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = oup.minElements(
|
||||
nextSetAsList.stream().filter(a_next -> oup.compare(finalA, a_next) != -1).toList()
|
||||
);
|
||||
} else if (variance == 2) {
|
||||
a = nextSetAsList.removeFirst();
|
||||
|
||||
//Fuer alle Elemente wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = new ArrayList<>(nextSetAsList);
|
||||
} else if (variance == 0) {
|
||||
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
|
||||
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
|
||||
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||
a = oup.max(nextSetAsList.iterator());
|
||||
} else {
|
||||
a = oup.min(nextSetAsList.iterator());
|
||||
}
|
||||
nextSetAsList.remove(a);
|
||||
} else if (oderConstraint) {
|
||||
a = oup.max(nextSetAsList.iterator());
|
||||
nextSetAsList.remove(a);
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
} else {
|
||||
a = nextSetAsList.removeFirst();
|
||||
}
|
||||
}
|
||||
|
||||
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
|
||||
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
|
||||
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
|
||||
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
|
||||
//System.out.println("a: " +a);
|
||||
//System.out.println("eq: " +eq);
|
||||
//System.out.println();
|
||||
}
|
||||
|
||||
i++;
|
||||
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
|
||||
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + a.toString() + "\n");
|
||||
|
||||
//Ergebnisvariable für den aktuelle Thread
|
||||
Set<Set<UnifyPair>> currentThreadResult;
|
||||
|
||||
//Menge der Ergebnisse der geforkten Threads
|
||||
Set<Set<Set<UnifyPair>>> forkResults = new HashSet<>();
|
||||
|
||||
Set<Set<UnifyPair>> aParDef = new HashSet<>();
|
||||
|
||||
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
|
||||
* a verworfen und zu nächstem Element von nextSetasList gegangen
|
||||
*/
|
||||
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
|
||||
a = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
|
||||
/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
|
||||
* gestartet, der parallel weiterarbeitet.
|
||||
*/
|
||||
if (parallel && (variance == 1)) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
|
||||
//forks.add(forkOrig);
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("1 RM" + nSaL.toString());
|
||||
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
forks.add(fork);
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig 1");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res);;
|
||||
/* FORK ENDE */
|
||||
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((isUndefinedPairSetSet(fork_res))).toString());
|
||||
forkResults.add(fork_res);
|
||||
if (!isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 1");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else if (parallel && (variance == -1)) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
//forks.add(forkOrig);
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("-1 RM" + nSaL.toString());
|
||||
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
forks.add(fork);
|
||||
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig -1");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res);
|
||||
/* FORK ENDE */
|
||||
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
//noOfThread++;
|
||||
//noOfThread--; an das Ende von compute verschoben
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((isUndefinedPairSetSet(fork_res))).toString());
|
||||
forkResults.add(fork_res);
|
||||
if (!isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final -1");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else if (parallel && (variance == 2)) {
|
||||
writeLog("var2einstieg");
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
//forks.add(forkOrig);
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
|
||||
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
|
||||
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
|
||||
Set<UnifyPair> nSaL = a;
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL); //PL einkommentiert 20-02-03
|
||||
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraintForParallel));
|
||||
forks.add(fork);
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig 2");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res); //vermutlich falsch
|
||||
/* FORK ENDE */
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
forkResults.add(fork_res);
|
||||
fork.writeLog("final 2");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else {
|
||||
//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
|
||||
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
|
||||
currentThreadResult = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
}
|
||||
|
||||
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
|
||||
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
|
||||
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
|
||||
//System.out.println("REMOVE: " +methodSignatureConstraint);
|
||||
}
|
||||
if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
result = currentThreadResult;
|
||||
} else {
|
||||
if ((isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result))
|
||||
|| (!isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
|
||||
if ((!result.isEmpty() && !currentThreadResult.isEmpty() && !isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
|
||||
&& (a.stream().map(x -> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
|
||||
{
|
||||
//TODO: PL 2019-01-15: Bug 129: Im Moment wird nur das Maximum und das Minimum des aktuellen Elements betrachtet.
|
||||
//Die zu vereinigenden Mengen können mehrere Elemente enthalten. Das ist bisher nicht berücksichtigt
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
|
||||
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
|
||||
// System.out.println("");
|
||||
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a);
|
||||
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
|
||||
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
|
||||
//System.out.println(a_last);
|
||||
|
||||
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
|
||||
a_last.forEach(x -> {
|
||||
writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());
|
||||
});//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
|
||||
List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
|
||||
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
|
||||
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
|
||||
Set<UnifyPair> fstElemResult = result.iterator().next();
|
||||
Set<UnifyPair> compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
if (variance == 1) {
|
||||
writeLog("a_last:" + a_last + " a: " + a);
|
||||
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
|
||||
writeLog("compResult:" + compResult + " compRes: " + compRes);
|
||||
int resOfCompare = oup.compare(compResult, compRes);
|
||||
if (resOfCompare == -1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
result = currentThreadResult;
|
||||
} else {
|
||||
if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
} //else {
|
||||
if (resOfCompare == 1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (variance == -1) {
|
||||
writeLog("a_last:" + a_last + " a: " + a);
|
||||
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
|
||||
writeLog("compResult:" + compResult + " compRes: " + compRes);
|
||||
int resOfCompare = oup.compare(compResult, compRes);
|
||||
if (resOfCompare == 1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
result = currentThreadResult;
|
||||
} else {
|
||||
if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
} else {
|
||||
if (resOfCompare == -1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (variance == 0) {
|
||||
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (NullPointerException e) {
|
||||
writeLog("NullPointerException: " + a_last.toString());
|
||||
}
|
||||
} else {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
}
|
||||
//else {
|
||||
//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
|
||||
// if (isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result)) {
|
||||
// result = result;
|
||||
// }
|
||||
//}
|
||||
}
|
||||
|
||||
if (parallel) {
|
||||
for (Set<Set<UnifyPair>> par_res : forkResults) {
|
||||
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
result = par_res;
|
||||
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
|
||||
// System.out.println();
|
||||
}
|
||||
} else {
|
||||
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|
||||
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
|
||||
result.addAll(par_res);
|
||||
}
|
||||
}
|
||||
}
|
||||
//break;
|
||||
}
|
||||
|
||||
/* commented out to disable all max and min consideration BEGIN */
if (!result.isEmpty() && (!isUndefinedPairSetSet(currentThreadResult) || !aParDef.isEmpty())) {
    if (nextSetAsList.iterator().hasNext()
            && nextSetAsList.getFirst().stream().anyMatch(x -> x.getLhsType().getName().equals("B"))
            && nextSetAsList.size() > 1) {
        // System.out.print("");
    }

    // Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
    if (variance == 1) {
        // System.out.println("");
        writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
        writeLog("aParDef: " + aParDef.toString());
        aParDef.add(a);
        Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
        if (oderConstraint) {
            nextSetAsList.removeAll(nextSetasListOderConstraints);
            writeLog("Removed: " + nextSetasListOderConstraints);
            nextSetasListOderConstraints = new ArrayList<>();
            while (aParDefIt.hasNext()) {
                Set<UnifyPair> a_new = aParDefIt.next();
                List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetAsList);
                writeLog("smallerSetasList: " + smallerSetasList);
                List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
                        .filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
                        .collect(Collectors.toCollection(ArrayList::new));
                writeLog("notInherited: " + notInherited + "\n");
                List<Set<UnifyPair>> notErased = new ArrayList<>();
                notInherited.forEach(x -> {
                    notErased.addAll(oup.smallerEqThan(x, smallerSetasList));
                });
                List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
                writeLog("notErased: " + notErased + "\n");
                erased.removeAll(notErased);
                nextSetAsList.removeAll(erased);

                writeLog("Removed: " + erased);
                writeLog("Not Removed: " + nextSetAsList);
            }
        } else {
            while (aParDefIt.hasNext()) {
                // nextSetasListIt = nextSetasList.iterator(); should be inserted PL 2020-04-28
                Set<UnifyPair> a_new = aParDefIt.next();
                List<Set<UnifyPair>> erased = oup.smallerEqThan(a_new, nextSetAsList);
                nextSetAsList.removeAll(erased);

                writeLog("Removed: " + erased);
                writeLog("Not Removed: " + nextSetAsList);
            }
        }
    } else if (variance == -1) {
        // System.out.println("");
        writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
        writeLog("aParDef: " + aParDef.toString());
        aParDef.add(a);
        Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
        if (oderConstraint) {
            nextSetAsList.removeAll(nextSetasListOderConstraints);
            writeLog("Removed: " + nextSetasListOderConstraints);
            nextSetasListOderConstraints = new ArrayList<>();
            while (aParDefIt.hasNext()) {
                Set<UnifyPair> a_new = aParDefIt.next();
                List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetAsList);

                // a_new must be added if it is not inherited; it is deleted again later
                if (!((Constraint<UnifyPair>) a_new).isInherited()) {
                    greaterSetasList.add(a_new);
                }
                List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
                        .filter(x -> !((Constraint<UnifyPair>) x).isInherited())
                        .collect(Collectors.toCollection(ArrayList::new));
                List<Set<UnifyPair>> notErased = new ArrayList<>();

                // if x is not inherited, the next overload starts at the next greater element
                notInherited.forEach(x -> {
                    notErased.addAll(oup.greaterEqThan(x, greaterSetasList));
                });

                // the smallest element is the one a_new inherited from,
                // so it must be deleted
                Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
                if (notErasedIt.hasNext()) {
                    Set<UnifyPair> min = oup.min(notErasedIt);
                    notErased.remove(min);
                    notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
                }

                List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
                erased.removeAll(notErased);
                nextSetAsList.removeAll(erased);

                writeLog("Removed: " + erased);
                writeLog("Not Removed: " + nextSetAsList);
            }
        } else {
            while (aParDefIt.hasNext()) {
                // nextSetasListIt = nextSetasList.iterator(); should be inserted PL 2020-04-28
                Set<UnifyPair> a_new = aParDefIt.next();
                List<Set<UnifyPair>> erased = oup.greaterEqThan(a_new, nextSetAsList);
                nextSetAsList.removeAll(erased);

                writeLog("Removed: " + erased);
                writeLog("Not Removed: " + nextSetAsList);
            }
        }
    } else if (variance == 0) {
        writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
        if (!oderConstraint) {
            break;
        } else {
            nextSetAsList.removeAll(nextSetasListOderConstraints);
            writeLog("Removed: " + nextSetasListOderConstraints);
            nextSetasListOderConstraints = new ArrayList<>();
            List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a, nextSetAsList);
            List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
                    .filter(x -> !((Constraint<UnifyPair>) x).isInherited())
                    .collect(Collectors.toCollection(ArrayList::new));
            List<Set<UnifyPair>> notErased = new ArrayList<>();
            notInherited.forEach(x -> notErased.addAll(oup.smallerEqThan(x, smallerSetasList)));
            List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
            erased.removeAll(notErased);
            nextSetAsList.removeAll(erased);

            writeLog("Removed: " + erased);
            writeLog("Not Removed: " + nextSetAsList);
        }
    }

    writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
}
/* commented out to disable all max and min consideration END */
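The max/min block above removes candidates that are dominated under the oup partial order, so dominated alternatives are never tried again. A toy sketch of the same pruning step, using integers with their natural order in place of constraint sets (all names hypothetical):

import java.util.ArrayList;
import java.util.List;

// Mirrors nextSetAsList.removeAll(oup.smallerEqThan(chosen, nextSetAsList)).
class PruneSketch {
    static List<Integer> prune(int chosen, List<Integer> candidates) {
        List<Integer> erased = new ArrayList<>();
        for (Integer c : candidates) {
            if (c <= chosen) {          // dominated by (or equal to) the chosen case
                erased.add(c);
            }
        }
        candidates.removeAll(erased);   // never revisit dominated alternatives
        return candidates;
    }
}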
if (isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) {
    int nofstred = 0;
    Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions);
    abhSubst.addAll(
            TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases)
    );

    Set<UnifyPair> durchschnitt = abhSubst.stream()
            .filter(a::contains)
            //.filter(y -> abhSubst.contains(y))
            .collect(Collectors.toCollection(HashSet::new));

    //Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType) x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
    int len = nextSetAsList.size();
    Set<UnifyPair> undefRes = currentThreadResult.stream().reduce((y, z) -> {
        y.addAll(z);
        return y;
    }).get(); // flatten all undef results

    /*
    Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
            .map(x -> {
                Set<UnifyPair> su = x.getAllSubstitutions(); // all substitutions used
                su.add(x.getGroundBasePair()); // original pair
                su.removeAll(durchschnitt); // remove all currently changed pairs
                return new Pair<>(su, x.getGroundBasePair());
            })
            .collect(Collectors.toCollection(HashSet::new));
    */

    if (currentThreadResult.size() > 1) {
        // System.out.println();
    }
    writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
    if (!oderConstraint) { // PL 2023-02-08 added: with or-constraints, substitutions are not to be regarded as substitutions in this sense
        nextSetAsList = nextSetAsList.stream().filter(x -> {
            //Boolean ret = false;
            //for (PlaceholderType var : vars) {
            //    ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c, d) -> c || d).get();
            //}
            return (!x.containsAll(durchschnitt));
        })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) // commented out for test purposes to determine nofstred PL 2018-10-10
          .collect(Collectors.toCollection(ArrayList::new));
    }
    writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
    nofstred = nextSetAsList.size();
    // NOT YET correct PL 2018-10-12
    //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
    //        .collect(Collectors.toCollection(ArrayList::new));
    writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
    writeLog("abhSubst: " + abhSubst.toString());
    writeLog("a2: " + rekTiefe + " " + a.toString());
    writeLog("Durchschnitt: " + durchschnitt.toString());
    writeLog("nextSet: " + nextSet.toString());
    writeLog("nextSetasList: " + nextSetAsList.toString());
    writeLog("Number first erased Elements (undef): " + (len - nofstred));
    writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
    writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
    noAllErasedElements += (len - nextSetAsList.size());
    writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
    noBacktracking++;
    writeLog("Number of Backtracking: " + noBacktracking);
    // System.out.println("");
}
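The backtracking block above prunes the remaining candidates: any candidate that contains every pair of durchschnitt, the substitutions shared with the failed attempt, would fail for the same reason. A condensed sketch of that filter, with hypothetical names:

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Candidates containing the entire failure core are skipped.
class BacktrackFilterSketch {
    static <P> List<Set<P>> dropDoomed(List<Set<P>> candidates, Set<P> failureCore) {
        return candidates.stream()
                .filter(x -> !x.containsAll(failureCore))  // same role as `durchschnitt`
                .collect(Collectors.toCollection(ArrayList::new));
    }
}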
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
//    return result;
//}
//else {
//    result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("currentThreadResult: " + currentThreadResult.toString());
Set<Set<UnifyPair>> result;
try {
    CartesianRecursiveHelper cartesianRecursiveHelper = new CartesianRecursiveHelper(
            this
    );
    result = cartesianRecursiveHelper.run();
} catch (UnifyCancelException cancelException) {
    return new HashSet<>();
}

// 2020-02-02: if (variance == 2) insert call to filterOverriding here
writeLog("Return computeCR: " + result.toString());
return result;
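The try/catch above is the receiving end of the cancellation mechanism: UnifyCancelException is unchecked, so a check deep inside the recursion can abort the whole computation without threading a status value through every frame. A minimal sketch of the pattern, with a hypothetical cancelled flag:

import de.dhbwstuttgart.exceptions.UnifyCancelException;
import java.util.HashSet;
import java.util.Set;
import java.util.function.BooleanSupplier;

class CancelSketch {
    static Set<Object> compute(BooleanSupplier cancelled) {
        try {
            return recurse(cancelled, 0);
        } catch (UnifyCancelException e) {
            return new HashSet<>();            // abort: discard partial work
        }
    }

    private static Set<Object> recurse(BooleanSupplier cancelled, int depth) {
        if (cancelled.getAsBoolean()) {
            throw new UnifyCancelException();  // unwinds all recursive frames at once
        }
        return depth < 10 ? recurse(cancelled, depth + 1) : new HashSet<>();
    }
}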
@@ -15,14 +15,12 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi

         T max = max(iterable);
         ret.add(max);

-        Iterator<T> it = iterable.iterator();
-        while (it.hasNext()) {
-            T elem = it.next();
-            if (!(compare(max, elem) == 1) && !max.equals(elem)) {
-                believe.add(elem);
-            }
-        }
+        for (T elem : iterable) {
+            if (!(compare(max, elem) == 1) && !max.equals(elem)) {
+                believe.add(elem);
+            }
+        }
         iterable = believe;
     }
     return ret;
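The hunk above replaces a hand-written iterator loop with an enhanced for loop; the behavior is unchanged. For orientation, a toy version of what the surrounding loop computes, assuming the enclosing method collects all maximal elements of a partial order (here: divisibility on integers; all names hypothetical). The min variant in the next hunk is symmetric:

import java.util.ArrayList;
import java.util.List;

class MaxElementsSketch {
    // Repeatedly pick a maximal element and keep only the elements it does
    // not dominate, so `ret` ends up holding every maximal element.
    static List<Integer> maximalByDivisibility(List<Integer> elems) {
        List<Integer> ret = new ArrayList<>();
        List<Integer> rest = new ArrayList<>(elems);
        while (!rest.isEmpty()) {
            Integer max = rest.stream().reduce((a, b) -> b % a == 0 ? b : a).get();
            ret.add(max);
            List<Integer> believe = new ArrayList<>();
            for (Integer elem : rest) {
                if (max % elem != 0 && !max.equals(elem)) {  // not dominated by max
                    believe.add(elem);
                }
            }
            rest = believe;
        }
        return ret;   // e.g. [2, 3, 4, 8, 12] yields the maximal elements [8, 12]... under divisibility
    }
}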
@@ -35,14 +33,12 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi

         T min = min(iterable);
         ret.add(min);

-        Iterator<T> it = iterable.iterator();
-        while (it.hasNext()) {
-            T elem = it.next();
-            if (!(compare(min, elem) == -1) && !min.equals(elem)) {
-                believe.add(elem);
-            }
-        }
+        for (T elem : iterable) {
+            if (!(compare(min, elem) == -1) && !min.equals(elem)) {
+                believe.add(elem);
+            }
+        }
         iterable = believe;
     }
     return ret;
@@ -58,13 +54,11 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi

     public List<T> smallerThan(T elem, Iterable<T> iterable) {
         ArrayList<T> ret = new ArrayList<>();
-        Iterator<T> it = iterable.iterator();
-        while (it.hasNext()) {
-            T itElem = it.next();
-            if (!itElem.equals(elem) && compare(elem, itElem) == 1) {
-                ret.add(itElem);
-            }
-        }
+        for (T itElem : iterable) {
+            if (!itElem.equals(elem) && compare(elem, itElem) == 1) {
+                ret.add(itElem);
+            }
+        }
         return ret;
     }
@@ -72,18 +66,15 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi
         List<T> ret = greaterThan(elem, iterable);
         ret.add(elem);
         return ret;

     }

     public List<T> greaterThan(T elem, Iterable<T> iterable) {
         ArrayList<T> ret = new ArrayList<>();
-        Iterator<T> it = iterable.iterator();
-        while (it.hasNext()) {
-            T itElem = it.next();
-            if (!itElem.equals(elem) && (compare(elem, itElem) == -1)) {
-                ret.add(itElem);
-            }
-        }
+        for (T itElem : iterable) {
+            if (!itElem.equals(elem) && (compare(elem, itElem) == -1)) {
+                ret.add(itElem);
+            }
+        }
         return ret;
     }
 }
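A short usage sketch for smallerThan/greaterThan, assuming a concrete subclass in the same package: compare returns 1 when the first argument is strictly greater, and incomparable pairs are mapped to 0, which is how the methods above read the comparator. The divisibility order on positive integers serves as the partial order:

import java.util.Arrays;
import java.util.List;

class DivisibilityOrdering extends OrderingExtend<Integer> {
    @Override
    public int compare(Integer a, Integer b) {
        if (a.equals(b)) return 0;
        if (a % b == 0) return 1;    // b divides a: a is greater
        if (b % a == 0) return -1;   // a divides b: a is smaller
        return 0;                    // incomparable
    }

    public static void main(String[] args) {
        DivisibilityOrdering oup = new DivisibilityOrdering();
        List<Integer> elems = Arrays.asList(2, 3, 4, 8, 12);
        System.out.println(oup.smallerThan(8, elems));  // [2, 4]
        System.out.println(oup.greaterThan(2, elems));  // [4, 8, 12]
    }
}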
50  src/main/java/de/dhbwstuttgart/util/Triple.java  Normal file
@@ -0,0 +1,50 @@
package de.dhbwstuttgart.util;

import java.util.Objects;
import java.util.Optional;

public class Triple<T1, T2, T3> {
    private final T1 value1;
    private final T2 value2;
    private final T3 value3;

    public Triple(T1 value1, T2 value2, T3 value3) {
        this.value1 = value1;
        this.value2 = value2;
        this.value3 = value3;
    }

    public T1 getValue1() {
        return value1;
    }

    public T2 getValue2() {
        return value2;
    }

    public T3 getValue3() {
        return value3;
    }

    public String toString() {
        return "(" + value1.toString() + "," + value2.toString() + "," + value3.toString() + ")\n";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Triple<?, ?, ?> oTriple = (Triple<?, ?, ?>) o;
        return Objects.equals(value1, oTriple.value1)
                && Objects.equals(value2, oTriple.value2)
                && Objects.equals(value3, oTriple.value3);
    }

    @Override
    public int hashCode() {
        return Objects.hash(value1, value2, value3);
    }
}
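A brief usage sketch for the Triple class added above: an immutable 3-tuple with value-based equals and hashCode, and a toString that ends in a newline:

import de.dhbwstuttgart.util.Triple;

class TripleDemo {
    public static void main(String[] args) {
        Triple<String, Integer, Boolean> t = new Triple<>("a", 1, true);
        System.out.println(t.getValue1() + " " + t.getValue2() + " " + t.getValue3());
        System.out.println(t.equals(new Triple<>("a", 1, true)));  // true
        System.out.print(t);  // prints (a,1,true) followed by a newline
    }
}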