2 Commits

Author SHA1 Message Date
Fabian Holzwarth  613dceae1d  feat: added Logger class, remove empty println, start cleanup of computeCartesianRecursive  2025-05-23 14:12:25 +02:00
Fabian Holzwarth  81cac06e16  feat: add tool for merging many hash sets in parallel  2025-05-23 14:11:52 +02:00
7 changed files with 218 additions and 130 deletions

View File

@@ -194,7 +194,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
System.out.println("");
// System.out.println("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -203,7 +203,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
System.out.println("");
// System.out.println("");
}
((PlaceholderType)lhs).enableWildcardtable();
}

View File

@@ -15,7 +15,8 @@ public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
return task.compute();
}
private static final int THRESHOLD = 3;
private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;
private final List<Set<T>> list;
private final int start;
@@ -31,8 +32,16 @@ public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
protected Set<T> compute() {
int size = end - start;
int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}
System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));
// size will always be at least one
if (size <= THRESHOLD) {
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
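
The hunk above shows only the sequential branch of the new merge tool. A minimal, self-contained sketch of how such a fork/join merge task could look end to end; the fields and thresholds mirror the diff, but the constructor, the merge() entry point and the split logic below the threshold check are assumptions, since the hunk is truncated:

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Sketch only: class name and everything not visible in the diff is assumed.
public class SetMergeSketch<T> extends RecursiveTask<Set<T>> {

    private static final int LIST_THRESHOLD = 3;       // merge directly if there are only a few sets
    private static final int ELEMENT_THRESHOLD = 1000; // or only a few elements overall

    private final List<Set<T>> list;
    private final int start;
    private final int end;

    public SetMergeSketch(List<Set<T>> list, int start, int end) {
        this.list = list;
        this.start = start;
        this.end = end;
    }

    // Entry point in the spirit of ConcurrentSetMergeTask.merge(...) as called from TypeUnifyTask.
    public static <T> Set<T> merge(List<Set<T>> list) {
        if (list.isEmpty()) {
            return new HashSet<>();
        }
        return new SetMergeSketch<>(list, 0, list.size()).compute();
    }

    @Override
    protected Set<T> compute() {
        int size = end - start;
        int totalElements = 0;
        for (int i = start; i < end; i++) {
            totalElements += list.get(i).size();
        }
        // Small workload: merge sequentially into the first set, as in the diff above.
        if (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
            Set<T> result = list.get(start);
            for (int i = start + 1; i < end; i++) {
                result.addAll(list.get(i));
            }
            return result;
        }
        // Larger workload: split in half, fork one half, compute the other, then join.
        int mid = start + size / 2;
        SetMergeSketch<T> left = new SetMergeSketch<>(list, start, mid);
        SetMergeSketch<T> right = new SetMergeSketch<>(list, mid, end);
        right.fork();
        Set<T> leftResult = left.compute();
        leftResult.addAll(right.join());
        return leftResult;
    }
}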

View File

@@ -22,6 +22,7 @@ import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Pair;
import java.io.File;
import java.io.FileWriter;
@@ -657,82 +658,84 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
//a <. theta, theta <. a oder a =. theta enthalten
Set<Set<UnifyPair>> oneElems = new HashSet<>();
oneElems.addAll(topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
oneElems.forEach(x -> {
Logger.print("Start computeCartesianRecursive with " + topLevelSets.size() + " topLevelSets");
Set<Set<UnifyPair>> singleElementSets = TypeUnifyTaskHelper.getSingleElementSets(topLevelSets);
singleElementSets.forEach(x -> {
if (x instanceof Constraint)
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) x).getmethodSignatureConstraint());
});
//optNextSet: Eine mehrelementige Menge, wenn vorhanden
Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size() > 1).findAny();
if (!optNextSet.isPresent()) {//Alle Elemente sind 1-elementig
Set<Set<UnifyPair>> result = unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
return result;
/*
* @var optionalAnyMultiElementSet A multi-element set, if one exists
*/
Optional<Set<? extends Set<UnifyPair>>> optionalAnyMultiElementSet = topLevelSets.stream().filter(x -> x.size() > 1).findAny();
if (optionalAnyMultiElementSet.isEmpty()) {
// all elements are single-element sets
return unify2(singleElementSets, eq, oderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
}
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
/*
* @var nextSet is any set with multiple elements
*/
Set<? extends Set<UnifyPair>> nextSet = optionalAnyMultiElementSet.get();
//writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList = new ArrayList<>(nextSet);
List<Set<UnifyPair>> nextSetAsList = new ArrayList<>(nextSet);
/*
try {
//List<Set<UnifyPair>>
//List<Set<UnifyPair>>
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
}
catch (java.lang.IllegalArgumentException e) {
System.out.print("");
}
*/
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
/* Varianzbestimmung Anfang
* Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
// check if all elements of nextSet have the same base
boolean hasSameBase;
if (nextSetAsList.isEmpty()) {
hasSameBase = false;
}
else {
hasSameBase = true;
UnifyPair firstBasePair = null;
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
if (firstBasePair == null) {
firstBasePair = unifyPair.getBasePair();
if (firstBasePair == null) {
hasSameBase = false;
break;
}
}
else if (unifyPair.getBasePair() == null || !unifyPair.getBasePair().equals(firstBasePair)) {
hasSameBase = false;
break;
}
}
}
/*
* oderConstraint = true => either no base pair exists or the base pairs differ
*/
boolean oderConstraint = !hasSameBase;
/* Varianzbestimmung
* Varianz = 1 => Argumentvariable
* Varianz = -1 => Rückgabevariable
* Varianz = 0 => unklar
* Varianz = 2 => Operatoren oderConstraints */
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
Boolean oderConstraint = false;
if (fstBasePair != null) {
Boolean sameBase = true;
for (UnifyPair ele : nextSetasList.get(0)) {//check ob a <. ty base oder ob Ueberladung
sameBase = sameBase && ele.getBasePair() != null && ele.getBasePair().equals(fstBasePair);
}
if (sameBase) { //angefuegt PL 2020-02-30
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a == b) return a;
else return 0;
})) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
} else {
oderConstraint = true;
}
} else {
oderConstraint = true;
int variance;
if (!oderConstraint) {
variance = TypeUnifyTaskHelper.calculateVariance(nextSetAsList);
}
//Varianz-Bestimmung Oder-Constraints
if (oderConstraint) {
if (printtag) System.out.println("nextSetasList " + nextSetasList);
else {
//Varianz-Bestimmung Oder-Constraints
if (printtag) {
System.out.println("nextSetasList " + nextSetAsList);
}
Optional<Integer> optVariance =
nextSetasList.iterator()
.next()
nextSetAsList
.getFirst()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
!(x.getRhsType() instanceof PlaceholderType) &&
@@ -749,10 +752,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
variance = optVariance.isPresent() ? optVariance.get() : 2;
}
/* Varianzbestimmung Ende */
// TODO: continue cleaning code and move some to TypeUnifyTaskHelper
//writeLog("nextSetasList: " + nextSetasList.toString());
Set<UnifyPair> nextSetElem = nextSetasList.get(0);
Set<UnifyPair> nextSetElem = nextSetAsList.get(0);
//writeLog("BasePair1: " + nextSetElem + " " + nextSetElem.iterator().next().getBasePair());
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet
@@ -784,7 +788,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
sameEqSet = oneElems.stream().map(xx -> xx.iterator().next())
sameEqSet = singleElementSets.stream().map(xx -> xx.iterator().next())
.filter(x -> (((x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|| (x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType)))))
.collect(Collectors.toCollection(HashSet::new));
@@ -792,8 +796,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* sameEqSet-Bestimmung Ende */
Set<Set<UnifyPair>> result = new HashSet<>();
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) {
while (nextSetAsList.size() > 0) {
Set<UnifyPair> a_last = a;
/* Liste der Faelle für die parallele Verarbeitung
@@ -811,16 +817,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("nextSetasList: " + nextSetAsList.toString());
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
a = oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
nextSetasList.remove(a);
nextSetAsList.remove(a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
nextSetasListRest = new ArrayList<>(nextSetasList);
nextSetasListRest = new ArrayList<>(nextSetAsList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
@@ -834,14 +840,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//nur für diese wird parallele Berechnung angestossen.
nextSetasListRest = oup.maxElements(nextSetasListRest);
} else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
a = oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
if (oderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetasList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetasList);
nextSetAsList.remove(a);
nextSetasListRest = new ArrayList<>(nextSetAsList);
Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
while (nextSetasListItRest.hasNext()) {
Set<UnifyPair> a_next = nextSetasListItRest.next();
@@ -854,27 +860,27 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//nur für diese wird parallele Berechnung angestossen.
nextSetasListRest = oup.minElements(nextSetasListRest);
} else if (variance == 2) {
a = nextSetasList.remove(0);
a = nextSetAsList.remove(0);
//Fuer alle Elemente wird parallele Berechnung angestossen.
nextSetasListRest = new ArrayList<>(nextSetasList);
nextSetasListRest = new ArrayList<>(nextSetAsList);
} else if (variance == 0) {
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = oup.max(nextSetasList.iterator());
a = oup.max(nextSetAsList.iterator());
} else {
a = oup.min(nextSetasList.iterator());
a = oup.min(nextSetAsList.iterator());
}
nextSetasList.remove(a);
nextSetAsList.remove(a);
} else {
if (oderConstraint) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
a = oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetasList.remove(0);
a = nextSetAsList.remove(0);
}
}
}
@@ -889,12 +895,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(oneElems);
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(singleElementSets);
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + a.toString() + "\n");
//Ergebnisvariable für den aktuelle Thread
Set<Set<UnifyPair>> res = new HashSet<>();
//Menge der Ergebnisse der geforkten Threads
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
@@ -938,7 +945,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) {
nextSetasList.remove(nSaL);
nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
@@ -1030,7 +1037,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) {
nextSetasList.remove(nSaL);
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
@@ -1131,7 +1138,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.remove(0);
nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
nextSetAsList.remove(nSaL); //PL einkommentiert 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
@@ -1207,7 +1214,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
System.out.println("");
// System.out.println("");
List<PlaceholderType> vars_a =
a.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
@@ -1297,50 +1304,45 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
if (parallel) {
List<Set<Set<UnifyPair>>> partialResults = new ArrayList<>();
for (Set<Set<UnifyPair>> par_res : add_res) {
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
// result = par_res;
partialResults = new ArrayList<>();
partialResults.add(par_res);
result = par_res;
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
System.out.println("");
}
} else {
}
else {
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
// result.addAll(par_res);
partialResults.add(par_res);
result.addAll(par_res);
}
}
}
result = ConcurrentSetMergeTask.merge(partialResults);
//break;
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
if (!result.isEmpty() && (!isUndefinedPairSetSet(res) || !aParDef.isEmpty())) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size() > 1)
if (nextSetAsList.iterator().hasNext() && nextSetAsList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetAsList.size() > 1)
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetAsList).iterator();
if (variance == 1) {
System.out.println("");
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (oderConstraint) {
nextSetasList.removeAll(nextSetasListOderConstraints);
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetasList);
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
@@ -1353,39 +1355,39 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetasList.removeAll(erased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetasList);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = oup.smallerEqThan(a_new, nextSetasList);
nextSetasList.removeAll(erased);
List<Set<UnifyPair>> erased = oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetasList);
writeLog("Not Removed: " + nextSetAsList);
}
}
} else {
if (variance == -1) {
System.out.println("");
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (oderConstraint) {
nextSetasList.removeAll(nextSetasListOderConstraints);
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetasList);
List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetAsList);
//a_new muss hingefuegt werden, wenn es nicht vererbt ist, dann wird es spaeter wieder geloescht
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
@@ -1412,24 +1414,24 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetasList.removeAll(erased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetasList);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = oup.greaterEqThan(a_new, nextSetasList);
List<Set<UnifyPair>> erased = oup.greaterEqThan(a_new, nextSetAsList);
nextSetasList.removeAll(erased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetasList);
writeLog("Not Removed: " + nextSetAsList);
}
}
} else {
@@ -1438,10 +1440,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (!oderConstraint) {
break;
} else {
nextSetasList.removeAll(nextSetasListOderConstraints);
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a, nextSetasList);
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
@@ -1451,11 +1453,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetasList.removeAll(erased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetasList);
writeLog("Not Removed: " + nextSetAsList);
}
@@ -1503,7 +1505,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//.filter(y -> abhSubst.contains(y))
.collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
int len = nextSetasList.size();
int len = nextSetAsList.size();
Set<UnifyPair> undefRes = res.stream().reduce((y, z) -> {
y.addAll(z);
return y;
@@ -1519,9 +1521,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (res.size() > 1) {
System.out.println();
}
writeLog("nextSetasList vor filter-Aufruf: " + nextSetasList);
writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
nextSetasList = nextSetasList.stream().filter(x -> {
nextSetAsList = nextSetAsList.stream().filter(x -> {
//Boolean ret = false;
//for (PlaceholderType var : vars) {
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
@@ -1530,8 +1532,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new));
}
writeLog("nextSetasList nach filter-Aufruf: " + nextSetasList);
nofstred = nextSetasList.size();
writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
nofstred = nextSetAsList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
@@ -1540,15 +1542,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("nextSetasList: " + nextSetAsList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetAsList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
// System.out.println("");
}
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;

View File

@@ -0,0 +1,68 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A collection of encapsulated (and thus static) functions to split up large algorithms in TypeUnifyTask
*/
public class TypeUnifyTaskHelper {
/**
* Filter all topLevelSets for those with a single element that contain only one pair:
* a <. theta,
* theta <. a or
* a =. theta
*/
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
return topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
}
/**
* Variance determination (start)
* Oder-constraint: if there is either no base pair or the base pairs differ => oderConstraint = true;
* variance = 1 => argument variable
* variance = -1 => return variable
* variance = 0 => unclear
* variance = 2 => operator Oder-constraints
*/
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a == b) return a;
else return 0;
})) // 2 occurs in particular for Oder-constraints
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
return xi.orElse(0);
}
/**
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
* one side equal to its base pair counterpart
*/
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
return nextSetElement.stream().filter(x ->
x.getPairOp()
.equals(PairOperator.EQUALSDOT))
.filter(x -> // make sure that for a = ty, a really is the type variable we are looking for
x.getLhsType()
.equals(x.getBasePair().getLhsType()) ||
x.getLhsType()
.equals(x.getBasePair().getRhsType())
).findFirst();
}
}
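
The reduce step in calculateVariance collapses conflicting variances to 0 ("unclear"). A small, self-contained illustration of just that step, operating on plain Integer variances instead of the PlaceholderType plumbing (class and method names here are made up for the demo):

import java.util.List;
import java.util.Optional;

public class VarianceReduceDemo {

    public static void main(String[] args) {
        // variances as they would be collected from the placeholder types of one element set
        System.out.println(reduce(List.of(1, 1, 1)));  // Optional[1]  -> argument variable
        System.out.println(reduce(List.of(-1, -1)));   // Optional[-1] -> return variable
        System.out.println(reduce(List.of(1, -1)));    // Optional[0]  -> unclear
        System.out.println(reduce(List.of()));         // Optional.empty -> caller falls back to 0
    }

    // Same shape as the reduce in calculateVariance: keep the variance if all agree, else 0.
    static Optional<Integer> reduce(List<Integer> variances) {
        return variances.stream().reduce((a, b) -> a.equals(b) ? a : 0);
    }
}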

View File

@@ -207,7 +207,7 @@ implements IFiniteClosure {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
System.out.println("");
// System.out.println("");
}
// if C<...> <* C<...> then ... (third case in definition of <*)
@@ -700,8 +700,8 @@ implements IFiniteClosure {
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();

View File

@@ -79,12 +79,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
// System.out.println("");
}
}
else {
@@ -106,11 +106,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (right instanceof SuperType)
{
System.out.println("");
// System.out.println("");
}
}
else {

View File

@@ -0,0 +1,9 @@
package de.dhbwstuttgart.util;
public class Logger {
public static void print(String s) {
System.out.println(s);
}
}
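
Logger.print is currently a plain pass-through to System.out.println (see its use at the start of computeCartesianRecursive above). Routing debug output through one class makes it easy to silence or redirect later; a possible next step, shown only as a sketch and not part of this commit, would be a switch like this:

package de.dhbwstuttgart.util;

public class Logger {

    // Hypothetical flag, not in the committed class: allows turning debug output off globally.
    private static boolean enabled = true;

    public static void setEnabled(boolean value) {
        enabled = value;
    }

    public static void print(String s) {
        if (enabled) {
            System.out.println(s);
        }
    }
}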