Merge branch 'unify-test' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into bigRefactoring

commit 4ba4292db4
Author: Martin Plümicke
Date: 2019-03-20 16:30:21 +01:00

8 changed files with 331 additions and 274 deletions

Changed file: JavaTXCompiler.java

@@ -55,7 +55,7 @@ import org.antlr.v4.parse.ANTLRParser.throwsSpec_return;
public class JavaTXCompiler {
final CompilationEnvironment environment;
-Boolean resultmodel = true;
+Boolean resultmodel = false;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?

Changed file: TypeUnify.java

@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
+import java.io.IOException;
import java.io.Writer;
import java.util.List;
import java.util.Set;
@@ -30,6 +31,13 @@ public class TypeUnify {
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
+try {
+logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
+logFile.flush();
+}
+catch (IOException e) {
+System.err.println("no log-File");
+}
return res;
}
@@ -67,6 +75,13 @@
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
+try {
+logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
+logFile.flush();
+}
+catch (IOException e) {
+System.err.println("no log-File");
+}
return ret;
}
@@ -91,6 +106,13 @@
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
Set<Set<UnifyPair>> res = unifyTask.compute();
+try {
+logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
+logFile.flush();
+}
+catch (IOException e) {
+System.err.println("no log-File");
+}
return res;
}
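Note: the same noShortendElements logging block is appended after each of the three unify entry points above. A minimal sketch of that pattern in isolation, assuming only a java.io.Writer; the class and method names below are illustrative and not part of the commit:

import java.io.IOException;
import java.io.Writer;

// Sketch of the repeated logging guard: write the statistic if a log Writer is usable,
// otherwise report the missing log file and keep going. logStatistics is a hypothetical name.
class NoShortendElementsLogSketch {
    static void logStatistics(Writer logFile, Integer noShortendElements) {
        try {
            logFile.write("\nnoShortendElements: " + noShortendElements + "\n");
            logFile.flush();
        } catch (IOException e) {
            System.err.println("no log-File");
        }
    }
}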

Changed file: TypeUnify2Task.java

@@ -33,7 +33,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
System.out.println("two");
}
one = true;
-Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField);
+Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
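Note: TypeUnify2Task now passes true as the new finalresult argument of unify2, while the consistency checks introduced further down in TypeUnifyTask.checkA call unify(..., false), so speculative sub-unifications are not reported as final results. A minimal sketch of threading such a flag through a recursion; solve and publish are illustrative stand-ins, none of these names come from the compiler:

import java.util.List;

// Sketch: a finalresult flag separates the main solving path, which may publish its
// result, from speculative feasibility checks, which must stay silent.
class FinalResultFlagSketch {
    static int solve(List<Integer> work, boolean finalresult) {
        if (work.isEmpty()) {
            if (finalresult) {
                publish(0); // only top-level computations report their results
            }
            return 0;
        }
        // a speculative check on the tail runs with finalresult = false
        if (solve(work.subList(1, work.size()), false) < 0) {
            return -1; // infeasible, discard without publishing anything
        }
        // the main path forwards the flag unchanged
        return work.get(0) + solve(work.subList(1, work.size()), finalresult);
    }

    static void publish(int result) {
        System.out.println("final result: " + result);
    }
}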

Changed file: TypeUnifyTask.java

@@ -115,6 +115,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
static int noBacktracking;
+static Integer noShortendElements = 0;
public TypeUnifyTask() {
rules = new RuleSet();
}
@@ -218,7 +220,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
-Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
+Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
noOfThread--;
try {
logFile.close();
@@ -241,252 +243,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
Set<UnifyPair> eq = new HashSet<>();
fstElems.stream().forEach(x -> eq.addAll(x));
Set<Set<UnifyPair>> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
try {
//List<Set<UnifyPair>>
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
}
catch (java.lang.IllegalArgumentException e) {
System.out.print("");
}
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
.reduce((a,b)-> {if (a==b) return a; else return 0; }))
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
//if (variance == 1 && nextSetasList.size() > 1) {
// List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
// for (int ii = 0; ii < nextSetasList.size();ii++) {
// al.add(0,nextSetasList.get(ii));
// }
// nextSetasList = al;
//}
//Set<UnifyPair> a = nextSetasListIt.next();
/*if (nextSetasList.size()>1) {zu loeschen
if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else if (variance == 0) {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
*/
if (!nextSetasList.iterator().hasNext())
System.out.print("");
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
writeLog("nextSetasList: " + nextSetasList.toString());
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a = null;
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == 0) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
//nextSetasList.remove(a);
/* zu loeschen
if (nextSetasList.size() > 0) {
if (nextSetasList.size()>1) {
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
}
*/
//PL 2018-03-01
//TODO: 1. Maximum und Minimum unterscheiden
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
elems.add(a);
Set<Set<UnifyPair>> res = new HashSet<>();
if (remainingSets.isEmpty()) {
noou++;
writeLog("Vor unify Aufruf: " + eq.toString());
writeLog("No of Unify " + noou);
System.out.println(noou);
Set<UnifyPair> eq = new HashSet<>();
elems.stream().forEach(x -> eq.addAll(x));
res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
}
else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe);
}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = res;
}
else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
result.addAll(res);
}
//else {
//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
// if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
// result = result;
// }
//}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
if (variance == 1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasList.remove(a_next);
}
else {
System.out.println("");
}
}
}
else { if (variance == -1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasList.remove(0);
}
else {
System.out.println("");
}
}
}
else if (variance == 0) {
//break;
}}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
if (isUndefinedPairSetSet(res)) {
int nofstred= 0;
Set<UnifyPair> abhSubst = res.stream()
.map(b ->
b.stream()
.map(x -> x.getAllSubstitutions())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get();
Set<UnifyPair> b = a;//effective final a
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
//.filter(y -> abhSubst.contains(y))
.collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
int len = nextSetasList.size();
Set<UnifyPair> undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results
Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
.map(x -> { Set<UnifyPair> su = x.getAllSubstitutions(); //alle benutzten Substitutionen
su.add(x.getGroundBasePair()); // urspruengliches Paar
su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen
return new Pair<>(su, x.getGroundBasePair());})
.collect(Collectors.toCollection(HashSet::new));
if (res.size() > 1) {
System.out.println();
}
nextSetasList = nextSetasList.stream().filter(x -> {
//Boolean ret = false;
//for (PlaceholderType var : vars) {
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
//}
return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist??
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new));
nofstred = nextSetasList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a: " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
}
*/
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
//else {
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
} // End of while (nextSetasList.size() > 0)
return result;
}
/**
@@ -495,7 +251,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
-protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -530,10 +286,20 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return ret;
}
-Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
+Set<UnifyPair> eq0;
+Set<UnifyPair> eq0Prime;
+Optional<Set<UnifyPair>> eqSubst = Optional.of(eq);
+do {
+eq0Prime = eqSubst.get();
+eq0 = applyTypeUnificationRules(eq0Prime, fc);
+eqSubst = rules.subst(eq0, oderConstraints);
+} while (eqSubst.isPresent());
eq0.forEach(x -> x.disableCondWildcards());
+writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
+writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
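Note: the single applyTypeUnificationRules call is replaced by a fixpoint loop above: rule application and substitution alternate until rules.subst reports no further change. A compact sketch of that loop shape; applyRules and substitute are illustrative stand-ins for applyTypeUnificationRules and rules.subst:

import java.util.Optional;
import java.util.Set;

// Sketch of the new fixpoint iteration: re-apply the rules as long as the substitution
// step still changes the constraint set. Both helpers are placeholders.
class SubstFixpointSketch {
    static Set<String> toFixpoint(Set<String> eq) {
        Set<String> current;
        Optional<Set<String>> changed = Optional.of(eq);
        do {
            current = applyRules(changed.get());   // corresponds to applyTypeUnificationRules(eq0Prime, fc)
            changed = substitute(current);         // corresponds to rules.subst(eq0, oderConstraints)
        } while (changed.isPresent());
        return current;
    }

    static Set<String> applyRules(Set<String> eq) { return eq; }                          // no-op placeholder
    static Optional<Set<String>> substitute(Set<String> eq) { return Optional.empty(); }  // "nothing changed"
}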
@@ -597,6 +363,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
error.add(undefinedPairs);
+undefinedPairs.forEach(x -> writeLog("AllSubst: " +x.getAllSubstitutions().toString()));
return error;
}
@@ -637,12 +404,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Aufruf von computeCartesianRecursive ANFANG
//writeLog("topLevelSets: " + topLevelSets.toString());
-return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
+return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, finalresult);
}
-Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//Aufruf von computeCartesianRecursive ENDE
//keine Ahnung woher das kommt
@@ -719,15 +486,17 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// System.err.println("log-File nicht vorhanden");
//}
eqPrimePrimeSet.add(eqPrime);
+if (finalresult) {
urm.notify(eqPrimePrimeSet);
}
+}
else if(eqPrimePrime.isPresent()) {
-Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe);
+Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, finalresult);
eqPrimePrimeSet.addAll(unifyres);
}
else {
-Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe);
+Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, finalresult);
eqPrimePrimeSet.addAll(unifyres);
@@ -759,7 +528,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
-Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
@@ -769,7 +538,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
-Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
+Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
@@ -784,16 +553,35 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
if (fstBasePair != null) {
Boolean sameBase = true;
for (UnifyPair ele : nextSetasList.get(0)) {//check ob a <. ty base oder ob Ueberladung
sameBase = sameBase && ele.getBasePair() != null && ele.getBasePair().equals(fstBasePair);
}
if (sameBase) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
-.reduce((a,b)-> {if (a==b) return a; else return 2; })) //2 kommt insbesondere bei Oder-Constraints vor
+.reduce((a,b)-> {if (a==b) return a; else return 0; })) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
}
else {
variance = 2;
}
}
else {
variance = 2;
}
//if (variance == 1 && nextSetasList.size() > 1) {
// List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
// for (int ii = 0; ii < nextSetasList.size();ii++) {
@@ -824,6 +612,42 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
writeLog("nextSetasList: " + nextSetasList.toString());
Set<UnifyPair> nextSetElem = nextSetasList.get(0);
writeLog("BasePair1: " + nextSetElem + " " + nextSetElem.iterator().next().getBasePair());
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet alle Paare a < ty1 oder ty2 < a aus fstElems */
Set<UnifyPair> sameEqSet = new HashSet<>();
if (variance != 2) {
Optional<UnifyPair> optOrigPair = nextSetElem.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
writeLog("optOrigPair: " + optOrigPair);
if (optOrigPair.isPresent()) {
UnifyPair origPair = optOrigPair.get();
UnifyType tyVar;
if (!((tyVar = origPair.getLhsType()) instanceof PlaceholderType)) {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
sameEqSet = fstElems.stream().map(xx -> xx.iterator().next())
.filter(x -> (((x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|| (x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType)))))
.collect(Collectors.toCollection(HashSet::new));
}
}
/* sameEqSet-Bestimmung Ende */
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a_last = a;
@@ -931,6 +755,37 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> res = new HashSet<>();
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
Set<Set<UnifyPair>> aParDef = new HashSet<>();
/* PL 2019-03-11 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
if (variance != 2 && !sameEqSet.isEmpty()) {
Optional<UnifyPair> optAPair = a.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!checkA(aPair, sameEqSet, elems, result)) {
a = null;
noShortendElements++;
continue;
}
}
}
/* PL 2019-03-11 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
@@ -953,6 +808,35 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
nSaL = null;
noShortendElements++;
continue;
}
}
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -1017,6 +901,35 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
nSaL = null;
noShortendElements++;
continue;
}
}
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
@@ -1118,7 +1031,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
} else {
//parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
-res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
+res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
}}}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
@@ -1154,6 +1067,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
//System.out.println(a_last);
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
List<PlaceholderType> varsLast_a =
a_last.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
@@ -1201,6 +1115,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
result.addAll(res);
}}}
}
catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString());
}
}
else {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst:" + result.toString() + " " + res.toString());
@@ -1450,6 +1368,42 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return result;
}
protected Boolean checkA (UnifyPair aPair, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> result) {
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet);
for (UnifyPair sameEq : sameEqSet) {
if (sameEq.getLhsType() instanceof PlaceholderType) {
Set<UnifyPair> localEq = new HashSet<>();
Set<UnifyPair> unitedSubst = new HashSet<>(aPair.getSubstitution());
unitedSubst.addAll(sameEq.getSubstitution());
localEq.add(new UnifyPair(aPair.getRhsType(), sameEq.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
if (isUndefinedPairSetSet(localRes)) {
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
}
else {
Set<UnifyPair> localEq = new HashSet<>();
Set<UnifyPair> unitedSubst = new HashSet<>(aPair.getSubstitution());
unitedSubst.addAll(sameEq.getSubstitution());
localEq.add(new UnifyPair(sameEq.getLhsType(), aPair.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
if (isUndefinedPairSetSet(localRes)) {
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
}
}
//writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
return true;
}
protected boolean couldBecorrect(Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair, Set<UnifyPair> nextElem) {
return reducedUndefResSubstGroundedBasePair.stream()
.map(pair -> {
@@ -1782,7 +1736,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Bei allen die Abhaengigkeit der Elemente aus eq2sAsList als evtl. als Substitution
hinzufuegen
*/
Set<UnifyPair> consideredElements = new HashSet<>();
for(UnifyPair pair : eq2sAsList) {
if (consideredElements.contains(pair)) {
continue;
}
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
@@ -1806,7 +1764,70 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
remElem.add(new UnifyPair(pair.getLhsType(), new SuperType(pair.getRhsType()), PairOperator.EQUALSDOT));
x1.remove(remElem);
}
/* ZU LOESCHEN ANFANG
//System.out.println(x1);
Set<UnifyPair> sameEqSet = eq2sAsList.stream()
.filter(x -> ((x.getLhsType().equals(lhsType) || x.getRhsType().equals(lhsType)) && !x.equals(pair)))
.collect(Collectors.toCollection(HashSet::new));
consideredElements.addAll(sameEqSet);
Set<Set<UnifyPair>> x2 = x1;
Set<Set<UnifyPair>> x1Res = new HashSet<>();
writeLog("pair:\n" + pair.toString());
writeLog("x1 Start:\n" + x1.toString());
writeLog("sameEqSet:\n" + sameEqSet.toString());
for (UnifyPair sameEq : sameEqSet) {
writeLog("x1 Original:\n" + x1.toString());
if (sameEq.getLhsType() instanceof PlaceholderType) {
x1 = x1.stream().filter(y -> {
UnifyPair type = y.stream().filter(z -> z.getLhsType().equals(lhsType)).findFirst().get();
Set<UnifyPair> localEq = new HashSet<>();
Set<UnifyPair> unitedSubst = new HashSet<>(type.getSubstitution());
unitedSubst.addAll(sameEq.getSubstitution());
localEq.add(new UnifyPair(type.getRhsType(), sameEq.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
Boolean localCorr = !isUndefinedPairSetSet(localRes);
if (!localCorr) {
collectErr.addAll(localRes);
}
else {
localRes.forEach(z -> z.addAll(y));
x1Res.addAll(localRes);
}
return localCorr;
}
).collect(Collectors.toCollection(HashSet::new));
}
else {
x1 = x1.stream().filter(y -> {
UnifyPair type = y.stream().filter(z -> z.getLhsType().equals(lhsType)).findFirst().get();
Set<UnifyPair> localEq = new HashSet<>();
Set<UnifyPair> unitedSubst = new HashSet<>(type.getSubstitution());
unitedSubst.addAll(sameEq.getSubstitution());
localEq.add(new UnifyPair(sameEq.getLhsType(), type.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
Boolean localCorr = !isUndefinedPairSetSet(localRes);
if (!localCorr) {
collectErr.addAll(localRes);
}
else {
localRes.forEach(z -> z.addAll(y));
x1Res.addAll(localRes);
}
return localCorr;
}
).collect(Collectors.toCollection(HashSet::new));
}
writeLog("x1 nach Loeschung von " + sameEq.toString()+" :\n" + x1.toString());
}
Set<Set<UnifyPair>> x1ResPrime;
if (sameEqSet.isEmpty()) {
x1ResPrime = x1;
}
else {
x1ResPrime = x1Res;
}
result.get(0).add(x1ResPrime);
ZU LOESCHEN ENDE */
result.get(0).add(x1);
if (x1.isEmpty()) {
undefined.add(pair); //Theta ist nicht im FC => Abbruch
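Note: the new sameEqSet computation and checkA above implement a pruning step: for a candidate pair a =. ty, every already-fixed constraint on the same type variable is re-unified against ty via unify(..., false), and the candidate is dropped (counted in noShortendElements) as soon as one of those local unifications is undefined. A self-contained sketch of the idea with simplified stand-ins for UnifyPair and the local unify call; all names below are illustrative:

import java.util.Set;
import java.util.function.BiPredicate;

// Sketch of the checkA pruning: keep a candidate binding only if it is consistent with
// every other constraint on the same type variable. The constraint strings and the
// solvable predicate are toy stand-ins for UnifyPair and unify(..., false).
class CheckASketch {
    static <C> boolean checkCandidate(C candidate, Set<C> sameEqSet, BiPredicate<C, C> solvable) {
        for (C other : sameEqSet) {
            if (!solvable.test(candidate, other)) {
                return false; // one undefined local unification discards the candidate
            }
        }
        return true; // candidate is compatible with all related constraints
    }

    public static void main(String[] args) {
        Set<String> sameEqSet = Set.of("a <. Number", "Integer <. a");
        // Toy solvability check: accept everything except pairs mentioning "String".
        BiPredicate<String, String> solvable = (cand, other) -> !other.contains("String");
        System.out.println(checkCandidate("a =. Integer", sameEqSet, solvable)); // prints true
    }
}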

Changed file: OrderingUnifyPair.java

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -31,14 +32,25 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
* in dem compare(Theta, Theta') aufgerufen wird.
*/
public int compareEq (UnifyPair left, UnifyPair right) {
+try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
}
else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
+}}
+catch (ClassCastException e) {
+try {
+((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() +"\n\n");
+((FiniteClosure)fc).logFile.flush();
+}
+catch (IOException ie) {
+}
+return -99;
}
}
/*
public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null)
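Note: compareEq is now wrapped in a try/catch so that a ClassCastException from the PlaceholderType casts is logged and mapped to the sentinel value -99 instead of propagating out of the comparison. A minimal sketch of that guard pattern on its own; the class, method, and constant names are illustrative:

// Sketch of the defensive comparison: translate an unexpected runtime type into a
// sentinel value rather than letting the ClassCastException escape the comparator.
// UNCOMPARABLE mirrors the -99 returned by the patched compareEq.
class GuardedCompareSketch {
    static final int UNCOMPARABLE = -99;

    static int compareAsIntegers(Object left, Object right) {
        try {
            return Integer.compare((Integer) left, (Integer) right); // may throw ClassCastException
        } catch (ClassCastException e) {
            return UNCOMPARABLE;
        }
    }

    public static void main(String[] args) {
        System.out.println(compareAsIntegers(1, 2));   // -1
        System.out.println(compareAsIntegers("a", 2)); // -99
    }
}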

Changed file: UnifyPair.java

@@ -233,10 +233,12 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
-ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType();
+ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType()
+ + " " + ((PlaceholderType)lhs).isWildcardable();
}
if (rhs instanceof PlaceholderType) {
-ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType();
+ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType()
+ + " " + ((PlaceholderType)rhs).isWildcardable();
}
return "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
}

Changed file: MatrixOP

@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
-//import java.lang.Byte;
+import java.lang.Byte;
import java.lang.Boolean;
public class MatrixOP extends Vector<Vector<Integer>> {