diff --git a/pom.xml b/pom.xml
index 8ae93bf58..3c31a7e8e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,8 +1,7 @@
-
+http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0
de.dhbwstuttgart
JavaTXcompiler
@@ -68,7 +67,7 @@
src/main/antlr4/java8
- ${project.basedir}/target/generated-sources/de/dhbwstuttgart/parser/antlr
+ ${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/parser/antlr
-package
de.dhbwstuttgart.parser.antlr
@@ -82,7 +81,7 @@
src/main/antlr4/sat
- ${project.basedir}/target/generated-sources/de/dhbwstuttgart/sat/asp/parser/antlr
+ ${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/sat/asp/parser/antlr
-package
de.dhbwstuttgart.sat.asp.parser.antlr
@@ -139,19 +138,10 @@
-
- org.eclipse.tycho
- tycho-p2-repository-plugin
- ${tycho.version}
-
-
- package
-
- archive-repository
-
-
-
-
+
@@ -179,3 +169,5 @@
+
+
diff --git a/src/main/java/de/dhbwstuttgart/bytecode/BytecodeGenMethod.java b/src/main/java/de/dhbwstuttgart/bytecode/BytecodeGenMethod.java
index d46702891..ff2c49a30 100644
--- a/src/main/java/de/dhbwstuttgart/bytecode/BytecodeGenMethod.java
+++ b/src/main/java/de/dhbwstuttgart/bytecode/BytecodeGenMethod.java
@@ -832,7 +832,9 @@ public class BytecodeGenMethod implements StatementVisitor {
System.out.println(methods[i]);
}
methodRefl = getMethod(methodCall.name,methodCall.arglist.getArguments().size(),methCallType, typesOfParams,methods);
- }catch (Exception e2) {
+ }
+ catch (Exception e2) {
+ System.out.println("");
//do nothing
}
}
diff --git a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
index a860adf98..f8d4a363b 100644
--- a/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
+++ b/src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -1,3 +1,4 @@
+//PL 2018-12-19: typeInferenceOld nach typeInference uebertragen
package de.dhbwstuttgart.core;
@@ -15,6 +16,7 @@ import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
+import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
@@ -29,6 +31,7 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import java.io.File;
@@ -44,7 +47,7 @@ public class JavaTXCompiler {
final CompilationEnvironment environment;
public final Map sourceFiles = new HashMap<>();
- Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/test/logFiles/log" geschrieben werden soll?
+ Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile));
@@ -103,7 +106,8 @@ public class JavaTXCompiler {
return new ArrayList<>(allClasses);
}
- public List typeInference() throws ClassNotFoundException {
+/*
+ public List typeInferenceOld() throws ClassNotFoundException {
List allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
for(SourceFile sf : this.sourceFiles.values()) {
@@ -187,10 +191,7 @@ public class JavaTXCompiler {
}
}
return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
- })
- /* PL 2018-11-07 wird in varianceInheritance erledigt
- .map( y -> {
-
+ }).map( y -> {
if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
@@ -200,20 +201,16 @@ public class JavaTXCompiler {
}
}
return y; } )
- */
.collect(Collectors.toCollection(HashSet::new));
- varianceInheritance(xConsSet);
-
-
-
- Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
- //Set> result = unify.unify(xConsSet, finiteClosure);
- System.out.println("RESULT: " + result);
- logFile.write("RES: " + result.toString()+"\n");
- logFile.flush();
- results.addAll(result);
+ varianceInheritance(xConsSet);
+ Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
+ //Set> result = unify.unify(xConsSet, finiteClosure);
+ System.out.println("RESULT: " + result);
+ logFile.write("RES: " + result.toString()+"\n");
+ logFile.flush();
+ results.addAll(result);
}
-
+
results = results.stream().map(x -> {
Optional> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
@@ -236,12 +233,14 @@ public class JavaTXCompiler {
return results.stream().map((unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
}
-
+ */
/**
- * Vererbt alle Variancen
+ * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
+ * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
* @param eq The set of constraints
*/
- private void varianceInheritance(Set eq) {
+ /*
+ private void varianceInheritance(Set eq) {
Set usedTPH = new HashSet<>();
Set phSet = eq.stream().map(x -> {
Set pair = new HashSet<>();
@@ -267,6 +266,203 @@ public class JavaTXCompiler {
phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
}
}
+ */
+
+
+ public List typeInference() throws ClassNotFoundException {
+ List allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
+ //Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
+ for(SourceFile sf : this.sourceFiles.values()) {
+ allClasses.addAll(getAvailableClasses(sf));
+ allClasses.addAll(sf.getClasses());
+ }
+
+ final ConstraintSet cons = getConstraints();
+ Set> results = new HashSet<>();
+ try {
+ FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
+
+ FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses,logFile);
+ System.out.println(finiteClosure);
+ ConstraintSet unifyCons = UnifyTypeFactory.convert(cons);
+
+ Function distributeInnerVars =
+ x -> {
+ UnifyType lhs, rhs;
+ if (((lhs = x.getLhsType()) instanceof PlaceholderType)
+ && ((rhs = x.getRhsType()) instanceof PlaceholderType)
+ && (((PlaceholderType)lhs).isInnerType()
+ || ((PlaceholderType)rhs).isInnerType()))
+ {
+ ((PlaceholderType)lhs).setInnerType(true);
+ ((PlaceholderType)rhs).setInnerType(true);
+ }
+ return x;
+
+ };
+ logFile.write(unifyCons.toString());
+ unifyCons = unifyCons.map(distributeInnerVars);
+ logFile.write(unifyCons.toString());
+ TypeUnify unify = new TypeUnify();
+ //Set> results = new HashSet<>(); Nach vorne gezogen
+ logFile.write("FC:\\" + finiteClosure.toString()+"\n");
+ for(SourceFile sf : this.sourceFiles.values()) {
+ logFile.write(ASTTypePrinter.print(sf));
+ }
+ logFile.flush();
+
+ Set methodParaTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().map(y -> y.getParameterList().getFormalparalist()
+ .stream().filter(z -> z.getType() instanceof TypePlaceholder)
+ .map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
+ .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) )
+ .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;} );
+
+ Set constructorParaTypeVarNames = allClasses.stream().map(x -> x.getConstructors().stream().map(y -> y.getParameterList().getFormalparalist()
+ .stream().filter(z -> z.getType() instanceof TypePlaceholder)
+ .map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
+ .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) )
+ .reduce(new HashSet(), (a,b) -> { a.addAll(b); return a;} );
+
+ Set paraTypeVarNames = methodParaTypeVarNames;
+ paraTypeVarNames.addAll(constructorParaTypeVarNames);
+
+
+ Set returnTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
+ .map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
+
+ Set fieldTypeVarNames = allClasses.stream().map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
+ .map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
+
+ returnTypeVarNames.addAll(fieldTypeVarNames);
+
+ unifyCons = unifyCons.map(x -> {
+ //Hier muss ueberlegt werden, ob
+ //1. alle Argument- und Retuntyp-Variablen in allen UnifyPairs
+ // mit disableWildcardtable() werden.
+ //2. alle Typvariablen mit Argument- oder Retuntyp-Variablen
+ //in Beziehung auch auf disableWildcardtable() gesetzt werden muessen
+ //PL 2018-04-23
+ if ((x.getLhsType() instanceof PlaceholderType)) {
+ if (paraTypeVarNames.contains(x.getLhsType().getName())) {
+ ((PlaceholderType)x.getLhsType()).setVariance((byte)1);
+ ((PlaceholderType)x.getLhsType()).disableWildcardtable();
+ }
+ if (returnTypeVarNames.contains(x.getLhsType().getName())) {
+ ((PlaceholderType)x.getLhsType()).setVariance((byte)-1);
+ ((PlaceholderType)x.getLhsType()).disableWildcardtable();
+ }
+ }
+ if ((x.getRhsType() instanceof PlaceholderType)) {
+ if (paraTypeVarNames.contains(x.getRhsType().getName())) {
+ ((PlaceholderType)x.getRhsType()).setVariance((byte)1);
+ ((PlaceholderType)x.getRhsType()).disableWildcardtable();
+ }
+ if (returnTypeVarNames.contains(x.getRhsType().getName())) {
+ ((PlaceholderType)x.getRhsType()).setVariance((byte)-1);
+ ((PlaceholderType)x.getRhsType()).disableWildcardtable();
+ }
+ }
+ return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
+ });
+ Set varianceTPHold;
+ Set varianceTPH = new HashSet<>();
+ varianceTPH = varianceInheritanceConstraintSet(unifyCons);
+
+ /* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt
+ do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen
+ //anderen Seite übertragen
+ varianceTPHold = new HashSet<>(varianceTPH);
+ varianceTPH = varianceInheritanceConstraintSet(unifyCons);
+ unifyCons.map( y -> {
+ if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
+ if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
+ ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
+ }
+ if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
+ ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
+ }
+ }
+ return y; } ); }
+ while (!varianceTPHold.equals(varianceTPH));
+ */
+
+ //Set> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
+ //Set> result = unify.unify(xConsSet, finiteClosure);
+ Set> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
+ System.out.println("RESULT: " + result);
+ logFile.write("RES: " + result.toString()+"\n");
+ logFile.flush();
+ results.addAll(result);
+
+
+ results = results.stream().map(x -> {
+ Optional> res = new RuleSet().subst(x.stream().map(y -> {
+ if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
+ return y; //alle Paare a <.? b erden durch a =. b ersetzt
+ }).collect(Collectors.toCollection(HashSet::new)));
+ if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
+ return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
+ }
+ else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
+ }).collect(Collectors.toCollection(HashSet::new));
+ System.out.println("RESULT Final: " + results);
+ logFile.write("RES_FINAL: " + results.toString()+"\n");
+ logFile.flush();
+ logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.err.println("kein LogFile");
+ }
+ return results.stream().map((unifyPairs ->
+ new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
+ }
+
+
+ /**
+ * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
+ * wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
+ * @param eq The set of constraints
+ */
+ private Set varianceInheritanceConstraintSet(ConstraintSet cons) {
+ Set eq = cons.getAll();
+ Set usedTPH = new HashSet<>();
+ Set phSet = eq.stream().map(x -> {
+ Set pair = new HashSet<>();
+ if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType());
+ if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType());
+ return pair;
+ }).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;});
+
+ ArrayList phSetVariance = new ArrayList<>(phSet);
+ phSetVariance.removeIf(x -> (x.getVariance() == 0));
+ while(!phSetVariance.isEmpty()) {
+ PlaceholderType a = phSetVariance.remove(0);
+ usedTPH.add(a);
+ //HashMap ht = new HashMap<>();
+ //ht.put(a, a.getVariance());
+ //ConstraintSet eq1 = cons;
+ //eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)));
+ //durch if-Abfrage im foreach geloest
+ cons.forEach(x -> {
+ if (x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)) {
+ x.getRhsType().accept(new distributeVariance(), a.getVariance());
+ }
+ });
+ //eq1 = new HashSet<>(eq);
+ //eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
+ //durch if-Abfrage im foreach geloest
+ cons.forEach(x -> {
+ if (x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)) {
+ x.getLhsType().accept(new distributeVariance(), a.getVariance());
+ }
+ });
+ phSetVariance = new ArrayList<>(phSet); //macht vermutlich keinen Sinn PL 2018-10-18, doch, es koennen neue TPHs mit Variancen dazugekommen sein PL 2018-11-07
+ phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
+ }
+ return usedTPH;
+ }
+
private Map generateTPHMap(ConstraintSet constraints) {
HashMap ret = new HashMap<>();
diff --git a/src/main/java/de/dhbwstuttgart/syntaxtree/factory/UnifyTypeFactory.java b/src/main/java/de/dhbwstuttgart/syntaxtree/factory/UnifyTypeFactory.java
index 71985334e..f3fbdd636 100644
--- a/src/main/java/de/dhbwstuttgart/syntaxtree/factory/UnifyTypeFactory.java
+++ b/src/main/java/de/dhbwstuttgart/syntaxtree/factory/UnifyTypeFactory.java
@@ -1,5 +1,6 @@
package de.dhbwstuttgart.syntaxtree.factory;
+import java.io.FileWriter;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -29,7 +30,7 @@ public class UnifyTypeFactory {
private static ArrayList PLACEHOLDERS = new ArrayList<>();
- public static FiniteClosure generateFC(List fromClasses) throws ClassNotFoundException {
+ public static FiniteClosure generateFC(List fromClasses, FileWriter logFile) throws ClassNotFoundException {
/*
Die transitive Hülle muss funktionieren.
Man darf schreiben List extends AL
@@ -40,7 +41,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden
*/
- return new FiniteClosure(FCGenerator.toUnifyFC(fromClasses));
+ return new FiniteClosure(FCGenerator.toUnifyFC(fromClasses), logFile);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr){
@@ -63,26 +64,26 @@ public class UnifyTypeFactory {
* Convert from
* ASTType -> UnifyType
*/
- public static UnifyType convert(RefTypeOrTPHOrWildcardOrGeneric t){
+ public static UnifyType convert(RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
if(t instanceof GenericRefType){
- return UnifyTypeFactory.convert((GenericRefType)t);
+ return UnifyTypeFactory.convert((GenericRefType)t, innerType);
}else
if(t instanceof FunN){
- return UnifyTypeFactory.convert((FunN)t);
+ return UnifyTypeFactory.convert((FunN)t, innerType);
}else if(t instanceof TypePlaceholder){
- return UnifyTypeFactory.convert((TypePlaceholder)t);
+ return UnifyTypeFactory.convert((TypePlaceholder)t, innerType);
}else if(t instanceof ExtendsWildcardType){
- return UnifyTypeFactory.convert((ExtendsWildcardType)t);
+ return UnifyTypeFactory.convert((ExtendsWildcardType)t, innerType);
}else if(t instanceof SuperWildcardType){
- return UnifyTypeFactory.convert((SuperWildcardType)t);
+ return UnifyTypeFactory.convert((SuperWildcardType)t, innerType);
}else if(t instanceof RefType){
- return UnifyTypeFactory.convert((RefType)t);
+ return UnifyTypeFactory.convert((RefType)t, innerType);
}
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
- public static UnifyType convert(RefType t){
+ public static UnifyType convert(RefType t, Boolean innerType){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)");
Matcher m = p.matcher(t.getName().toString());
@@ -90,14 +91,14 @@ public class UnifyTypeFactory {
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
- return convert(new FunN(t.getParaList()));
+ return convert(new FunN(t.getParaList()), false);
}
}
UnifyType ret;
if(t.getParaList() != null && t.getParaList().size() > 0){
List params = new ArrayList<>();
for(RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()){
- params.add(UnifyTypeFactory.convert(pT));
+ params.add(UnifyTypeFactory.convert(pT, true));
}
ret = new ReferenceType(t.getName().toString(),new TypeParams(params));
}else{
@@ -106,39 +107,45 @@ public class UnifyTypeFactory {
return ret;
}
- public static UnifyType convert(FunN t){
+ public static UnifyType convert(FunN t, Boolean innerType){
UnifyType ret;
List params = new ArrayList<>();
if(t.getParaList() != null && t.getParaList().size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()){
- params.add(UnifyTypeFactory.convert(pT));
+ params.add(UnifyTypeFactory.convert(pT, false));
}
}
ret = FunNType.getFunNType(new TypeParams(params));
return ret;
}
- public static UnifyType convert(TypePlaceholder tph){
+ public static UnifyType convert(TypePlaceholder tph, Boolean innerType){
+ if (tph.getName().equals("AFR")) {
+ System.out.println("XXX"+innerType);
+ }
PlaceholderType ntph = new PlaceholderType(tph.getName());
int in = PLACEHOLDERS.indexOf(ntph);
if (in == -1) {
PLACEHOLDERS.add(ntph);
+ ntph.setInnerType(innerType);
return ntph;
}
else {
- return PLACEHOLDERS.get(in);
+ PlaceholderType oldpht = PLACEHOLDERS.get(in);
+ oldpht.setInnerType(oldpht.isInnerType() || innerType);
+ return oldpht;
}
}
- public static UnifyType convert(GenericRefType t){
+ public static UnifyType convert(GenericRefType t, Boolean innerType){
return new ReferenceType(t.getParsedName());
}
- public static UnifyType convert(WildcardType t){
+ public static UnifyType convert(WildcardType t, Boolean innerType){
if(t.isExtends())
- return new ExtendsType(UnifyTypeFactory.convert(t.getInnerType()));
+ return new ExtendsType(UnifyTypeFactory.convert(t.getInnerType(), false));
else if(t.isSuper())
- return new SuperType(UnifyTypeFactory.convert(t.getInnerType()));
+ return new SuperType(UnifyTypeFactory.convert(t.getInnerType(), false));
else throw new NotImplementedException();
}
@@ -152,22 +159,42 @@ public class UnifyTypeFactory {
}
public static UnifyPair convert(Pair p) {
+ UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
- UnifyPair ret = generateSmallerDotPair(UnifyTypeFactory.convert(p.TA1)
- , UnifyTypeFactory.convert(p.TA2));
- return ret;
+ ret = generateSmallerDotPair(UnifyTypeFactory.convert(p.TA1, false)
+ , UnifyTypeFactory.convert(p.TA2, false));
+ //return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
- UnifyPair ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(p.TA1)
- , UnifyTypeFactory.convert(p.TA2));
- return ret;
+ ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(p.TA1, false)
+ , UnifyTypeFactory.convert(p.TA2, false));
+ //return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
- UnifyPair ret = generateEqualDotPair(UnifyTypeFactory.convert(p.TA1)
- , UnifyTypeFactory.convert(p.TA2));
- return ret;
+ ret = generateEqualDotPair(UnifyTypeFactory.convert(p.TA1, false)
+ , UnifyTypeFactory.convert(p.TA2, false));
+ //return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
- ret = generateSmallerPair(UnifyTypeFactory.convert(p.TA1, false),
- UnifyTypeFactory.convert(p.TA2, false));
+ ret = generateSmallerPair(UnifyTypeFactory.convert(p.TA1, false),
+ UnifyTypeFactory.convert(p.TA2, false));
}else throw new NotImplementedException();
+ UnifyType lhs, rhs;
+ if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
+ && ((PlaceholderType)lhs).isWildcardable()
+ && (rhs = ret.getRhsType()) instanceof PlaceholderType) { //NOTE(review): was ret.getLhsType() — rhs must come from the right-hand side
+ if (lhs.getName().equals("AQ")) {
+ System.out.println("");
+ }
+ ((PlaceholderType)rhs).enableWildcardtable();
+ }
+
+ if (((rhs = ret.getRhsType()) instanceof PlaceholderType) //NOTE(review): was ret.getLhsType() — rhs must come from the right-hand side
+ && ((PlaceholderType)rhs).isWildcardable()
+ && (lhs = ret.getLhsType()) instanceof PlaceholderType) {
+ if (rhs.getName().equals("AQ")) {
+ System.out.println("");
+ }
+ ((PlaceholderType)lhs).enableWildcardtable();
+ }
+ return ret;
}
/**
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java b/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
index 01356fe9a..4d1f076f3 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
@@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.*;
+import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -52,4 +53,29 @@ public class ConstraintSet {
ret.oderConstraints = newOder;
return ret;
}
+
+ public void forEach (Consumer super A> c) {
+ undConstraints.stream().forEach(c);
+ for(Set> oderConstraint : oderConstraints){
+ oderConstraint.parallelStream().forEach((Constraint as) ->
+ as.stream().forEach(c));
+ }
+ }
+
+ public Set getAll () {
+ Set ret = new HashSet<>();
+ ret.addAll(undConstraints);
+ for(Set> oderConstraint : oderConstraints){
+ oderConstraint.parallelStream().forEach((Constraint as) -> ret.addAll(as));
+ }
+ return ret;
+ }
+
+ public List>> getOderConstraints() {
+ return oderConstraints;
+ }
+
+ public Set getUndConstraints() {
+ return undConstraints;
+ }
}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java b/src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
index 2f705c67d..4fd14fbc8 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
@@ -1,3 +1,4 @@
+//PL 2018-12-19: Merge checken
package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.exceptions.NotImplementedException;
@@ -228,6 +229,13 @@ public class TYPEStmt implements StatementVisitor{
binary.operation.equals(BinaryExpr.Operator.ADD)||
binary.operation.equals(BinaryExpr.Operator.SUB)){
Set> numericAdditionOrStringConcatenation = new HashSet<>();
+
+// TODO PL 2018-11-06
+
+ // Auf importierte Typen einschraenken
+ // pruefen, ob Typen richtig bestimmt werden.
+
+
//Zuerst der Fall für Numerische AusdrücPairOpnumericeratorke, das sind Mul, Mod und Div immer:
//see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
index 78f7360f2..3e9956075 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
@@ -22,6 +22,8 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class Match implements IMatch {
@Override
+ //A =. A ==> True
+ //A =. B ==> False (NOTE(review): original comment gave "A =. A" for both cases — confirm intended counterexample)
public Optional match(ArrayList termsList) {
// Start with the identity unifier. Substitutions will be added later.
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
index fa190cb7f..d2428defc 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -1,9 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
+import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -21,5 +23,11 @@ public class TypeUnify {
Set> res = unifyTask.compute();
return res;
}
+
+ public Set> unifyOderConstraints(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
+ Set> res = unifyTask.compute();
+ return res;
+ }
}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index 801cae504..a149be4cc 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -1,3 +1,4 @@
+//PL 2018-12-19: Merge checken
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
@@ -18,6 +19,7 @@ import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
@@ -71,7 +73,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
*/
protected IRuleSet rules;
- protected Set eq;
+ protected Set eq; //und-constraints
+
+ protected List>> oderConstraintsField;
protected IFiniteClosure fc;
@@ -85,6 +89,8 @@ public class TypeUnifyTask extends RecursiveTask>> {
Integer noAllErasedElements = 0;
+ static Integer noou = 0;
+
static int noBacktracking;
public TypeUnifyTask() {
@@ -101,6 +107,25 @@ public class TypeUnifyTask extends RecursiveTask>> {
rules = new RuleSet(logFile);
}
+ public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
+ this.eq = eq;
+ //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+ this.oderConstraintsField = oderConstraints.stream().map(x -> {
+ Set> ret = new HashSet<>();
+ for (Constraint y : x) {
+ ret.add(new HashSet<>(y));
+ }
+ return ret;
+ }).collect(Collectors.toCollection(ArrayList::new));
+
+ //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+ this.fc = fc;
+ this.oup = new OrderingUnifyPair(fc);
+ this.parallel = parallel;
+ this.logFile = logFile;
+ this.log = log;
+ rules = new RuleSet(logFile);
+ }
/**
* Vererbt alle Variancen
@@ -134,12 +159,279 @@ public class TypeUnifyTask extends RecursiveTask>> {
}
}
*/
- @Override
+
protected Set> compute() {
- Set> res = unify(eq, fc, parallel);
+ Set neweq = new HashSet<>(eq);
+ /* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
+ oderConstraintsField.stream()
+ .filter(x -> x.size()==1)
+ .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
+ ArrayList>> remainingOderconstraints = oderConstraintsField.stream()
+ .filter(x -> x.size()>1)
+ .collect(Collectors.toCollection(ArrayList::new));
+ Set> res = unify(neweq, remainingOderconstraints, fc, parallel, 0);
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
+/*
+ @Override
+ protected Set> compute() {
+ Set> fstElems = new HashSet<>();
+ fstElems.add(eq);
+ Set> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel);
+ if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
+ else return res;
+ }
+*/
+
+
+ public Set> computeCartesianRecursiveOderConstraints(Set> fstElems, List>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
+ //ArrayList>> remainingSets = new ArrayList<>(topLevelSets);
+ fstElems.addAll(topLevelSets.stream()
+ .filter(x -> x.size()==1)
+ .map(y -> y.stream().findFirst().get())
+ .collect(Collectors.toCollection(HashSet::new)));
+ ArrayList>> remainingSets = topLevelSets.stream()
+ .filter(x -> x.size()>1)
+ .collect(Collectors.toCollection(ArrayList::new));
+ if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
+ Set eq = new HashSet<>();
+ fstElems.stream().forEach(x -> eq.addAll(x));
+ Set> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
+ return result;
+ }
+ Set> nextSet = remainingSets.remove(0);
+ writeLog("nextSet: " + nextSet.toString());
+ List> nextSetasList =new ArrayList<>(nextSet);
+ try {
+ //List>
+ //nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
+ }
+ catch (java.lang.IllegalArgumentException e) {
+ System.out.print("");
+ }
+ Set> result = new HashSet<>();
+ int variance = 0;
+ Optional xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
+ .filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
+ .map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
+ .reduce((a,b)-> {if (a==b) return a; else return 0; }))
+ .filter(d -> d.isPresent())
+ .map(e -> e.get())
+ .findAny();
+ if (xi.isPresent()) {
+ variance = xi.get();
+ }
+ //if (variance == 1 && nextSetasList.size() > 1) {
+ // List> al = new ArrayList<>(nextSetasList.size());
+ // for (int ii = 0; ii < nextSetasList.size();ii++) {
+ // al.add(0,nextSetasList.get(ii));
+ // }
+ // nextSetasList = al;
+ //}
+ //Set a = nextSetasListIt.next();
+ /*if (nextSetasList.size()>1) {zu loeschen
+ if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
+ System.out.print("");
+ if (variance == 1) {
+ a_next = oup.max(nextSetasList.iterator());
+ }
+ else if (variance == -1) {
+ a_next = oup.min(nextSetasList.iterator());
+ }
+ else if (variance == 0) {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ */
+ if (!nextSetasList.iterator().hasNext())
+ System.out.print("");
+ if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
+ System.out.print("");
+ writeLog("nextSetasList: " + nextSetasList.toString());
+ while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
+ Set a = null;
+ if (variance == 1) {
+ a = oup.max(nextSetasList.iterator());
+ nextSetasList.remove(a);
+ }
+ else if (variance == -1) {
+ a = oup.min(nextSetasList.iterator());
+ nextSetasList.remove(a);
+ }
+ else if (variance == 0) {
+ a = nextSetasList.remove(0);
+ }
+ //writeLog("nextSet: " + nextSetasList.toString()+ "\n");
+ //nextSetasList.remove(a);
+ /* zu loeschen
+ if (nextSetasList.size() > 0) {
+ if (nextSetasList.size()>1) {
+ if (variance == 1) {
+ a_next = oup.max(nextSetasList.iterator());
+ }
+ else if (variance == -1) {
+ a_next = oup.min(nextSetasList.iterator());
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ else {
+ a_next = nextSetasList.iterator().next();
+ }
+ }
+ */
+ //PL 2018-03-01
+ //TODO: 1. Maximum und Minimum unterscheiden
+ //TODO: 2. compare noch für alle Elemente die nicht X =. ty sind erweitern
+ //for(Set a : newSet) {
+ i++;
+ Set> elems = new HashSet>(fstElems);
+ elems.add(a);
+ Set> res = new HashSet<>();
+ if (remainingSets.isEmpty()) {
+ noou++;
+ writeLog("Vor unify Aufruf: " + eq.toString());
+ writeLog("No of Unify " + noou);
+ System.out.println(noou);
+ Set eq = new HashSet<>();
+ elems.stream().forEach(x -> eq.addAll(x));
+ res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
+ }
+ else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
+ res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe);
+
+ }
+ if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
+ //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
+ result = res;
+ }
+ else {
+ if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
+ || (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
+ || result.isEmpty()) {
+ //alle Fehlerfaelle und alle korrekten Ergebnisse jeweils adden
+ result.addAll(res);
+ }
+ //else {
+ //wenn Korrekte Ergebnisse da und Fehlerfälle dazukommen Fehlerfälle ignorieren
+ // if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
+ // result = result;
+ // }
+ //}
+ }
+
+
+
+
+ /* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
+ if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
+ if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
+ System.out.print("");
+ Iterator> nextSetasListIt = new ArrayList>(nextSetasList).iterator();
+ if (variance == 1) {
+ System.out.println("");
+ while (nextSetasListIt.hasNext()) {
+ Set a_next = nextSetasListIt.next();
+ if (a.equals(a_next) ||
+ (oup.compare(a, a_next) == 1)) {
+ nextSetasList.remove(a_next);
+ }
+ else {
+ System.out.println("");
+ }
+ }
+ }
+ else { if (variance == -1) {
+ System.out.println("");
+ while (nextSetasListIt.hasNext()) {
+ Set a_next = nextSetasListIt.next();
+ if (a.equals(a_next) ||
+ (oup.compare(a, a_next) == -1)) {
+ nextSetasList.remove(0);
+ }
+ else {
+ System.out.println("");
+ }
+ }
+ }
+ else if (variance == 0) {
+ //break;
+ }}
+ }
+ /* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
+
+ /* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
+ if (isUndefinedPairSetSet(res)) {
+ int nofstred= 0;
+ Set abhSubst = res.stream()
+ .map(b ->
+ b.stream()
+ .map(x -> x.getAllSubstitutions())
+ .reduce((y,z) -> { y.addAll(z); return y;}).get())
+ .reduce((y,z) -> { y.addAll(z); return y;}).get();
+ Set b = a;//effective final a
+ Set durchschnitt = abhSubst.stream()
+ .filter(x -> b.contains(x))
+ //.filter(y -> abhSubst.contains(y))
+ .collect(Collectors.toCollection(HashSet::new));
+ //Set vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
+ int len = nextSetasList.size();
+ Set undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results
+ Set, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
+ .map(x -> { Set su = x.getAllSubstitutions(); //alle benutzten Substitutionen
+ su.add(x.getGroundBasePair()); // urspruengliches Paar
+ su.removeAll(durchschnitt); //alle aktuell geänderten Paare entfernen
+ return new Pair<>(su, x.getGroundBasePair());})
+ .collect(Collectors.toCollection(HashSet::new));
+ if (res.size() > 1) {
+ System.out.println();
+ }
+ nextSetasList = nextSetasList.stream().filter(x -> {
+ //Boolean ret = false;
+ //for (PlaceholderType var : vars) {
+ // ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
+ //}
+ return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist??
+ })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
+ .collect(Collectors.toCollection(ArrayList::new));
+ nofstred = nextSetasList.size();
+ //NOCH NICHT korrekt PL 2018-10-12
+ //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
+ // .collect(Collectors.toCollection(ArrayList::new));
+ writeLog("res (undef): " + res.toString());
+ writeLog("abhSubst: " + abhSubst.toString());
+ writeLog("a: " + a.toString());
+ writeLog("Durchschnitt: " + durchschnitt.toString());
+ writeLog("nextSet: " + nextSet.toString());
+ writeLog("nextSetasList: " + nextSetasList.toString());
+ writeLog("Number first erased Elements (undef): " + (len - nofstred));
+ writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
+ writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
+ noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
+ writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
+ noBacktracking++;
+ writeLog("Number of Backtracking: " + noBacktracking);
+ System.out.println("");
+ }
+ */
+ //if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
+ // return result;
+ //}
+ //else {
+ // result.removeIf(y -> isUndefinedPairSet(y));
+ //}
+ //else result.stream().filter(y -> !isUndefinedPairSet(y));
+
+
+ } // End of while (nextSetasList.size() > 0)
+ return result;
+ }
+
/**
* Computes all principal type unifiers for a set of constraints.
@@ -147,18 +439,22 @@ public class TypeUnifyTask extends RecursiveTask>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
- protected Set> unify(Set eq, IFiniteClosure fc, boolean parallel) {
+ protected Set> unify(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Set aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
//if (aas.isEmpty()) {
// System.out.println("");
//}
+
+ //.collect(Collectors.toCollection(HashSet::new)));
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
+ rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
+ writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
//eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
/*
@@ -224,11 +520,13 @@ public class TypeUnifyTask extends RecursiveTask>> {
Set undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
- Set>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
+ List>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
+ Set>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
//nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden.
+ writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
// If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
@@ -280,12 +578,13 @@ public class TypeUnifyTask extends RecursiveTask>> {
//Aufruf von computeCartesianRecursive ANFANG
- return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
+ //writeLog("topLevelSets: " + topLevelSets.toString());
+ return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
}
- Set> unify2(Set> setToFlatten, Set eq, IFiniteClosure fc, boolean parallel) {
+ Set> unify2(Set> setToFlatten, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Aufruf von computeCartesianRecursive ENDE
//keine Ahnung woher das kommt
@@ -312,14 +611,15 @@ public class TypeUnifyTask extends RecursiveTask>> {
/*
* Step 5: Substitution
*/
- //System.out.println("vor Subst: " + eqPrime);
+ //writeLog("vor Subst: " + eqPrime);
Optional> eqPrimePrime = rules.subst(eqPrime);
-
+ //writeLog("nach Subst: " + eqPrimePrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
if(parallel) {
- if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
+ if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
+ && oderConstraints.isEmpty()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
//eqPrimePrime Veraenderungen in subst repraesentieren.
@@ -339,26 +639,29 @@ public class TypeUnifyTask extends RecursiveTask>> {
}
else { // sequentiell (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
- if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
+ if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
+ && oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
//eqPrimePrime Veraenderungen in subst repraesentieren.
try {
if (isSolvedForm(eqPrime)) {
- logFile.write(eqPrime.toString()+"\n");
+ logFile.write("eqPrime:" + eqPrime.toString()+"\n");
logFile.flush();
}
}
- catch (IOException e) { }
+ catch (IOException e) {
+ System.err.println("log-File nicht vorhanden");
+ }
eqPrimePrimeSet.add(eqPrime);
}
else if(eqPrimePrime.isPresent()) {
- Set> unifyres = unify(eqPrimePrime.get(), fc, false);
+ Set> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
}
else {
- Set> unifyres = unify(eqPrime, fc, false);
+ Set> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
@@ -387,7 +690,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
- Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, IFiniteClosure fc, boolean parallel) {
+ Set> computeCartesianRecursive(Set> fstElems, ArrayList>> topLevelSets, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
@@ -397,7 +700,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
- Set> result = unify2(fstElems, eq, fc, parallel);
+ Set> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
return result;
}
Set> nextSet = remainingSets.remove(0);
@@ -415,7 +718,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
Optional xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
- .reduce((a,b)-> {if (a==b) return a; else return 0; }))
+ .reduce((a,b)-> {if (a==b) return a; else return 2; })) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
@@ -463,7 +766,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
}
- else if (variance == 0) {
+ else if (variance == 0 || variance == 2) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
@@ -492,10 +795,11 @@ public class TypeUnifyTask extends RecursiveTask>> {
//for(Set a : newSet) {
i++;
Set> elems = new HashSet>(fstElems);
+ writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
elems.add(a);
//if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
- //writeLog("Vor unify2 Aufruf: " + eq.toString());
- Set> res = unify2(elems, eq, fc, parallel);
+ //writeLog("Vor unify2 Aufruf: " + elems.toString());
+ Set> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = res;
@@ -505,22 +809,42 @@ public class TypeUnifyTask extends RecursiveTask>> {
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
- if (!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
+ if ((!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
+ && (a.stream().map(x-> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
+ {
+ //TODO: PL 2019-01-15: Bug 129: Im Moment wird nur das Maximum und das Minimum des aktuellen Elements betrachtet.
+ //Die zu vereinigenden Mengen können mehrere Elemente enthalten. Das ist bisher nicht berücksichtigt
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
- List vars_a = a.stream().filter(x -> (x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
- ||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))).map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
+ //PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
+ System.out.println("");
+ List vars_a =
+ a.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
+ && (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
+ || ((x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))
+ && (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getRhsType() instanceof PlaceholderType)))
+ )
+ .map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
Set fstElemRes = res.iterator().next();
Set compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType)x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
- List varsLast_a = a_last.stream().filter(x -> (x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
- ||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))).map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
+ //System.out.println(a_last);
+ a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});
+ List varsLast_a =
+ a_last.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
+ && (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
+ || ((x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())))
+ && (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getRhsType() instanceof PlaceholderType)))
+ .map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
+ //[(java.util.Vector <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
Set fstElemResult = result.iterator().next();
Set compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType)x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));;
-
if (variance == 1) {
+ writeLog("a_last:" + a_last + " a: " + a);
+ writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
+ writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
@@ -534,6 +858,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
//result = result;
}}}
else { if (variance == -1) {
+ writeLog("a_last:" + a_last + " a: " + a);
+ writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
+ writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
@@ -575,33 +902,41 @@ public class TypeUnifyTask extends RecursiveTask>> {
Iterator> nextSetasListIt = new ArrayList>(nextSetasList).iterator();
if (variance == 1) {
System.out.println("");
+ writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
while (nextSetasListIt.hasNext()) {
Set a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
+ writeLog("Removed: " + a_next.toString());
nextSetasList.remove(a_next);
}
else {
+ writeLog("Not Removed: " + a_next.toString());
System.out.println("");
}
}
}
else { if (variance == -1) {
System.out.println("");
+ writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
while (nextSetasListIt.hasNext()) {
Set a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
- nextSetasList.remove(0);
+ writeLog("Removed: " + a_next.toString());
+ nextSetasList.remove(a_next); //PL geaendert 2019-01-09
}
else {
System.out.println("");
+ writeLog("Not Removed: " + a_next.toString());
}
}
}
else if (variance == 0) {
+ writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
break;
}
+ writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
@@ -614,6 +949,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
.map(x -> x.getAllSubstitutions())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get();
+ abhSubst.addAll(
+ res.stream()
+ .map(b ->
+ b.stream()
+ .map(x -> x.getAllBases())
+ .reduce((y,z) -> { y.addAll(z); return y;}).get())
+ .reduce((y,z) -> { y.addAll(z); return y;}).get()
+ );
Set b = a;//effective final a
Set durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
@@ -645,7 +988,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
- writeLog("a: " + a.toString());
+ writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
@@ -665,7 +1008,10 @@ public class TypeUnifyTask extends RecursiveTask>> {
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
+ writeLog("res: " + res.toString());
}
+
+ writeLog("Return computeCR: " + result.toString());
return result;
}
@@ -942,15 +1288,18 @@ public class TypeUnifyTask extends RecursiveTask>> {
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
- protected Set>>> calculatePairSets(Set eq2s, IFiniteClosure fc, Set undefined) {
- List>>> result = new ArrayList<>(8);
+ protected Set>>> calculatePairSets(Set eq2s, List>> oderConstraintsInput, IFiniteClosure fc, Set undefined, List>> oderConstraintsOutput) {
+ oderConstraintsOutput.addAll(oderConstraintsInput);
+ List>>> result = new ArrayList<>(9);
- // Init all 8 cases
- for(int i = 0; i < 8; i++)
+ // Init all 8 cases + 9. Case: oderConstraints
+ for(int i = 0; i < 9; i++)
result.add(new HashSet<>());
+
ArrayList eq2sprime = new ArrayList<>(eq2s);
Iterator eq2sprimeit = eq2sprime.iterator();
ArrayList eq2sAsList = new ArrayList<>();
+ Boolean first = true;
while(eq2sprimeit.hasNext()) {// alle mit Variance != 0 nach vorne schieben
UnifyPair up = eq2sprimeit.next();
if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType)up.getLhsType()).getVariance() != 0)
@@ -959,8 +1308,45 @@ public class TypeUnifyTask extends RecursiveTask>> {
eq2s.remove(up);
}
}
+ if (eq2sAsList.isEmpty()) {
+ List>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
+ .filter(x -> x.stream()
+ .filter(y ->
+ y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
+ && (((PlaceholderType)(z.getLhsType())).getVariance() != 0))
+ || ((z.getRhsType() instanceof PlaceholderType)
+ && (((PlaceholderType)(z.getRhsType())).getVariance() != 0))
+ ).findFirst().isPresent()
+ ).findFirst().isPresent()).collect(Collectors.toList());
+ if (!oderConstraintsVariance.isEmpty()) {
+ Set> ret = oderConstraintsVariance.get(0);
+ oderConstraintsOutput.remove(ret);
+ //Set retFlat = new HashSet<>();
+ //ret.stream().forEach(x -> retFlat.addAll(x));
+ ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
+ result.get(8).add(ret);
+ first = false;
+ }
+ }
+
eq2sAsList.addAll(eq2s);
- Boolean first = true;
+
+ if (eq2sAsList.isEmpty() && first) {//Alle eq2s sind empty und alle oderConstraints mit Variance != 0 sind bearbeitet
+ if (!oderConstraintsOutput.isEmpty()) {
+ Set> ret = oderConstraintsOutput.remove(0);
+ //if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
+ // System.out.println("M");
+ //Set retFlat = new HashSet<>();
+ //ret.stream().forEach(x -> retFlat.addAll(x));
+ ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
+ result.get(8).add(ret);
+ first = false;
+ }
+ }
+ /*
+ Bei allen die Abhaengigkeit der Elemente aus eq2sAsList als evtl. als Substitution
+ hinzufuegen
+ */
for(UnifyPair pair : eq2sAsList) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
@@ -970,6 +1356,9 @@ public class TypeUnifyTask extends RecursiveTask>> {
if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
//System.out.println(pair);
if (first) { //writeLog(pair.toString()+"\n");
+ if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
+ System.out.println("AR");
+ }
Set> x1 = unifyCase1(pair, fc);
if (pairOp == PairOperator.SMALLERNEQDOT) {
Set remElem = new HashSet<>();
@@ -1112,11 +1501,15 @@ public class TypeUnifyTask extends RecursiveTask>> {
allGen = false;
break;
}
-
+ //if (thetaPrime.getName().equals("java.util.Vector") //Fuer Bug 127
+ // && thetaPrime instanceof ReferenceType
+ // && ((ReferenceType)thetaPrime).getTypeParams().iterator().next().getName().equals("java.util.Vector")
+ // && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) {
+ // System.out.println("");
+ //}
Set cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector, java.util.Vector>, ????java.util.Vector???]
-
//PL 18-02-06 entfernt, kommt durch unify wieder rein
//cs.add(thetaPrime);
//PL 18-02-06 entfernt
@@ -1158,19 +1551,21 @@ public class TypeUnifyTask extends RecursiveTask>> {
}
for(UnifyType tqp : thetaQPrimes) {
+ Collection tphs = tqp.getInvolvedPlaceholderTypes();
Optional opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent()) {
continue;
}
-
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set substitutionSet = new HashSet<>();
for (Entry sigma : unifier) {
- substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT,
+ if (!tphs.contains(sigma.getKey())) {//eingefuegt PL 2019-02-02 Bug 127
+ substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT,
//TODO: nochmals ueberlegen ob hier pair.getSubstitution() korrekt ist, oder ob leere Menge hin müsste
//alle folgenden New UnifyPair ebenfalls ueberpruefen PL 2018-04-19
pair.getSubstitution(), pair));
+ }
}
//List freshTphs = new ArrayList<>(); PL 18-02-06 in die For-Schleife verschoben
for (UnifyType tq : thetaQs) {
@@ -1195,6 +1590,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)//IST DAS RICHTIG??? PL 2018-12-12
freshTphs.add(PlaceholderType.freshPlaceholder());
+ freshTphs.forEach(x -> ((PlaceholderType)x).setInnerType(true));
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair));
}
@@ -1236,6 +1632,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
+ ((PlaceholderType)aPrime).disableWildcardtable();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set resultPrime = new HashSet<>();
@@ -1256,12 +1653,14 @@ public class TypeUnifyTask extends RecursiveTask>> {
*/
private Set> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
PlaceholderType a = (PlaceholderType) pair.getLhsType();
+ a.reversVariance();
SuperType subThetaPrime = (SuperType) pair.getRhsType();
byte variance = pair.getVariance();
Set> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
+ ((PlaceholderType)aPrime).disableWildcardtable();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set resultPrime = new HashSet<>();
@@ -1295,7 +1694,27 @@ public class TypeUnifyTask extends RecursiveTask>> {
break;
}
- for(UnifyType thetaS : fc.greater(theta, pair.getfBounded())) {
+ //eingefuegt PL 2019-01-03 ANFANG
+ //fc.setLogTrue();
+ //writeLog("FBOUNDED: " + pair.getfBounded());
+ //writeLog("Pair: " + pair);
+ Set greater = fc.greater(theta, pair.getfBounded());
+ //writeLog("GREATER: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " ");
+ if (a.isWildcardable()) {
+ Set greater_ext = greater.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
+ .map(x -> {
+ //BinaryOperator> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
+ //HashMap hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
+ // .reduce(new HashMap(),
+ // (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
+ return new SuperType (x);})//.accept(new freshPlaceholder(), hm));}
+ .collect(Collectors.toCollection(HashSet::new));
+ greater.addAll(greater_ext);
+ }
+ //eingefuegt PL 2019-01-03 ENDE
+
+ //for(UnifyType thetaS : fc.greater(theta, pair.getfBounded())) {
+ for(UnifyType thetaS : greater) {
Set resultPrime = new HashSet<>();
Match match = new Match();
@@ -1303,7 +1722,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
for(int i = 0; !allGen && i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshTphs[i]).setVariance(((PlaceholderType)a).getVariance());
- Set fBounded = pair.getfBounded();
+ Set fBounded = new HashSet<>(pair.getfBounded()); //PL 2019-01-09 new HashSet eingefuegt
int i_ef = i;
BiFunction f = (x,y) ->
@@ -1317,7 +1736,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
//}
BinaryOperator bo = (x,y) -> (x || y);
if (fBounded.stream().reduce(false,f,bo)) {
- resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
+ resultPrime.add(new UnifyPair(freshTphs[i], thetaS.getTypeParams().get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
}
else {
fBounded.add(thetaS.getTypeParams().get(i));
@@ -1331,7 +1750,8 @@ public class TypeUnifyTask extends RecursiveTask>> {
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
- //writeLog(resultPrime.toString());
+ //writeLog("FBOUNDED2: " + pair.getfBounded());
+ //writeLog("resultPrime Theta < a: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " " + resultPrime.toString());
}
return result;
@@ -1354,6 +1774,7 @@ public class TypeUnifyTask extends RecursiveTask>> {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshTph).setVariance(a.getVariance());
+ ((PlaceholderType)freshTph).disableWildcardtable();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getfBounded()));
@@ -1407,11 +1828,13 @@ public class TypeUnifyTask extends RecursiveTask>> {
void writeLog(String str) {
if (log) {
try {
- logFile.write(str+"\n");
+ logFile.write(str+"\n\n");
logFile.flush();
}
- catch (IOException e) { }
+ catch (IOException e) {
+ System.err.println("kein LogFile");
+ }
}
}
}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java
index 3e20f4332..37d70d550 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java
@@ -17,6 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
*/
public interface IFiniteClosure {
+ public void setLogTrue();
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java
index 4aa769e55..07c864570 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java
@@ -1,5 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model;
+import java.io.FileWriter;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -30,6 +32,11 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
public class FiniteClosure //extends Ordering //entfernt PL 2018-12-11
implements IFiniteClosure {
+ FileWriter logFile;
+ static Boolean log = false;
+ public void setLogTrue() {
+ log = true;
+ }
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
*/
@@ -46,10 +53,21 @@ implements IFiniteClosure {
*/
private Set pairs;
+ /**
+ * Hastable fuer die greater-Werte, damit sie nicht doppelt berechnet werden muessen
+ */
+ Hashtable