Compare commits

..

6 Commits

Author SHA1 Message Date
95f48ffcb7 modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
checkA replaced by checkNoContradiction
2021-01-14 15:29:06 +01:00
163f0f3047 modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
before deletion of checkA
2021-01-14 14:59:20 +01:00
1cf22d2602 modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
remainingSet removed
fstElems renamed to oneElems
2021-01-14 11:27:19 +01:00
d8ac25234f modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
before removal of Remaining
2021-01-14 10:38:32 +01:00
e00d76ce3b Occurs-Check mit Abbruch eingebaut
finalResult zum Attribut der Klasse gemacht
2021-01-13 19:39:01 +01:00
fec83c3a62 modified: src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
Re-inserted = for the receiver.

	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Started adding comments
2021-01-12 21:27:44 +01:00
46 changed files with 217 additions and 3288 deletions

pom.xml

@ -118,15 +118,7 @@ http://maven.apache.org/maven-v4_0_0.xsd">
</descriptorRefs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>9</source>
<target>9</target>
</configuration>
</plugin>
</plugins>
</plugins>
</build>
<repositories>
<repository>

@ -9,7 +9,6 @@ import java.util.List;
import java.util.Optional;
import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericsGeneratorResultForClass;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.MethodAndConstraints;
@ -75,7 +74,6 @@ public class GeneratedGenericsFinder implements ASTVisitor {
private GenericGenratorResultForSourceFile generatedGenericsForSF;//Ergebnis des GGenerics
private ResultSet resultSet;
private final List<String> methodNameAndParamsT = new ArrayList<>();
private FamilyOfGeneratedGenerics fogg;
private String pkgName;
private JavaClassName className;
@ -133,10 +131,6 @@ public class GeneratedGenericsFinder implements ASTVisitor {
tphExtractor.setResultSet(resultSet);
resolver = new Resolver(resultSet);
classOrInterface.accept(tphExtractor);
//PL 2020-10-16: Ab hier GGenerics implementieren durch Ali
//Rueckgabe an generatedGenericsForSF
fogg = new FamilyOfGeneratedGenerics(tphExtractor);
tphsClass = tphExtractor.tphsClass;
simplifiedConstraints = GenericsGenerator.simplifyConstraints(tphExtractor, tphsClass);
if(!isVisited) {
@ -151,8 +145,8 @@ public class GeneratedGenericsFinder implements ASTVisitor {
if(ggResult != null)
generatedGenericsForSF.addGenericGeneratorResultClass(ggResult);
System.out.println("ddd");
}
}

@ -1,15 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
public class ClassConstraint extends TPHConstraint {
//private TPHConstraint constraint;
public ClassConstraint(String left, String right, Relation rel) {
super(left, right, rel);
}
//besser?
/*public ClassConstraint(TPHConstraint constraint) {
this.constraint = constraint;
}*/
}

@ -1,364 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import com.ibm.icu.text.CurrencyMetaInfo;
import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class FamilyOfGeneratedGenerics {
public List<TPHConstraint> allConstraints = new ArrayList<>();
// HashMap speichert ob TPH in einer Methode oder in der Klasse ist; und wenn es in der Methode ist, in welcher Methode
public HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTPHs = new HashMap<>();
public List<ClassConstraint> classConstraints = new ArrayList<>();
public List<MethodConstraint> methodConstraints = new ArrayList<>();
public FamilyOfGeneratedGenerics(TPHExtractor tphExtractor) {
this.allConstraints = tphExtractor.allCons;
this.posOfTPHs = positionConverter(tphExtractor.allTPHS, tphExtractor.ListOfMethodsAndTph);
this.classConstraints = getClassConstraints(allConstraints,posOfTPHs);
// this.methodConstraints =
}
public static List<ClassConstraint> getClassConstraints(List<TPHConstraint> cs, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) { //Inputparameter List<TPHConstraint> constraintsSet weg
List<ClassConstraint> cs_cl = new ArrayList<>();
List<ClassConstraint> classConstraints1 = typeOfANodeOfAField(cs, posOfTphs);
for (ClassConstraint cons: classConstraints1) {
if (!checkForDuplicates(cons, cs_cl)) {
cs_cl.add(cons);
}
}
List<ClassConstraint> classConstraints2 = transitiveSubtypeForClassTypes(cs, cs_cl); // in Klammer classConstraints1 oder constraintsSet? beides eher
for (ClassConstraint cons: classConstraints2) {
if (!checkForDuplicates(cons, cs_cl)) {
cs_cl.add(cons);
}
}
List<ClassConstraint> classConstraints3 = hasNoSupertypeForClassTypes(cs, cs_cl, posOfTphs);
for (ClassConstraint cons: classConstraints3) {
if (!checkForDuplicates(cons, cs_cl)) {
cs_cl.add(cons);
}
}
return cs_cl;
}
public static List<MethodConstraint> getMethodConstraints(List<TPHConstraint> cs, List<ClassConstraint> cs_cl, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
//TODO: Regeln
List<MethodConstraint> cs_m = new ArrayList<>();
List<MethodConstraint> methodConstraints1 = typeOfTheMethodInClSigma(cs, posOfTphs);
for (MethodConstraint cons: methodConstraints1) {
if (!checkForDuplicates(cons, cs_m)) {
cs_m.add(cons);
}
}
List<MethodConstraint> methodConstraints2 = firstTransitiveSubtypeForMethodTypes(cs, cs_m);
for (MethodConstraint cons: methodConstraints2) {
if (!checkForDuplicates(cons, cs_m)) {
cs_m.add(cons);
}
}
List<MethodConstraint> methodConstraints3 = secondTransitiveSubtypeForMethodTypes(cs, cs_cl, cs_m);
for (MethodConstraint cons: methodConstraints3) {
if (!checkForDuplicates(cons, cs_m)) {
cs_m.add(cons);
}
}
List<MethodConstraint> methodConstraints4 = hasNoSupertypeForMethodTypes(cs, posOfTphs);
for (MethodConstraint cons: methodConstraints4) {
if (!checkForDuplicates(cons, cs_m)) {
cs_m.add(cons);
}
}
List<MethodConstraint> methodConstraints5 = methodTypesWithoutClassTypes(cs_cl, cs_m);
cs_m = methodConstraints5;
return cs_m;
}
/**
* Def. FGG: erste Zeile von cs_cl
* {T < .T' | T is a type variable in a type of a node of a field}
*/
public static List<ClassConstraint> typeOfANodeOfAField(List<TPHConstraint> allConstraints, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
//RuntimeException re = new RuntimeException("enthält EQUALS-Relation");
List<ClassConstraint> tempCC= new ArrayList<>();
for(TPHConstraint allCons: allConstraints){
if(posOfTphs.containsKey(allCons.getLeft()) && allCons.getRight()!=null && allCons.getRel()==Relation.EXTENDS) {
for(String tph: posOfTphs.keySet()) {
if(tph == allCons.getLeft() && posOfTphs.get(tph).fst == PositionFinder.Position.FIELD) {
ClassConstraint consToAdd = new ClassConstraint(tph, allCons.getRight(), allCons.getRel());
if (!checkForDuplicates(consToAdd, tempCC)) {
tempCC.add(consToAdd);
}
}
}
}
/*else if (allCons.getRel() != Relation.EXTENDS) {
throw re;
}*/
}
return tempCC;
}
/**
* Def. FGG: zweite Zeile von cs_cl
* {T' <. T'' | \exists T: (T <. T') \in cs_cl, (T' <. T'') \in cs }
*/
public static List<ClassConstraint> transitiveSubtypeForClassTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl) {
List<ClassConstraint> tempCC= new ArrayList<>();
for(ClassConstraint cCons: cs_cl) {
if(cCons.getLeft() != null && cCons.getRel()==Relation.EXTENDS) {
for(TPHConstraint allCons: allConstraints) {
if(cCons.getRight() == allCons.getLeft() && allCons.getRight() != null && allCons.getRel()==Relation.EXTENDS){
ClassConstraint consToAdd = new ClassConstraint(allCons.getLeft(), allCons.getRight(), allCons.getRel());
if (!checkForDuplicates(consToAdd, tempCC)) {
tempCC.add(consToAdd);
}
}
}
}
}
return tempCC;
}
/**
* Def. FGG: dritte Zeile von cs_cl
* {T <. Object | ((T is a type variable in a type of a node of a field
* or (\exists T~: (T~ <. T) \in cs_cl)) and (\existsnot T': T <. T') \in cs)}
*/
public static List<ClassConstraint> hasNoSupertypeForClassTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
List<ClassConstraint> tempCC= new ArrayList<>();
for(TPHConstraint allCons: allConstraints) {
for(ClassConstraint cCons: cs_cl) {
for(String tph: posOfTphs.keySet()) {
boolean tvInField = posOfTphs.get(tph).fst == PositionFinder.Position.FIELD;
boolean hasSmallerTVInClCons = (posOfTphs.containsKey(cCons.getRight()) && cCons.getRight() == tph && cCons.getLeft() != null);
if( ((tvInField || hasSmallerTVInClCons) && cCons.getRel()==Relation.EXTENDS) &&
checkUpperBound(allConstraints, tph) && allCons.getRel()==Relation.EXTENDS) {
ClassConstraint consToAdd = new ClassConstraint(tph, "Object", Relation.EXTENDS);
if (!checkForDuplicates(consToAdd, tempCC)){
tempCC.add(consToAdd);
}
}
}
}
}
return tempCC;
}
/**
* Def. FGG: erste Zeile von cs_m
* {T < .T' | T is a type variable in a type of the method/constructor m in cl_\sigma, (T <. T') \in cs}
*/
public static List<MethodConstraint> typeOfTheMethodInClSigma(List<TPHConstraint> allConstraints, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) { // cl_\sigma??
//TODO:
List<MethodConstraint> tempMC= new ArrayList<>();
for(TPHConstraint allCons: allConstraints){
if(posOfTphs.containsKey(allCons.getLeft()) && allCons.getRight()!=null && allCons.getRel()==Relation.EXTENDS) {
for(String tph: posOfTphs.keySet()) {
if(tph == allCons.getLeft() && (posOfTphs.get(tph).fst == PositionFinder.Position.METHOD || posOfTphs.get(tph).fst == PositionFinder.Position.CONSTRUCTOR)) {
MethodConstraint consToAdd = new MethodConstraint(allCons.getLeft(), allCons.getRight(), allCons.getRel());
if (!checkForDuplicates(consToAdd, tempMC)) {
tempMC.add(consToAdd);
}
}
}
}
}
return tempMC;
}
/**
* Def. FGG: zweite Zeile von cs_m
* {R' <. S | (R <. R'), (S <. S') \in cs_m and (R',S) is in the transitive closure of cs}
*/
public static List<MethodConstraint> firstTransitiveSubtypeForMethodTypes(List<TPHConstraint> allConstraints, List<MethodConstraint> cs_m) { //transitive closure of cs
//TODO:
List<MethodConstraint> tempMC= new ArrayList<>();
List<TPHConstraint> tcOfCs = buildTransitiveClosure(allConstraints);
for(MethodConstraint mC1 : cs_m) { //(R <. R')
for(MethodConstraint mC2 : cs_m) { //(S <. S')
String lSide = mC1.getRight(); //R'
String rSide = mC2.getLeft(); //S
for(TPHConstraint tphC : tcOfCs) {
if(tphC.getLeft().equals(lSide)&&tphC.getRight().equals(rSide)) { //is it (R',S)
MethodConstraint consToAdd = new MethodConstraint(lSide, rSide, tphC.getRel()); //create (R'<.S)
if (!checkForDuplicates(consToAdd, tempMC)) {
tempMC.add(consToAdd);
}
}
}
}
}
return tempMC;
}
/**
* Def. FGG: dritte Zeile von cs_m
* {R' <. S | (R <. R') \in cs_m, (S <. S') \in cs_cl and (R',S) is in the transitive closure of cs}
*/
public static List<MethodConstraint> secondTransitiveSubtypeForMethodTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl, List<MethodConstraint> cs_m) {
//TODO:
List<MethodConstraint> tempMC= new ArrayList<>();
List<TPHConstraint> tcOfCs = buildTransitiveClosure(allConstraints);
for(ClassConstraint cC : cs_cl) {
for(MethodConstraint mC : cs_m) {
String leftSide = mC.getRight();
String rightSide = cC.getLeft();
for(TPHConstraint tphC : tcOfCs) {
if(tphC.getLeft().equals(leftSide)&&tphC.getRight().equals(rightSide)) {
MethodConstraint consToAdd = new MethodConstraint(tphC.getLeft(), tphC.getRight(), tphC.getRel());
if (!checkForDuplicates(consToAdd, tempMC)) {
tempMC.add(consToAdd);
System.out.println(consToAdd);
}
}
}
}
}
return tempMC;
}
/**
* Def. FGG: vierte Zeile von cs_m
* {T <. Object | (T is a type variable in a type of a node of the method/constructor m in cl_\sigma),
* (\existsnot T': T <. T') \in cs)}
*/
public static List<MethodConstraint> hasNoSupertypeForMethodTypes(List<TPHConstraint> allConstraints, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
//TODO:
List<MethodConstraint> tempMC= new ArrayList<>();
for(String tph: posOfTphs.keySet()) {
for(TPHConstraint allCons: allConstraints) {
if((posOfTphs.get(tph).fst.equals(PositionFinder.Position.METHOD) || posOfTphs.get(tph).fst.equals(PositionFinder.Position.CONSTRUCTOR)) && checkUpperBound(allConstraints,tph)) {
MethodConstraint consToAdd = new MethodConstraint(tph, "Object", Relation.EXTENDS);
if (!checkForDuplicates(consToAdd, tempMC)) {
tempMC.add(consToAdd);
System.out.println(consToAdd);
}
}
}
}
return tempMC;
}
/**
* nimm die Menge cs_cl aus cs_m raus
*/
public static List<MethodConstraint> methodTypesWithoutClassTypes(List<ClassConstraint> cs_cl, List<MethodConstraint> cs_m) {
//TODO:
List<TPHConstraint> tempCC = new ArrayList<>();
for(ClassConstraint cc: cs_cl) {
TPHConstraint tphC = new TPHConstraint(cc.getLeft(), cc.getRight(), cc.getRel());
tempCC.add(tphC);
}
List<TPHConstraint> tempMC = new ArrayList<>();
for(MethodConstraint mc: cs_m) {
TPHConstraint tphC = new TPHConstraint(mc.getLeft(), mc.getRight(), mc.getRel());
tempMC.add(tphC);
}
List<TPHConstraint> tempMC2 = new ArrayList<>();
tempMC2.addAll(tempMC);
List<MethodConstraint> tempMCToReturn = new ArrayList<>();
for(TPHConstraint cc: tempCC) {
for(TPHConstraint mc: tempMC) {
if(cc.getLeft().equals(mc.getLeft()) && cc.getRight().equals(mc.getRight())) {
tempMC2.remove(mc);
}
}
}
for(TPHConstraint tphC: tempMC2) {
MethodConstraint mCons = new MethodConstraint(tphC.getLeft(), tphC.getRight(), tphC.getRel());
tempMCToReturn.add(mCons);
}
return tempMCToReturn;
}
public static boolean checkForDuplicates(TPHConstraint constraint, List list) {
List<TPHConstraint> tempList = list;
boolean hasSame = false;
for (TPHConstraint tphC: tempList) {
hasSame = constraint.getLeft() == tphC.getLeft() &&
constraint.getRight() == tphC.getRight() &&
constraint.getRel() == tphC.getRel(); //constraint already in ArrayList if true
if (hasSame)
return true;
}
return false;
}
public static List<TPHConstraint> buildTransitiveClosure(List list) {
List<TPHConstraint> iterList = new ArrayList<>(list);
List<TPHConstraint> runList = new ArrayList<>(list);
List<TPHConstraint> tcList = new ArrayList<>(list);
boolean addedConToList = false;
for (TPHConstraint cons: iterList) {
for (TPHConstraint cons2: runList) {
if(cons.getRight() == cons2.getLeft()) {
TPHConstraint consToAdd = new TPHConstraint(cons.getLeft(), cons2.getRight(), Relation.EXTENDS);
if (!checkForDuplicates(consToAdd,tcList)) {
tcList.add(consToAdd);
addedConToList = true;
if (addedConToList) {
return buildTransitiveClosure(tcList);
}
}
}
}
}
return tcList;
}
public static boolean checkUpperBound(List<TPHConstraint> cs, String tph) {
for(int i=0; i<cs.size(); i++) {
if(cs.get(i).getLeft() == tph) {
return false;
}
}
return true;
}
public static HashMap<String, PairTphMethod<PositionFinder.Position, String>> positionConverter(HashMap<String, Boolean> allTphs, List<MethodAndTPH> listOfMethodsAndTphs) {
HashMap<String, PairTphMethod<PositionFinder.Position, String>> convertedPositions = new HashMap<>();
for(String tph: allTphs.keySet()) {
if(allTphs.get(tph)) { //if true, then tph is a method-TPH
for(MethodAndTPH methTph: listOfMethodsAndTphs) {
if (methTph.getTphs().contains(tph)) {
convertedPositions.put(tph, new PairTphMethod<>(PositionFinder.Position.METHOD, methTph.getId()));
}
}
} else { // else it is in the class-TPH
convertedPositions.put(tph, new PairTphMethod<>(PositionFinder.Position.FIELD, null));
}
}
return convertedPositions;
}
/* public PositionFinder.Position positionConverter(TPHExtractor tphExtractor) {
if(tphExtractor.allTPHS.keySet())
}*/
/* GeneratedGenericsFinder genGenFinder;
ConstraintsSimplierResult simplifiedConstraints = null;
GenericsGeneratorResultForClass ggResult = null;
Method m;
public void addMethodConstraints(List<MethodConstraint> cs_m) {
genGenFinder.addMethodConstraints(simplifiedConstraints, ggResult, m);
cs_m.add();
}
*/
}
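The removed FamilyOfGeneratedGenerics derives class constraints (cs_cl) and method constraints (cs_m) from the extracted TPH constraints; its buildTransitiveClosure step keeps adding T <. T'' whenever T <. T' and T' <. T'' are already present. A minimal sketch of that closure step on plain string pairs follows; the Edge class and the iterative loop are illustrative simplifications (not the project's TPHConstraint), and strings are compared with equals() rather than the == used above.

import java.util.ArrayList;
import java.util.List;

public class TransitiveClosureSketch {
    // Hypothetical stand-in for a TPHConstraint with Relation.EXTENDS
    static class Edge {
        final String left, right;
        Edge(String left, String right) { this.left = left; this.right = right; }
    }

    static boolean contains(List<Edge> list, Edge e) {
        return list.stream().anyMatch(x -> x.left.equals(e.left) && x.right.equals(e.right));
    }

    // Same idea as buildTransitiveClosure: add (a <. c) whenever (a <. b) and (b <. c) are present,
    // but iteratively instead of recursively.
    static List<Edge> transitiveClosure(List<Edge> input) {
        List<Edge> result = new ArrayList<>(input);
        boolean changed = true;
        while (changed) {
            changed = false;
            List<Edge> snapshot = new ArrayList<>(result);
            for (Edge e1 : snapshot) {
                for (Edge e2 : snapshot) {
                    if (e1.right.equals(e2.left)) {
                        Edge candidate = new Edge(e1.left, e2.right);
                        if (!contains(result, candidate)) {
                            result.add(candidate);
                            changed = true;
                        }
                    }
                }
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // T <. T', T' <. T''  ==>  the closure additionally contains T <. T''
        List<Edge> closure = transitiveClosure(List.of(new Edge("T", "T1"), new Edge("T1", "T2")));
        closure.forEach(e -> System.out.println(e.left + " <. " + e.right));
    }
}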

@ -1,42 +0,0 @@
/*
package de.dhbwstuttgart.bytecode.gGenericsAli;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
*/
/**
* gets set of typed variable constraints with substitutions and the set of typed classes
* and returns the set of families of generated generics and the set of families of type variable mappings
*//*
public class GGenerics implements preGGenerics {
private TVarConstraints tVarCons;
private Substitutions subst;
private TClass typedClass;
public GGenerics(TVarConstraints tVarCons, Substitutions subst, TClass typedClass) throws IOException, ClassNotFoundException {
this.tVarCons = tVarCons;
this.subst = subst;
this.typedClass = typedClass;
}
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath);
compiler.typeInference();
public List<ResultSet> getResultOfTypeInference() {
return null;
}
}
*/

@ -1,9 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
public class MethodConstraint extends TPHConstraint {
public MethodConstraint(String left, String right, Relation rel) {
super(left, right, rel);
}
}

@ -1,43 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import java.util.Objects;
/** A generic class for pairs.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*/
public class PairTphMethod<A, B> {
public final A fst;
public final B snd;
public PairTphMethod(A fst, B snd) {
this.fst = fst;
this.snd = snd;
}
public String toString() {
return "PairTphMethod[" + fst + "," + snd + "]";
}
public boolean equals(Object other) {
return
other instanceof PairTphMethod<?,?> &&
Objects.equals(fst, ((PairTphMethod<?,?>)other).fst) &&
Objects.equals(snd, ((PairTphMethod<?,?>)other).snd);
}
public int hashCode() {
if (fst == null) return (snd == null) ? 0 : snd.hashCode() + 1;
else if (snd == null) return fst.hashCode() + 2;
else return fst.hashCode() * 17 + snd.hashCode();
}
public static <A,B> PairTphMethod<A,B> of(A a, B b) {
return new PairTphMethod<>(a,b);
}
}

@ -1,79 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import java.util.HashMap;
import java.util.Set;
public class PositionFinder{
static HashMap<String, PairTphMethod<Position, String>> posOfTphs = new HashMap<String, PairTphMethod<Position, String>>();
static PairTphMethod<Position, String> whichMethod; // gibt an, in welcher Methode sich TPH befindet (Position.METHOD, id_of_method)
public enum Position{
METHOD,
CONSTRUCTOR,
FIELD
}
public static HashMap<String, PairTphMethod<Position, String>> getPositionOfTPH(SourceFile sf, Set<String> tphs) {
new Walker().visit(sf);
for (String tph: posOfTphs.keySet()) {
System.out.println(tph + " " + posOfTphs.get(tph));
}
return null;
}
public static void putPositionInMethod(String tph, String methodId) {
posOfTphs.put(tph, new PairTphMethod<>(Position.METHOD, methodId));
}
public static void putPositionInField(String tph) {
posOfTphs.put(tph, new PairTphMethod<>(Position.FIELD, null));
}
public static void putPositionInConstructor(String tph, String id) {
posOfTphs.put(tph, new PairTphMethod<>(Position.CONSTRUCTOR, id));
}
static class Walker extends AbstractASTWalker{
Boolean inMethod = false;
Boolean inConstructor = false;
@Override
public void visit(TypePlaceholder tph) {
if (inMethod) {
if (inConstructor) {
// System.out.println(tph);
// putPositionInConstructor(tph.getName(),);
}
// System.out.println(tph);
// putPositionInMethod(tph.getName(),);
} else {
putPositionInField(tph.getName());
}
}
@Override
public void visit(Field field) {
super.visit(field);
}
@Override
public void visit(Method method) {
inMethod = true;
super.visit(method);
}
@Override
public void visit(Constructor cons) {
inConstructor = true;
super.visit(cons);
}
}
}

@ -1,9 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import java.util.List;
interface preGGenerics {
public List<ResultSet> getResultOfTypeInference();
}

@ -43,7 +43,7 @@ public class GatherNames {
}
else{
if(typeDecl.classDeclaration().normalClassDeclaration() != null){
if(!pkgName.isEmpty()){
if(pkgName != ""){
nameString = pkgName + "." + typeDecl.classDeclaration().normalClassDeclaration().Identifier().toString();
}
else{

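This hunk switches between !pkgName.isEmpty() and pkgName != "" for the empty-package check. In Java, != on strings compares object references rather than contents, so the isEmpty() form (or "".equals(pkgName)) is the reliable one. A minimal, self-contained illustration (hypothetical StringCompareSketch class, unrelated to GatherNames):

public class StringCompareSketch {
    public static void main(String[] args) {
        String pkgName = new String("");         // empty, but a distinct object from the literal ""
        System.out.println(pkgName != "");       // true: different references
        System.out.println(!pkgName.isEmpty());  // false: the string really is empty
    }
}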
@ -154,7 +154,7 @@ public class UnifyTypeFactory {
public static Constraint<UnifyPair> convert(Constraint<Pair> constraint){
Constraint<UnifyPair> unifyPairConstraint = constraint.stream()
.map(UnifyTypeFactory::convert)
.collect(Collectors.toCollection( () -> new Constraint<UnifyPair>(convert(constraint.getExtendConstraint()))));
.collect(Collectors.toCollection( () -> new Constraint<UnifyPair> (constraint.isInherited(), convert(constraint.getExtendConstraint()))));
return unifyPairConstraint;
}
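The changed collector hands Collectors.toCollection a supplier that builds the target Constraint with the source's isInherited flag, so the flag survives the element-wise conversion. A reduced sketch of the same pattern with a hypothetical FlaggedSet class (not the project's Constraint):

import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;

public class FlagPropagationSketch {
    // Hypothetical simplification of Constraint<A>: a HashSet plus a flag
    static class FlaggedSet<A> extends HashSet<A> {
        private static final long serialVersionUID = 1L;
        final boolean inherited;
        FlaggedSet(boolean inherited) { this.inherited = inherited; }
    }

    public static void main(String[] args) {
        FlaggedSet<Integer> source = new FlaggedSet<>(true);
        source.addAll(List.of(1, 2, 3));

        // The supplier creates the target set with the source's flag,
        // so the mapping keeps the metadata alongside the converted elements.
        FlaggedSet<String> converted = source.stream()
                .map(String::valueOf)
                .collect(Collectors.toCollection(() -> new FlaggedSet<String>(source.inherited)));

        System.out.println(converted + " inherited=" + converted.inherited);
    }
}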

@ -64,10 +64,6 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
}
}
@Override
public int hashCode() {
return this.getName().hashCode();
}
public String toString()
{

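The hunk above drops the name-based hashCode() override from TypePlaceholder. If equals stays value-based while hashCode falls back to the identity hash, hash-based collections can miss elements that compare equal; whether that applies here depends on the remaining equals implementations, so the following is only a general illustration with a hypothetical Name class:

import java.util.HashSet;
import java.util.Set;

public class HashCodeContractSketch {
    // Hypothetical name-based placeholder: equals overridden, hashCode NOT overridden
    static class Name {
        final String name;
        Name(String name) { this.name = name; }
        @Override public boolean equals(Object o) {
            return o instanceof Name && ((Name) o).name.equals(name);
        }
        // no hashCode(): falls back to the identity hash
    }

    public static void main(String[] args) {
        Set<Name> set = new HashSet<>();
        set.add(new Name("T"));
        // equals says the two objects are equal, but the HashSet will usually not find the second one
        System.out.println(new Name("T").equals(new Name("T"))); // true
        System.out.println(set.contains(new Name("T")));         // usually false
    }
}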
@ -17,13 +17,15 @@ public class MethodAssumption extends Assumption{
private ClassOrInterface receiver;
private RefTypeOrTPHOrWildcardOrGeneric retType;
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
private final Boolean isInherited;
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope){
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited){
super(scope);
this.receiver = receiver;
this.retType = retType;
this.params = params;
this.isInherited = isInherited;
}
/*
@ -70,4 +72,8 @@ public class MethodAssumption extends Assumption{
return receiverType;
}
public Boolean isInherited() {
return isInherited;
}
}

@ -6,17 +6,32 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
//TODO: Remove this class
public class Constraint<A> extends HashSet<A> {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//wird nur für die Method-Constraints benoetigt
private Constraint<A> extendConstraint = null;
public Constraint(){}
public Constraint(Constraint<A> extendConstraint) {
public Constraint() {
super();
}
public Constraint(Boolean isInherited) {
this.isInherited = isInherited;
}
public Constraint(Boolean isInherited, Constraint<A> extendConstraint) {
this.isInherited = isInherited;
this.extendConstraint = extendConstraint;
}
public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
return isInherited;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
@ -26,7 +41,7 @@ public class Constraint<A> extends HashSet<A> {
}
public String toString() {
return super.toString()
return super.toString() + " isInherited = " + isInherited
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n" ;
}

@ -68,8 +68,9 @@ public class ConstraintSet<A> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((as.getExtendConstraint() != null)
? () -> new Constraint<B> (as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)))
: () -> new Constraint<B> ()
? () -> new Constraint<B> (as.isInherited(),
as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)))
: () -> new Constraint<B> (as.isInherited())
));
//CSA2CSB.put(as, newConst);

@ -1,107 +0,0 @@
package de.dhbwstuttgart.typeinference.constraints;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
public class ConstraintSet2 {
Set<OderConstraint> oderConstraints = new HashSet<>();
public ConstraintSet2(Set<OderConstraint> constraints){
if(constraints.isEmpty())throw new RuntimeException("Empty constraint set");
this.oderConstraints = constraints;
}
@Override
public String toString(){
BinaryOperator<String> b = (x,y) -> x+y;
return "ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
}
private class ConstraintSpliterator implements Spliterator<Set<Pair>> {
private List<OderConstraint> constraints;
private long i = 0;
private long max = 0;
private List<Integer> sizes;
private List<Long> bases = new ArrayList<>();
ConstraintSpliterator(List<OderConstraint> constraints){
this.constraints = constraints;
sizes = constraints.stream().map(OderConstraint::getSize).collect(Collectors.toList());
long base = 1;
for(int size : sizes){
bases.add(base);
base *= size;
}
i = 0;
max = estimateSize() - 1;
}
ConstraintSpliterator(List<OderConstraint> constraints, long start, long end){
this(constraints);
i = start;
max = end;
}
@Override
public boolean tryAdvance(Consumer<? super Set<Pair>> consumer) {
if(i > max) return false;
consumer.accept(get(i));
i++;
return true;
}
private Set<Pair> get(long num){
Set<Pair> ret = new HashSet<>();
Iterator<Long> baseIt = bases.iterator();
for(OderConstraint constraint : constraints){
ret.addAll(constraint.get((int) ((num/baseIt.next())%constraint.getSize())));
}
return ret;
}
@Override
public Spliterator<Set<Pair>> trySplit() {
if(max - i < 2) return null;
long middle = i + ((max- i) / 2);
long maxOld = max;
max = middle - 1;
return new ConstraintSpliterator(constraints, middle, maxOld);
}
@Override
public long estimateSize() {
long ret = 1;
for (int size : sizes)ret*=size;
return ret;
}
@Override
public int characteristics() {
return ORDERED | SIZED | IMMUTABLE | NONNULL;
}
}
public Stream<Set<Pair>> cartesianProductParallel(){
return StreamSupport.stream(new ConstraintSpliterator(oderConstraints.stream().collect(Collectors.toList())), true);
}
/*
public Map<String, TypePlaceholder> generateTPHMap() {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
*/
}
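The removed ConstraintSpliterator enumerates the cartesian product of the or-constraints by treating the running index as a mixed-radix number: each constraint gets a base (the product of the sizes of the constraints before it), and get(num) picks element (num / base) % size from each constraint. A self-contained sketch of that indexing on plain lists (hypothetical MixedRadixProductSketch, not the project's OderConstraint):

import java.util.ArrayList;
import java.util.List;

public class MixedRadixProductSketch {
    // Pick the num-th combination out of the cartesian product of the alternative lists.
    static List<String> get(long num, List<List<String>> alternatives, List<Long> bases) {
        List<String> combination = new ArrayList<>();
        for (int i = 0; i < alternatives.size(); i++) {
            List<String> alt = alternatives.get(i);
            combination.add(alt.get((int) ((num / bases.get(i)) % alt.size())));
        }
        return combination;
    }

    public static void main(String[] args) {
        List<List<String>> alternatives = List.of(
                List.of("a =. Integer", "a =. Object"),
                List.of("b =. Integer", "b =. Object", "b =. String"));

        // base_i = product of the sizes of all earlier alternatives (here: 1, 2)
        List<Long> bases = new ArrayList<>();
        long base = 1;
        for (List<String> alt : alternatives) {
            bases.add(base);
            base *= alt.size();
        }

        // base is now the total number of combinations (2 * 3 = 6)
        for (long num = 0; num < base; num++) {
            System.out.println(num + ": " + get(num, alternatives, bases));
        }
    }
}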

@ -1,26 +0,0 @@
package de.dhbwstuttgart.typeinference.constraints;
import java.util.HashSet;
import java.util.Set;
public class ConstraintSetBuilder {
private Set<Pair> undConstraints = new HashSet<>();
private Set<OderConstraint> oderConstraints = new HashSet<>();
private boolean done = false;
public void addUndConstraint(Pair p){
undConstraints.add(p);
}
public void addOderConstraint(OderConstraint orConstraint) {
oderConstraints.add(orConstraint);
}
public ConstraintSet2 build(){
if(done)throw new RuntimeException("Trying to build cartesian product twice");
this.done = true;
if(!undConstraints.isEmpty())
oderConstraints.add(new OderConstraint(Set.of(undConstraints)));
return new ConstraintSet2(oderConstraints);
}
}

@ -1,24 +0,0 @@
package de.dhbwstuttgart.typeinference.constraints;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class OderConstraint {
private final List<Set<Pair>> cons;
public OderConstraint(Set<Set<Pair>> orCons){
if(orCons.isEmpty())throw new RuntimeException("Empty constraint set");
for(Set<Pair> c : orCons){
if(c.isEmpty())throw new RuntimeException("Empty constraint set");
}
this.cons = orCons.stream().collect(Collectors.toList());
}
public int getSize(){
return cons.size();
}
public Set<Pair> get(int l) {
return cons.get(l);
}
}

@ -179,7 +179,7 @@ public class TYPEStmt implements StatementVisitor{
!(x.TA2 instanceof TypePlaceholder))
? new Pair(x.TA1, new ExtendsWildcardType(x.TA2, x.TA2.getOffset()), PairOperator.EQUALSDOT)
: x)
.collect(Collectors.toCollection(() -> new Constraint<Pair>()));
.collect(Collectors.toCollection(() -> new Constraint<Pair>(oneMethodConstraint.isInherited())));
oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint);
extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint);
methodConstraints.add(extendsOneMethodConstraint);
@ -574,7 +574,7 @@ public class TYPEStmt implements StatementVisitor{
protected Constraint<Pair> generateConstraint(MethodCall forMethod, MethodAssumption assumption,
TypeInferenceBlockInformation info, GenericsResolver resolver){
Constraint<Pair> methodConstraint = new Constraint<>();
Constraint<Pair> methodConstraint = new Constraint<>(assumption.isInherited());
ClassOrInterface receiverCl = assumption.getReceiver();
/*
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
@ -587,9 +587,13 @@ public class TYPEStmt implements StatementVisitor{
*/
RefTypeOrTPHOrWildcardOrGeneric retType = assumption.getReceiverType(resolver);
methodConstraint.add(forMethod.name.equals("apply") ? //PL 2019-11-29: Tenaerer Operator eingefügt, weil bei Lambda-Ausdrücken keine Suntype FunN$$ existiert
new Pair(forMethod.receiver.getType(), retType, PairOperator.EQUALSDOT)
: new Pair(forMethod.receiver.getType(), retType, PairOperator.SMALLERDOT));
methodConstraint.add(new Pair(forMethod.receiver.getType(), retType,
PairOperator.EQUALSDOT));//PL 2020-03-17 SMALLERDOT in EQUALSDOT umgewandelt, weil alle geerbten Methoden in den jeweilen Klassen enthalten sind.
//Fuer Bytecodegenerierung PL 2020-03-09 wird derzeit nicht benutzt ANFANG
//methodConstraint.add(new Pair(forMethod.receiverType, retType,
// PairOperator.EQUALSDOT));
//Fuer Bytecodegenerierung PL 2020-03-09 wird derzeit nicht benutzt ENDE
methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(),
PairOperator.EQUALSDOT));
@ -636,7 +640,7 @@ public class TYPEStmt implements StatementVisitor{
public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
throw new NotImplementedException();
}
}));
}, false));
}
for(ClassOrInterface cl : info.getAvailableClasses()){
for(Method m : cl.getMethods()){
@ -645,7 +649,7 @@ public class TYPEStmt implements StatementVisitor{
RefTypeOrTPHOrWildcardOrGeneric retType = m.getReturnType();//info.checkGTV(m.getReturnType());
ret.add(new MethodAssumption(cl, retType, convertParams(m.getParameterList(),info),
createTypeScope(cl, m)));
createTypeScope(cl, m), m.isInherited));
}
}
}
@ -680,7 +684,7 @@ public class TYPEStmt implements StatementVisitor{
for(Method m : cl.getConstructors()){
if(m.getParameterList().getFormalparalist().size() == argList.getArguments().size()){
ret.add(new MethodAssumption(cl, ofType, convertParams(m.getParameterList(),
info), createTypeScope(cl, m)));
info), createTypeScope(cl, m), m.isInherited));
}
}
}

@ -655,7 +655,6 @@ public class RuleSet implements IRuleSet{
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
@ -669,6 +668,7 @@ public class RuleSet implements IRuleSet{
&& pair.getLhsType() instanceof PlaceholderType)
lhsType = (PlaceholderType) pair.getLhsType();
rhsType = pair.getRhsType(); //PL eingefuegt 2017-09-29 statt !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
if(lhsType != null
//&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL geloescht am 2017-09-29 Begründung: auch Typvariablen muessen ersetzt werden.
&& typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
@ -682,8 +682,9 @@ public class RuleSet implements IRuleSet{
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>(
b.isInherited(),
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)))
: () -> new Constraint<UnifyPair>()
: () -> new Constraint<UnifyPair>(b.isInherited())
));
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
/*

@ -34,7 +34,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else

@ -110,6 +110,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
protected boolean parallel;
//Indicates whether unify was not called from checkA
private boolean finalresult = true;
int rekTiefeField;
Integer nOfUnify = 0;
@ -257,7 +260,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
noOfThread--;
try {
logFile.close();
@ -299,7 +302,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@ -308,9 +311,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
//.collect(Collectors.toCollection(HashSet::new)));
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
@ -339,6 +340,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return ret;
}
/*
* Occurs-Check durchfuehren
*/
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> {
UnifyType lhs, rhs;
return (lhs = x.getLhsType()) instanceof PlaceholderType
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
&& rhs.getTypeParams().occurs((PlaceholderType)lhs);})
.map(x -> { x.setUndefinedPair(); return x;})
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
if (ocurrPairs.size() > 0) {
Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs);
return ret;
}
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
Set<UnifyPair> eq0;
Set<UnifyPair> eq0Prime;
Optional<Set<UnifyPair>> eqSubst = Optional.of(eq);
@ -457,12 +481,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Aufruf von computeCartesianRecursive ANFANG
//writeLog("topLevelSets: " + topLevelSets.toString());
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, finalresult);
return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
}
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Aufruf von computeCartesianRecursive ENDE
//keine Ahnung woher das kommt
@ -551,12 +575,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
else if(eqPrimePrime.isPresent()) {
Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, finalresult);
Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
}
else {
Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, finalresult);
Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
@ -587,22 +611,36 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
/**
* Computes the cartesian product of topLevelSets step by step.
* @param topLevelSets List of sets of sets over which the cartesian product has to be built.
* Ex.: [{{a =. Integer}, {a =. Object}}, {{a =. Vector<b>, b =. Integer}, {a =. Vector<b>, b =. Object}}]
* @param eq Original set of equations which should be unified
* @param oderConstraints Remaining or-constraints
* @param fc The finite closure
* @param parallel True if the algorithm should run in parallel
* @param rekTiefe Depth of the recursive calls
* @return The set of all principal type unifiers
*/
Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
fstElems.addAll(topLevelSets.stream()
//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
//a <. theta, theta <. a oder a =. theta enthalten
Set<Set<UnifyPair>> oneElems = new HashSet<>();
oneElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
ArrayList<Set<? extends Set<UnifyPair>>> remainingSets = topLevelSets.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
//optNextSet: Eine mehrelementige Menge, wenn vorhanden
Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size()>1).findAny();
if (!optNextSet.isPresent()) {//Alle Elemente sind 1-elementig
Set<Set<UnifyPair>> result = unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe);
return result;
}
Set<? extends Set<UnifyPair>> nextSet = remainingSets.remove(0);
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
//writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
/*
@ -617,6 +655,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
/* Varianzbestimmung Anfang
* Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
* Varianz = 1 => Argumentvariable
* Varianz = -1 => Rückgabevariable
* Varianz = 0 => unklar
* Varianz = 2 => Operatoren oderConstraints */
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
Boolean oderConstraint = false;
@ -639,16 +683,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
else {
//variance = 2;
oderConstraint = true;
}
}
else {
//variance = 2;
oderConstraint = true;
}
if (oderConstraint) {//Varianz-Bestimmung Oder-Constraints
//Varianz-Bestimmung Oder-Constraints
if (oderConstraint) {
if (printtag) System.out.println("nextSetasList " + nextSetasList);
Optional<Integer> optVariance =
nextSetasList.iterator()
@ -661,23 +704,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
((PlaceholderType)x.getGroundBasePair().getLhsType()).getVariance())
.findAny();
//Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
//da kein Receiver existiert also keon x.getGroundBasePair().getLhsType() instanceof PlaceholderType
//Es werden alle Elemente des Kartesischen Produkts abgearbeitet
//da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
variance = optVariance.isPresent() ? optVariance.get() : 2;
}
/* Varianzbestimmung Ende */
if (!nextSetasList.iterator().hasNext())
System.out.print("");
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
//writeLog("nextSetasList: " + nextSetasList.toString());
Set<UnifyPair> nextSetElem = nextSetasList.get(0);
//writeLog("BasePair1: " + nextSetElem + " " + nextSetElem.iterator().next().getBasePair());
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet alle Paare a < ty1 oder ty2 < a aus fstElems */
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet
* alle Paare a < ty1 oder ty2 < a aus oneElems */
Set<UnifyPair> sameEqSet = new HashSet<>();
//optOrigPair enthaelt ggf. das Paar a = ty \in nextSet
Optional<UnifyPair> optOrigPair = null;
//if (variance != 2) {
if (!oderConstraint) {
optOrigPair = nextSetElem.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
@ -701,7 +743,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
sameEqSet = fstElems.stream().map(xx -> xx.iterator().next())
sameEqSet = oneElems.stream().map(xx -> xx.iterator().next())
.filter(x -> (((x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|| (x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType)))))
.collect(Collectors.toCollection(HashSet::new));
@ -710,14 +752,21 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* sameEqSet-Bestimmung Ende */
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
while (nextSetasList.size() > 0) {
Set<UnifyPair> a_last = a;
//Liste der Faelle für die parallele Verarbeitung
/* Liste der Faelle für die parallele Verarbeitung
* Enthaelt Elemente, die nicht in Relation zu aktuellem Fall in der
* Variablen a stehen. Diese muesse auf alle Faelle bearbeitet werden,
* Deshalb wird ihre Berechnung parallel angestossen.
*/
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
//Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
//In der Regel ein Element
/* Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
* In der Regel ist dies genau ein Element
* Dieses Element wird später aus nextSetasList geloescht, wenn das jeweils andere Element zum Erfolg
* gefuehrt hat.
*/
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
writeLog("nextSet: " + nextSet.toString());
@ -740,6 +789,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
//Alle maximale Elemente in nextSetasListRest bestimmen
//nur für diese wird parallele Berechnung angestossen.
nextSetasListRest = oup.maxElements(nextSetasListRest);
}
else if (variance == -1) {
@ -760,10 +810,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
//Alle minimalen Elemente in nextSetasListRest bestimmen
//nur für diese wird parallele Berechnung angestossen.
nextSetasListRest = oup.minElements(nextSetasListRest);
}
else if (variance == 2) {
a = nextSetasList.remove(0);
//Fuer alle Elemente wird parallele Berechnung angestossen.
nextSetasListRest = new ArrayList<>(nextSetasList);
}
else if (variance == 0) {
@ -789,54 +842,32 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
//nextSetasList.remove(a);
//PL 2018-03-01
//TODO: 1. Maximum und Minimum unterscheiden
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(oneElems);
writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
//elems.add(a); PL 2019-01-20 Muss weg, weil das in jeweiligen Thread erfolgen muss. Fuer den sequentiellen Fall
//im else-Zweig
//if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
//writeLog("Vor unify2 Aufruf: " + elems.toString());
//Ergebnisvariable für den aktuelle Thread
Set<Set<UnifyPair>> res = new HashSet<>();
//Menge der Ergebnisse der geforkten Threads
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
Set<Set<UnifyPair>> aParDef = new HashSet<>();
/* PL 2019-03-11 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
if (!oderConstraint && !sameEqSet.isEmpty()) {
Optional<UnifyPair> optAPair = a.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!checkA(aPair, sameEqSet, elems, result)) {
a = null;
noShortendElements++;
continue;
}
}
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
* a verworfen und zu nächstem Element von nextSetasList gegangen
*/
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
a = null;
noShortendElements++;
continue;
}
/* PL 2019-03-11 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
* gestartet, der parallel weiterarbeitet.
*/
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
@ -865,35 +896,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("1 RM" + nSaL.toString());
}
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
if (!oderConstraint) {//weiss nicht ob das wirklich stimmt
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
nSaL = null;
noShortendElements++;
continue;
}
if (!oderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
}
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
}
@ -985,35 +995,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("-1 RM" + nSaL.toString());
}
if (!oderConstraint) {//weiss nicht ob das wirklich stimmt
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
//x.getBasePair() != null && ist gegeben wenn variance != 2
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
(x.getPairOp().equals(PairOperator.EQUALSDOT)
/*
(x.getBasePair().getLhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
&& x.getLhsType().equals(x.getBasePair().getRhsType())
*/
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType())
).findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
nSaL = null;
noShortendElements++;
continue;
}
if (!oderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
}
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
}
@ -1156,11 +1145,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
};
}
//noOfThread++;
} else {
//parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
}}}
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = res;
@ -1247,7 +1237,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
else {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst: reuslt: " + result.toString() + " res: " + res.toString());
writeLog("RES Fst: result: " + result.toString() + " res: " + res.toString());
result.addAll(res);
}
}
@ -1259,10 +1249,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
}
//}
//else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
//result.addAll(computeCartesianRecursive(elems, remainingSets, eq, fc, parallel));
//}
if (parallel) {
for (Set<Set<UnifyPair>> par_res : add_res) {
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
@ -1304,6 +1290,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>)x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.stream().forEach(x -> { notErased.addAll(oup.smallerEqThan(x, smallerSetasList)); });
@ -1345,10 +1332,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetasList);
//a_new muss hingefuegt werden, wenn es nicht vererbt ist, dann wird es spaeter wieder geloescht
greaterSetasList.add(a_new);
if (!((Constraint<UnifyPair>)a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>)x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
@ -1399,6 +1387,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a, nextSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>)x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.stream().forEach(x -> { notErased.addAll(oup.smallerEqThan(x, smallerSetasList)); });
@ -1496,7 +1485,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return result;
}
protected Boolean checkA (UnifyPair aPair, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> result) {
/**
* Checks whether (a =. ty) \in a contradicts any constraint in sameEqSet.
* @param a Set containing the actual element of the constraints with a =. ty \in a
* @param sameEqSet Set of constraints of the form a <. ty' and ty' <. a
* @param result Set of results which contains the correct solutions and the
*        error constraints; error constraints are added here.
* @return true if (a =. ty) does not contradict sameEqSet, false otherwise
*/
protected Boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) {
//optAPair enthaelt ggf. das Paar a = ty' \in a
//unterscheidet sich von optOrigPair, da dort a = ty
Optional<UnifyPair> optAPair =
a.stream().filter(x -> (x.getPairOp().equals(PairOperator.EQUALSDOT)))
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
x.getLhsType().equals(x.getBasePair().getRhsType()))
.findFirst();
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet);
for (UnifyPair sameEq : sameEqSet) {
if (sameEq.getLhsType() instanceof PlaceholderType) {
@ -1506,12 +1517,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
unitedSubst.addAll(sameEq.getAllSubstitutions());
unitedSubst.addAll(sameEq.getAllBases());
localEq.add(new UnifyPair(aPair.getRhsType(), sameEq.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
finalresult = false;
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0);
finalresult = true;
if (isUndefinedPairSetSet(localRes)) {
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
}
@ -1522,20 +1535,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
unitedSubst.addAll(sameEq.getAllSubstitutions());
unitedSubst.addAll(sameEq.getAllBases());
localEq.add(new UnifyPair(sameEq.getLhsType(), aPair.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
finalresult = false;
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0);
finalresult = true;
if (isUndefinedPairSetSet(localRes)) {
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
}
}
//writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
return true;
}
return true;
}
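// Worked example (illustrative sketch added for clarity, not part of the original diff):
// for a = { a =. List<x> } and sameEqSet = { a <. Collection<String> } the method builds
// the local equation List<x> <. Collection<String> and unifies it in isolation
// (with finalresult temporarily set to false so the sub-run is not logged);
// if that sub-unification is undefined, the error pairs are copied into result and
// false is returned, otherwise true.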
protected boolean couldBecorrect(Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair, Set<UnifyPair> nextElem) {
return reducedUndefResSubstGroundedBasePair.stream()
.map(pair -> {
@ -2502,7 +2520,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
void writeLog(String str) {
synchronized ( this ) {
if (log) {
if (log && finalresult) {
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");

@ -114,7 +114,6 @@ public abstract class UnifyType {
@Override
public boolean equals(Object obj) {
if(obj == null)return false;
return this.toString().equals(obj.toString());
}
}

@ -1,108 +0,0 @@
package de.dhbwstuttgart.unify2;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* Implementation of the Martelli-Montanari unification algorithm.
* @author Florian Steurer
*/
public class MartelliMontanariUnify {
/**
* Finds the most general unifier sigma of the terms t1,...,tn, so that
* sigma(t1) = sigma(t2) = ... = sigma(tn).
* @param terms The set of terms to be unified
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
*/
public static Optional<Unifier> unify(UnifyType... terms) {
return unify(Arrays.stream(terms).collect(Collectors.toSet()));
}
public static Optional<Unifier> unify(Set<UnifyType> terms) {
// Sets with less than 2 terms are trivially unified
if(terms.size() < 2)
return Optional.of(Unifier.identity());
// For the set of terms {t1,...,tn},
// build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
Iterator<UnifyType> iter = terms.iterator();
UnifyType prev = iter.next();
while(iter.hasNext()) {
UnifyType next = iter.next();
termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
prev = next;
}
// Start with the identity unifier. Substitutions will be added later.
Unifier mgu = Unifier.identity();
// Apply rules while possible
int idx = 0;
while(idx < termsList.size()) {
UnifyPair pair = termsList.get(idx);
UnifyType rhsType = pair.getRhsType();
UnifyType lhsType = pair.getLhsType();
TypeParams rhsTypeParams = rhsType.getTypeParams();
TypeParams lhsTypeParams = lhsType.getTypeParams();
// REDUCE - Rule
if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
Set<UnifyPair> result = new HashSet<>();
// f<...> = g<...> with f != g are not unifiable
if(!rhsType.getName().equals(lhsType.getName()))
return Optional.empty(); // conflict
// f<t1,...,tn> = f<s1,...,sm> are not unifiable
if(rhsTypeParams.size() != lhsTypeParams.size())
return Optional.empty(); // conflict
// f = g is not unifiable (cannot be f = f because erase rule would have been applied)
//if(rhsTypeParams.size() == 0)
//return Optional.empty();
// Unpack the arguments
for(int i = 0; i < rhsTypeParams.size(); i++)
result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));
termsList.remove(idx);
termsList.addAll(result);
continue;
}
// DELETE - Rule
if(pair.getRhsType().equals(pair.getLhsType())) {
termsList.remove(idx);
continue;
}
// SWAP - Rule
if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
termsList.remove(idx);
termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
continue;
}
// OCCURS-CHECK
if(pair.getLhsType() instanceof PlaceholderType
&& pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
return Optional.empty();
// SUBST - Rule
if(lhsType instanceof PlaceholderType) {
mgu.add((PlaceholderType) lhsType, rhsType);
//PL 2018-04-01 check later whether it is correct that no substitutions have to be passed on.
termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
idx = idx+1 == termsList.size() ? 0 : idx+1;
continue;
}
idx++;
}
return Optional.of(mgu);
}
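// Worked example (illustrative, not part of the original diff):
// unify({ List<a>, List<String> }) builds the single equation List<a> =. List<String>;
// REDUCE unpacks it to a =. String, SUBST extends the unifier with { a -> String },
// and the result is the mgu sigma with sigma(List<a>) = sigma(List<String>) = List<String>.
// unify({ List<a>, Set<String> }) fails in REDUCE (different names) and yields Optional.empty().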
}

@ -1,922 +0,0 @@
package de.dhbwstuttgart.unify2;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.antlr.v4.tool.Rule;
import org.apache.commons.io.output.NullOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Implementation of the type inference rules.
* @author Florian Steurer
*
*/
public class RuleSet {
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
public static Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = RuleSet.reduceUpLow(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceLow(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceUp(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : RuleSet.reduceWildcardUpRight(pair);
//PL 2018-03-06 commented out, is probably wrong, cf. JAVA_BSP/Wildcard6.java
//opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
//opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
//opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : RuleSet.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
//try {
// logFile.write("PAIR1 " + pair + "\n");
// logFile.flush();
//}
//catch (IOException e) { }
Optional<Set<UnifyPair>> optSet = RuleSet.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : RuleSet.reduce2(pair);
optSet = optSet.isPresent() ? optSet : RuleSet.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : RuleSet.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : RuleSet.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : RuleSet.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : RuleSet.reduceTphSup(pair);
// FunN Rules
optSet = optSet.isPresent() ? optSet : RuleSet.reduceFunN(pair);
optSet = optSet.isPresent() ? optSet : RuleSet.greaterFunN(pair);
optSet = optSet.isPresent() ? optSet : RuleSet.smallerFunN(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
//try {
// logFile.write("PAIR2 " + pair + "\n");
// logFile.flush();
//}
//catch (IOException e) { }
opt = RuleSet.adapt(pair, fc);
opt = opt.isPresent() ? opt : RuleSet.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : RuleSet.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
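// Worked example (illustrative, not part of the original diff):
// the pair (? extends Number <. ? super Integer) matches reduceUpLow and is rewritten
// to (Number <. Integer); since no further rule and no erase rule applies to the new pair,
// it is re-queued via swapAddOrErase and finally ends up in the target set.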
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
static void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = RuleSet.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(RuleSet.erase1(pair2, fc) || RuleSet.erase3(pair2) || RuleSet.erase2(pair2, fc))
return;
collection.add(pair2);
}
static Optional<UnifyPair> reduceUp(UnifyPair pair) {
// Check if reduce up is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof SuperType))
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return Optional.empty();
// Rule is applicable, unpack the SuperType
return Optional.of(new UnifyPair(lhsType, ((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> reduceLow(UnifyPair pair) {
// Check if rule is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ExtendsType))
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return Optional.empty();
// Rule is applicable, unpack the ExtendsType
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), rhsType, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> reduceUpLow(UnifyPair pair) {
// Check if rule is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ExtendsType))
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof SuperType))
return Optional.empty();
// Rule is applicable, unpack both sides
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(),((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType x = pair.getLhsType();
UnifyType sTypeX;
if(x instanceof ReferenceType)
sTypeX = x;
else if(x instanceof ExtendsType)
sTypeX = ((ExtendsType) x).getExtendedType();
else
return Optional.empty();
UnifyType extY = pair.getRhsType();
if(!(extY instanceof ExtendsType))
return Optional.empty();
if(x.getTypeParams().empty() || extY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
if(x instanceof ExtendsType)
xFromFc = new ExtendsType(xFromFc);
UnifyType extYFromFc = fc.grArg(xFromFc, new HashSet<>()).stream().filter(t -> t.getName().equals(extY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(extYFromFc == null || extYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
TypeParams extYParams = extY.getTypeParams();
TypeParams xParams = x.getTypeParams();
int[] pi = pi(xParams, extYParams);
if(pi.length == 0)
return Optional.empty();
Set<UnifyPair> result = new HashSet<>();
for(int rhsIdx = 0; rhsIdx < extYParams.size(); rhsIdx++)
result.add(new UnifyPair(xParams.get(pi[rhsIdx]), extYParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
static Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType x = pair.getLhsType();
UnifyType sTypeX;
if(x instanceof ReferenceType)
sTypeX = x;
else if(x instanceof SuperType)
sTypeX = ((SuperType) x).getSuperedType();
else
return Optional.empty();
UnifyType supY = pair.getRhsType();
if(!(supY instanceof SuperType))
return Optional.empty();
if(x.getTypeParams().empty() || supY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
if(x instanceof SuperType)
xFromFc = new SuperType(xFromFc);
UnifyType supYFromFc = fc.grArg(xFromFc, new HashSet<>()).stream().filter(t -> t.getName().equals(supY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(supYFromFc == null || supYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
TypeParams supYParams = supY.getTypeParams();
TypeParams xParams = x.getTypeParams();
Set<UnifyPair> result = new HashSet<>();
int[] pi = pi(xParams, supYParams);
if(pi.length == 0)
return Optional.empty();
for(int rhsIdx = 0; rhsIdx < supYParams.size(); rhsIdx++)
result.add(new UnifyPair(supYParams.get(rhsIdx), xParams.get(pi[rhsIdx]), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
static Optional<Set<UnifyPair>> reduceEq(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(lhsType instanceof PlaceholderType || lhsType.getTypeParams().empty())
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!rhsType.getName().equals(lhsType.getName()))
return Optional.empty();
if(rhsType instanceof PlaceholderType || lhsType instanceof PlaceholderType || rhsType.getTypeParams().empty())
return Optional.empty();
if(rhsType.getTypeParams().size() != lhsType.getTypeParams().size())
return Optional.empty();
// No permutation as in the paper is needed
Set<UnifyPair> result = new HashSet<>();
TypeParams lhsTypeParams = lhsType.getTypeParams();
TypeParams rhsTypeParams = rhsType.getTypeParams();
for(int i = 0; i < lhsTypeParams.size(); i++)
result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
static Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType c = pair.getLhsType();
if(!(c instanceof ReferenceType))
return Optional.empty();
UnifyType d = pair.getRhsType();
if(!(d instanceof ReferenceType))
return Optional.empty();
ReferenceType lhsSType = (ReferenceType) c;
ReferenceType rhsSType = (ReferenceType) d;
//try {
// logFile.write("PAIR Rules: " + pair + "\n");
// logFile.flush();
//}
//catch (IOException e) { }
if(lhsSType.getTypeParams().empty() || lhsSType.getTypeParams().size() != rhsSType.getTypeParams().size())
return Optional.empty();
UnifyType cFromFc = fc.getLeftHandedType(c.getName()).orElse(null);
//2018-02-23: returns Vector<Vector<Integer>>: that cannot be right.
//CHECK AGAIN
//PL 18-02-09 inserted, begin
//C and D may also be equal.
if (c.getName().equals(d.getName())) {
Set<UnifyPair> result = new HashSet<>();
TypeParams rhsTypeParams = d.getTypeParams();
TypeParams lhsTypeParams = c.getTypeParams();
for(int rhsIdx = 0; rhsIdx < c.getTypeParams().size(); rhsIdx++)
result.add(new UnifyPair(lhsTypeParams.get(rhsIdx), rhsTypeParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
//PL 18-02-09 inserted, end
//try {
// logFile.write("cFromFc: " + cFromFc);
// logFile.flush();
//}
//catch (IOException e) { }
if(cFromFc == null || !cFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
UnifyType dFromFc = fc.getAncestors(cFromFc).stream().filter(x -> x.getName().equals(d.getName())).findAny().orElse(null);
//try {
// logFile.write("cFromFc: " + cFromFc);
// logFile.flush();
//}
//catch (IOException e) { }
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
//System.out.println("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0)
return Optional.empty();
TypeParams rhsTypeParams = d.getTypeParams();
TypeParams lhsTypeParams = c.getTypeParams();
Set<UnifyPair> result = new HashSet<>();
for(int rhsIdx = 0; rhsIdx < rhsTypeParams.size(); rhsIdx++)
result.add(new UnifyPair(lhsTypeParams.get(pi[rhsIdx]), rhsTypeParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
static Optional<Set<UnifyPair>> reduce2(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
ReferenceType lhsSType;
UnifyType rhsType = pair.getRhsType();
ReferenceType rhsSType;
if ((lhsType instanceof ReferenceType) && (rhsType instanceof ReferenceType)) {
lhsSType = (ReferenceType) lhsType;
rhsSType = (ReferenceType) rhsType;
}
else if (((lhsType instanceof ExtendsType) && (rhsType instanceof ExtendsType))
|| ((lhsType instanceof SuperType) && (rhsType instanceof SuperType))) {
UnifyType lhsSTypeRaw = ((WildcardType) lhsType).getWildcardedType();
UnifyType rhsSTypeRaw = ((WildcardType) rhsType).getWildcardedType();
if ((lhsSTypeRaw instanceof ReferenceType) && (rhsSTypeRaw instanceof ReferenceType)) {
lhsSType = (ReferenceType) lhsSTypeRaw;
rhsSType = (ReferenceType) rhsSTypeRaw;
}
else
return Optional.empty();
}
else
return Optional.empty();
if(lhsSType.getTypeParams().empty())
return Optional.empty();
if(!rhsSType.getName().equals(lhsSType.getName()))
return Optional.empty();
if(!(lhsSType.getTypeParams().size()==rhsSType.getTypeParams().size()))throw new DebugException("Fehler in Unifizierung"+ " " + lhsSType.toString() + " " + rhsSType.toString());
//if(rhsSType.getTypeParams().size() != lhsSType.getTypeParams().size())
// return Optional.empty();
Set<UnifyPair> result = new HashSet<>();
TypeParams rhsTypeParams = rhsSType.getTypeParams();
TypeParams lhsTypeParams = lhsSType.getTypeParams();
for(int i = 0; i < rhsTypeParams.size(); i++)
result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
return Optional.of(result);
}
static boolean erase1(UnifyPair pair, IFiniteClosure fc) {
if((pair.getPairOp() != PairOperator.SMALLERDOT) && (pair.getPairOp() != PairOperator.SMALLERNEQDOT))
return false;
if (pair.getPairOp() == PairOperator.SMALLERNEQDOT) {
UnifyType lhs = pair.getLhsType();
UnifyType rhs = pair.getRhsType();
if (lhs instanceof WildcardType) {
lhs = ((WildcardType)lhs).getWildcardedType();
}
if (rhs instanceof WildcardType) {
rhs = ((WildcardType)rhs).getWildcardedType();
}
if (lhs.equals(rhs)){
return false;
}
}
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return false;
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return false;
return fc.greater(lhsType, new HashSet<>()).contains(rhsType);
}
static boolean erase2(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return false;
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
return fc.grArg(lhsType, new HashSet<>()).contains(rhsType);
}
static boolean erase3(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return false;
return pair.getLhsType().equals(pair.getRhsType());
}
static Optional<UnifyPair> swap(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return Optional.empty();
if(pair.getLhsType() instanceof PlaceholderType)
return Optional.empty();
if(!(pair.getRhsType() instanceof PlaceholderType))
return Optional.empty();
return Optional.of(new UnifyPair(pair.getRhsType(), pair.getLhsType(), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType typeD = pair.getLhsType();
if(!(typeD instanceof ReferenceType))
return Optional.empty();
UnifyType typeDs = pair.getRhsType();
if(!(typeDs instanceof ReferenceType))
return Optional.empty();
/*if(typeD.getTypeParams().size() == 0 || typeDs.getTypeParams().size() == 0)
return Optional.empty();*/
if(typeD.getName().equals(typeDs.getName()))
return Optional.empty();
Optional<UnifyType> opt = fc.getLeftHandedType(typeD.getName());
if(!opt.isPresent())
return Optional.empty();
// The generic Version of Type D (D<a1, a2, a3, ... >)
UnifyType typeDgen = opt.get();
// Actually greater+ because the types are ensured to have different names
Set<UnifyType> greater = fc.getAncestors(typeDgen);
opt = greater.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
UnifyType newLhs = opt.get();
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR");
}
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType typeD = pair.getLhsType();
if(!(typeD instanceof ReferenceType) && !(typeD instanceof ExtendsType))
return Optional.empty();
UnifyType typeExtDs = pair.getRhsType();
if(!(typeExtDs instanceof ExtendsType))
return Optional.empty();
if(typeD.getTypeParams().size() == 0 || typeExtDs.getTypeParams().size() == 0)
return Optional.empty();
UnifyType typeDgen;
if(typeD instanceof ReferenceType)
typeDgen = fc.getLeftHandedType(typeD.getName()).orElse(null);
else {
Optional<UnifyType> opt = fc.getLeftHandedType(((ExtendsType) typeD).getExtendedType().getName());
typeDgen = opt.isPresent() ? new ExtendsType(opt.get()) : null;
}
if(typeDgen == null)
return Optional.empty();
Set<UnifyType> grArg = fc.grArg(typeDgen, new HashSet<>());
Optional<UnifyType> opt = grArg.stream().filter(x -> x.getName().equals(typeExtDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
UnifyType newLhs = ((ExtendsType) opt.get()).getExtendedType();
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
Unifier unif = new Unifier((PlaceholderType) typeDgenParams.get(0), typeDParams.get(0));
for(int i = 1; i < typeDParams.size(); i++)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
return Optional.of(new UnifyPair(unif.apply(newLhs), typeExtDs, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType typeDs = pair.getLhsType();
if(!(typeDs instanceof ReferenceType) && !(typeDs instanceof SuperType))
return Optional.empty();
UnifyType typeSupD = pair.getRhsType();
if(!(typeSupD instanceof SuperType))
return Optional.empty();
if(typeDs.getTypeParams().size() == 0 || typeSupD.getTypeParams().size() == 0)
return Optional.empty();
Optional<UnifyType> opt = fc.getLeftHandedType(((SuperType) typeSupD).getSuperedType().getName());
if(!opt.isPresent())
return Optional.empty();
UnifyType typeDgen = opt.get();
UnifyType typeSupDgen = new SuperType(typeDgen);
// Use of smArg instead of grArg because
// a in grArg(b) => b in smArg(a)
Set<UnifyType> smArg = fc.smArg(typeSupDgen, new HashSet<>());
opt = smArg.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
// New RHS
UnifyType newRhs = null;
if(typeDs instanceof ReferenceType)
newRhs = new ExtendsType(typeDs);
else
newRhs = new ExtendsType(((SuperType) typeDs).getSuperedType());
// New LHS
UnifyType newLhs = opt.get();
TypeParams typeDParams = typeSupD.getTypeParams();
TypeParams typeSupDsgenParams = typeSupDgen.getTypeParams();
Unifier unif = new Unifier((PlaceholderType) typeSupDsgenParams.get(0), typeDParams.get(0));
for(int i = 1; i < typeDParams.size(); i++)
unif.add((PlaceholderType) typeSupDsgenParams.get(i), typeDParams.get(i));
return Optional.of(new UnifyPair(unif.apply(newLhs), newRhs, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
}
/**
* Finds the permutation pi of the type arguments of two types based on the finite closure
* @param cArgs The type which arguments are permuted
* @param dArgs The other type
* @return An array containing the values of pi for every type argument of C or an empty array if the search failed.
*/
private static int[] pi(TypeParams cArgs, TypeParams dArgs) {
if(!(cArgs.size()==dArgs.size()))throw new DebugException("Fehler in Unifizierung");
int[] permutation = new int[dArgs.size()];
boolean succ = true;
for (int dArgIdx = 0; dArgIdx < dArgs.size() && succ; dArgIdx++) {
UnifyType dArg = dArgs.get(dArgIdx);
succ = false;
for (int pi = 0; pi < cArgs.size(); pi++)
if (cArgs.get(pi).getName().equals(dArg.getName())) {
permutation[dArgIdx] = pi;
succ = true;
break;
}
}
return succ ? permutation : new int[0];
}
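// Worked example (illustrative, not part of the original diff):
// for cArgs = (a, b) from C<a, b> and dArgs = (b, a) from its ancestor D<b, a>,
// dArgs[0] = b is found at cArgs position 1 and dArgs[1] = a at position 0,
// so pi = [1, 0]; if some dArg has no cArg with the same name, the empty array is returned.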
static Optional<UnifyPair> reduceWildcardLow(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof ExtendsType))
return Optional.empty();
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof ExtendsType))
return Optional.empty();
return Optional.of(new UnifyPair(lhsType, ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> reduceWildcardUp(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof SuperType) || !(rhsType instanceof SuperType))
return Optional.empty();
return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), ((SuperType) lhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof SuperType))
return Optional.empty();
return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), lhsType, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair) {
if((pair.getPairOp() != PairOperator.SMALLERDOT)
&& (pair.getPairOp() != PairOperator.EQUALSDOT)) //PL 2017-10-03 added
//because the rule is also applicable for EQUALSDOT
//TODO: still check for all other relations
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof FunNType) || !(rhsType instanceof FunNType))
return Optional.empty();
FunNType funNLhsType = (FunNType) lhsType;
FunNType funNRhsType = (FunNType) rhsType;
if(funNLhsType.getN() != funNRhsType.getN())
return Optional.empty();
Set<UnifyPair> result = new HashSet<UnifyPair>();
if (pair.getPairOp() == PairOperator.SMALLERDOT) {
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
}
else {// pair.getPairOp() == PairOperator.EQUALDOT
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), funNLhsType.getTypeParams().get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
}
}
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
return Optional.of(result);
}
static Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof FunNType) || !(rhsType instanceof PlaceholderType))
return Optional.empty();
FunNType funNLhsType = (FunNType) lhsType;
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)rhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
result.add(new UnifyPair(freshPlaceholders[i], funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
return Optional.of(result);
}
static Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType) || !(rhsType instanceof FunNType))
return Optional.empty();
FunNType funNRhsType = (FunNType) rhsType;
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)lhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
for(int i = 0; i < funNRhsType.getTypeParams().size()-1; i++) {
result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), freshPlaceholders[i], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
return Optional.of(result);
}
static Optional<UnifyPair> reduceTph(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType) || !(rhsType instanceof ReferenceType))
return Optional.empty();
return Optional.of(new UnifyPair(lhsType, rhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
}
static Optional<Set<UnifyPair>> reduceTphExt(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof PlaceholderType))
return Optional.empty();
UnifyType extendedType = ((ExtendsType)lhsType).getExtendedType();
if (extendedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 inserted: ? extends a <.? a
boolean isGen = extendedType instanceof PlaceholderType && !((PlaceholderType) extendedType).isGenerated();
Set<UnifyPair> result = new HashSet<>();
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
return Optional.of(result);
}
static Optional<Set<UnifyPair>> reduceTphSup(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof SuperType) || !(rhsType instanceof PlaceholderType))
return Optional.empty();
UnifyType superedType = ((SuperType)lhsType).getSuperedType();
if (superedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 inserted: ? super a <.? a
boolean isGen = superedType instanceof PlaceholderType && !((PlaceholderType) superedType).isGenerated();
Set<UnifyPair> result = new HashSet<>();
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
Set<UnifyType> fBounded = pair.getfBounded();
fBounded.add(lhsType);
result.add(new UnifyPair(freshTph, superedType, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair(), fBounded));
}
return Optional.of(result);
}
}

@ -1,54 +0,0 @@
package de.dhbwstuttgart.unify2;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import java.util.*;
public class TypeUnify {
public static Optional<Set<UnifyPair>> unifyOrConstraints(UnifyConstraintSet eq, FiniteClosure fc){
return eq.cartesianProductParallel().map(eqPrime -> unify(eqPrime, fc)).filter(Optional::isPresent).map(Optional::get).findAny();
}
public static Optional<Set<UnifyPair>> unify(Set<UnifyPair> eq, FiniteClosure fc){
/*
TODO: Here one could check whether it makes sense at all to continue with eq.
There could be an object shared across threads (a field in TypeUnify) that learns impossible clauses.
*/
//Apply the Reduce and Apply rules
Set<UnifyPair> res = RuleSet.applyTypeUnificationRules(eq, fc);
//Split result
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
for(UnifyPair pair : res) {
if (pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
Optional<UnifyConstraintSet> step4Res = Unify.step4(eq1s, eq2s, fc);
//If step4 yields a result, apply subst and a recursive unify call:
return step4Res.flatMap(constraintSet ->
constraintSet.cartesianProductParallel().map(toSubst -> {
Optional<Set<UnifyPair>> substitutionResult = Unify.subst(toSubst); //substitute here
//if it changed:
if (substitutionResult.isPresent()) {
return unify(substitutionResult.get(), fc);
}else{
//TODO: return the result
return Optional.of(toSubst);
}
}).filter(it -> it.isPresent()).map(Optional::get).findAny());
}
}

@ -1,340 +0,0 @@
package de.dhbwstuttgart.unify2;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import de.dhbwstuttgart.unify2.model.UnifyOderConstraint;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
public class Unify {
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
public static Optional<UnifyConstraintSet> step4(Set<UnifyPair> eq1s, Set<UnifyPair> eq2s, FiniteClosure fc) {
Set<UnifyOderConstraint> result = new HashSet<>(8);
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the above forms are contradictory.
else {
return Optional.empty();
}
}
result.add(new UnifyOderConstraint(Set.of(eq1s)));
// Filter empty sets or sets that only contain an empty set.
//Andi: Why? Should they exist? this should be an error then
return Optional.of(new UnifyConstraintSet(result));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
static UnifyOderConstraint unifyCase1(PlaceholderType a, UnifyType thetaPrime, FiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = thetaPrime.getTypeParams().size() > 0;
for(UnifyType t : thetaPrime.getTypeParams())
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());
cs.add(thetaPrime);
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
//thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param, new HashSet<>()));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = MartelliMontanariUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Map.Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
List<UnifyType> freshTphs = new ArrayList<>();
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq), new HashSet<>());
for(UnifyType theta : smaller) {
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
if(allGen)
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
else
resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
result.add(resultPrime);
}
}
}
}
return new UnifyOderConstraint(result);
}
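// Worked example (illustrative, not part of the original diff; type names are made up):
// for a <. List<String> the candidates include List<String> itself and, via the finite
// closure, subtypes such as ArrayList; each smaller type theta contributes one alternative
// set, e.g. { a =. ArrayList<x1>, x1 <.? String } with a fresh placeholder x1, and all
// alternatives together form the returned UnifyOderConstraint.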
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
static Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Idx for the current permutation.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
static void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
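// Worked example (illustrative, not part of the original diff):
// candidates = [{Integer, Number}, {String}] yields the two permutations
// (Integer, String) and (Number, String), i.e. one TypeParams instance per element
// of the cartesian product of the candidate sets.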
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
static UnifyOderConstraint unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return new UnifyOderConstraint(result);
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
static UnifyOderConstraint unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
return new UnifyOderConstraint(result);
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
static UnifyOderConstraint unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = theta.getTypeParams().size() > 0;
for(UnifyType t : theta.getTypeParams())
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
for(UnifyType thetaS : fc.greater(theta, new HashSet<>())) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; !allGen && i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
if(allGen)
resultPrime.add(new UnifyPair(a, thetaS, PairOperator.EQUALSDOT));
else
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
result.add(resultPrime);
}
return new UnifyOderConstraint(result);
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
static UnifyOderConstraint unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
result.add(resultPrime);
//}
return new UnifyOderConstraint(result);
}
static Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
Stack<UnifyType> occuringTypes = new Stack<>();
for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType());
}
while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1))
typeMap.put(t1, 0);
typeMap.put(t1, typeMap.get(t1)+1);
if(t1 instanceof ExtendsType)
occuringTypes.push(((ExtendsType) t1).getExtendedType());
if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType());
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
while(!result1.isEmpty()) {
UnifyPair pair = result1.poll();
PlaceholderType lhsType = null;
UnifyType rhsType;
if(pair.getPairOp() == PairOperator.EQUALSDOT
&& pair.getLhsType() instanceof PlaceholderType)
lhsType = (PlaceholderType) pair.getLhsType();
rhsType = pair.getRhsType(); //PL inserted 2017-09-29 instead of !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
if(lhsType != null
//&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL removed 2017-09-29, reason: type variables have to be substituted as well.
&& typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
&& !rhsType.getTypeParams().occurs(lhsType)
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL inserted 2018-02-18
{
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>(
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)))
: () -> new Constraint<UnifyPair>()
));
applied = true;
}
result.add(pair);
}
return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
}
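// Worked example (illustrative, not part of the original diff):
// for pairs = { a =. String, a <. b } the placeholder a occurs in more than one pair,
// so the unifier { a -> String } is applied to the remaining pairs and subst returns
// { a =. String, String <. b }; if no left-hand placeholder of an =. pair occurs
// elsewhere, Optional.empty() signals that nothing was substituted.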
}

@ -1,11 +0,0 @@
package de.dhbwstuttgart.unify2;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Set;
public class UnifyResult {
public UnifyResult(Set<UnifyPair> toSubst) {
}
}

@ -1,124 +0,0 @@
package de.dhbwstuttgart.unify2.model;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/*
OrC1 OrC2 OrC3 -> Constraints
Step4: OrC1 OrC2/c OrC3 OrC4
Depth-first search, create new UnifyConstraints
How is the ConstraintSet changed?
not needed:
( map -> on subst
change all constraints, return a new ConstraintSet )
Step 4 then builds the cartesian product and has to iterate over all possibilities:
take the first possibility of the cartesian product, run the subst step and continue with the result (a single constraint set)
*/
public class UnifyConstraintSet {
Set<UnifyOderConstraint> oderConstraints = new HashSet<>();
public UnifyConstraintSet(Set<UnifyOderConstraint> constraints){
if(constraints.isEmpty())throw new RuntimeException("Empty constraint set");
this.oderConstraints = constraints;
}
@Override
public String toString(){
BinaryOperator<String> b = (x, y) -> x+y;
return "ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
}
/*
Cartesian product as an optimized stream
- A split partitions the set so that the second thread handles every second element
- A thread that handles every second element is split as follows:
1 2 1 2 1 2 1 2 => (o = 0, n = 2), (o = 1, n = 2)
1 2 1 3 1 2 1 3 => (o = 0, n = 2), (o = 1, n = 4), (o = 3, n = 4)
*/
private class ConstraintSpliterator implements Spliterator<Set<UnifyPair>> {
private List<UnifyOderConstraint> constraints;
private long i = 0;
private long iterationFactor = 1;
private long max = 0;
private List<Integer> sizes;
private List<Long> bases = new ArrayList<>();
ConstraintSpliterator(List<UnifyOderConstraint> constraints){
this.constraints = constraints;
sizes = constraints.stream().map(UnifyOderConstraint::getSize).collect(Collectors.toList());
long base = 1;
for(int size : sizes){
bases.add(base);
base *= size;
}
i = 0;
max = estimateSize() - 1;
}
ConstraintSpliterator(List<UnifyOderConstraint> constraints, long start, long factor){
this(constraints);
i = start;
this.iterationFactor = factor;
}
@Override
public boolean tryAdvance(Consumer<? super Set<UnifyPair>> consumer) {
if(i > max) return false;
consumer.accept(get(i));
i++;
return true;
}
private Set<UnifyPair> get(long num){
Set<UnifyPair> ret = new HashSet<>();
Iterator<Long> baseIt = bases.iterator();
for(UnifyOderConstraint constraint : constraints){
ret.addAll(constraint.get((int) ((num/baseIt.next())%constraint.getSize())));
}
return ret;
}
@Override
public Spliterator<Set<UnifyPair>> trySplit() {
if(max - (i+iterationFactor * 2) < 0) return null;
long iNext = i + iterationFactor;
iterationFactor *= 2;
return new UnifyConstraintSet.ConstraintSpliterator(constraints, iNext, iterationFactor);
}
@Override
public long estimateSize() {
long ret = 1;
for (int size : sizes)ret*=size;
return ret;
}
@Override
public int characteristics() {
return ORDERED | SIZED | IMMUTABLE | NONNULL;
}
}
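// Worked example (illustrative, not part of the original diff):
// two or-constraints with 2 and 3 alternatives give bases = [1, 2] and 2*3 = 6 combinations;
// combination number num picks alternative (num/1) % 2 from the first constraint and
// (num/2) % 3 from the second, so the spliterator enumerates (and splits) the cartesian
// product lazily without materialising it.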
public Stream<Set<UnifyPair>> cartesianProductParallel(){
return StreamSupport.stream(new UnifyConstraintSet.ConstraintSpliterator(oderConstraints.stream().collect(Collectors.toList())), true);
}
public Stream<Set<UnifyPair>> cartesianProductParallel(Comparator<Set<UnifyPair>> prioritiser){
return StreamSupport.stream(new UnifyConstraintSet.ConstraintSpliterator(oderConstraints.stream().collect(Collectors.toList())), true);
}
}

@ -1,30 +0,0 @@
package de.dhbwstuttgart.unify2.model;
import de.dhbwstuttgart.typeinference.unify.model.Pair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.HashSet;
import java.util.Set;
public class UnifyConstraintSetBuilder {
private Set<UnifyPair> undConstraints = new HashSet<>();
private Set<UnifyOderConstraint> oderConstraints = new HashSet<>();
private boolean done = false;
public void addUndConstraint(UnifyPair p){
undConstraints.add(p);
}
public void addOderConstraint(UnifyOderConstraint orConstraint) {
oderConstraints.add(orConstraint);
}
public UnifyConstraintSet build(){
if(done)throw new RuntimeException("Trying to build cartesian product twice");
this.done = true;
if(!undConstraints.isEmpty())
oderConstraints.add(new UnifyOderConstraint(Set.of(undConstraints)));
return new UnifyConstraintSet(oderConstraints);
}
}

@ -1,27 +0,0 @@
package de.dhbwstuttgart.unify2.model;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class UnifyOderConstraint {
private final List<Set<UnifyPair>> cons;
public UnifyOderConstraint(Set<Set<UnifyPair>> orCons){
if(orCons.isEmpty())throw new RuntimeException("Empty constraint set");
for(Set<UnifyPair> c : orCons){
if(c.isEmpty())throw new RuntimeException("Empty constraint set");
}
this.cons = orCons.stream().collect(Collectors.toList());
}
public int getSize(){
return cons.size();
}
public Set<UnifyPair> get(int l) {
return cons.get(l);
}
}

@ -40,7 +40,8 @@ public class AllgemeinTest {
//String className = "FCTest3";
//String className = "Var";
//String className = "Put";
String className = "Cycle";
//String className = "Twice";
String className = "TestSubTypless";
//PL 2019-10-24: used for different tests
path = System.getProperty("user.dir")+"/src/test/resources/AllgemeinTest/" + className + ".jav";
//path = System.getProperty("user.dir")+"/src/test/resources/AllgemeinTest/Overloading_Generics.jav";

@ -1,68 +0,0 @@
package constraintSimplify;
import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.insertGenerics.PositionFinder;
import de.dhbwstuttgart.bytecode.genericsGenerator.GeneratedGenericsFinder;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.Block;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import org.junit.Test;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
public class FamilyOfGenerics {
@Test
public void generateBC() throws Exception {
SourceFile sf = generateAST();
PositionFinder.getPositionOfTPH(sf, null);
TPHExtractor tphExtractor = new TPHExtractor();
List<ResultSet> results = new ArrayList<ResultSet>();
GeneratedGenericsFinder generatedGenericsFinder = new GeneratedGenericsFinder(sf, results);
}
public static SourceFile generateAST(){
ArrayList<ClassOrInterface> classes = new ArrayList<>();
ArrayList<Field> fields = new ArrayList<>();
ArrayList<Method> methods = new ArrayList<>();
fields.add(generateField("fld1"));
String[] paramNames = {"a"};
methods.add(generateMethod("testMethode", paramNames));
classes.add(new ClassOrInterface(Modifier.PUBLIC, new JavaClassName("Test"), fields, Optional.empty(), methods,
new ArrayList<>(), generateEmptyGenericDeclList(),
new RefType(new JavaClassName("java.lang.Object"), new NullToken()),
false, new ArrayList<>(), new NullToken()));
return new SourceFile("Test.jav", classes, new HashSet<>());
}
public static Method generateMethod(String methodName, String[] paramNames){
ArrayList<FormalParameter> parameters = new ArrayList<>();
for(String str: paramNames) {
FormalParameter fp = new FormalParameter(str, TypePlaceholder.fresh(new NullToken()), new NullToken());
parameters.add(fp);
}
ParameterList parameterList = new ParameterList(parameters, new NullToken());
return new Method(Modifier.PUBLIC, methodName, TypePlaceholder.fresh(new NullToken()), parameterList,
new Block(new ArrayList<>(), new NullToken()), generateEmptyGenericDeclList(), new NullToken());
}
public static GenericDeclarationList generateEmptyGenericDeclList(){
return new GenericDeclarationList(new ArrayList<>(), new NullToken());
}
public static Field generateField(String fieldName) {
return new Field(fieldName, TypePlaceholder.fresh(new NullToken()), Modifier.PUBLIC, new NullToken());
}
}

@ -1,214 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.insertGenerics.*;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class FamilyOfGeneratedGenericsTest extends TestCase {
public void testIdentityMethod(){
/*
Example method:
A id(B i) return i;
gives constraint: B <. A and A <. Object, which are method constraints
*/
List<TPHConstraint> inputConstraints = new ArrayList<>();
inputConstraints.add(new TPHConstraint("B", "A", TPHConstraint.Relation.EXTENDS));
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
PairTphMethod<PositionFinder.Position, String> meth1 = new PairTphMethod<PositionFinder.Position, String>(PositionFinder.Position.METHOD, "m1");
tphPositions.put("A", meth1);
tphPositions.put("B", meth1);
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
assertTrue(classConstraints.isEmpty());
/*
MethodConstraints should be the same as the input constraint
*/
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(inputConstraints, new ArrayList<ClassConstraint>(), tphPositions);
assertTrue(methodConstraints.size() == 2);
assertTrue(methodConstraints.get(0).getLeft().equals("B"));
assertTrue(methodConstraints.get(0).getRight().equals("A"));
}
public void testClassField(){
/*
class Example{
A f;
B fReturn(){
return f;
}
}
gives constraint: A <. B and B <. Object which are class constraints
*/
List<TPHConstraint> inputConstraints = new ArrayList<>();
inputConstraints.add(new TPHConstraint("A", "B", TPHConstraint.Relation.EXTENDS));
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
tphPositions.put("A", posOfA);
PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.METHOD, "fReturn");
tphPositions.put("B", posOfB);
/*
ClassConstraints should not be the same as the input constraint
*/
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
System.out.println(classConstraints);
assertTrue(classConstraints.size() == 2);
//assertTrue(classConstraints.get(0).getLeft().equals("A"));
//assertTrue(classConstraints.get(0).getRight().equals("B"));
}
public void testSecondLineOfClassConstraints() {
/*
class Example() {
A a;
B b = a;
C anyMethod() {
F f;
return f;
}
D otherMethod(E e) {
this.b = e;
e = this.a;
return e;
}
}
*/
List<TPHConstraint> inputConstraints = new ArrayList<>();
inputConstraints.add(new TPHConstraint("A", "B", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("F", "C", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("E", "B", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("A", "E", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("E", "D", TPHConstraint.Relation.EXTENDS));
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
PairTphMethod<PositionFinder.Position, String> posOfC = new PairTphMethod<>(PositionFinder.Position.METHOD, "anyMethod");
PairTphMethod<PositionFinder.Position, String> posOfD = new PairTphMethod<>(PositionFinder.Position.METHOD, "otherMethod");
PairTphMethod<PositionFinder.Position, String> posOfE = new PairTphMethod<>(PositionFinder.Position.METHOD, "otherMethod");
PairTphMethod<PositionFinder.Position, String> posOfF = new PairTphMethod<>(PositionFinder.Position.METHOD, "anyMethod");
tphPositions.put("A", posOfA);
tphPositions.put("B", posOfB);
tphPositions.put("C", posOfC);
tphPositions.put("F", posOfF);
tphPositions.put("D", posOfD);
tphPositions.put("E", posOfE);
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
System.out.println(classConstraints);
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(inputConstraints, classConstraints, tphPositions);
System.out.println(methodConstraints);
assertFalse(classConstraints.isEmpty());
assertTrue(classConstraints.size() == 6);
assertFalse(methodConstraints.isEmpty());
assertTrue(methodConstraints.size() == 5);
}
public void testTPHsAndGenerics() {
/*
class TPHsAndGenerics {
Fun1<A,B> id = x -> x;
C id2 (D x) {
return id.apply(x);
}
E m(F a, G b){
var c = m2(a,b);
return a;
}
H m2(I a, J b){
return b;
}
}
*/
List<TPHConstraint> inputConstraints = new ArrayList<>();
inputConstraints.add(new TPHConstraint("A","B", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("B","C", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("D","A", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("F","E", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("F","I", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("G","J", TPHConstraint.Relation.EXTENDS));
inputConstraints.add(new TPHConstraint("J","H", TPHConstraint.Relation.EXTENDS));
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
PairTphMethod<PositionFinder.Position, String> posOfC = new PairTphMethod<>(PositionFinder.Position.METHOD, "id2");
PairTphMethod<PositionFinder.Position, String> posOfD = new PairTphMethod<>(PositionFinder.Position.METHOD, "id2");
PairTphMethod<PositionFinder.Position, String> posOfE = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfF = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfG = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfH = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");
PairTphMethod<PositionFinder.Position, String> posOfI = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");
PairTphMethod<PositionFinder.Position, String> posOfJ = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");
tphPositions.put("A", posOfA);
tphPositions.put("B", posOfB);
tphPositions.put("C", posOfC);
tphPositions.put("D", posOfD);
tphPositions.put("E", posOfE);
tphPositions.put("F", posOfF);
tphPositions.put("G", posOfG);
tphPositions.put("H", posOfH);
tphPositions.put("I", posOfI);
tphPositions.put("J", posOfJ);
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
System.out.println(classConstraints);
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(inputConstraints, classConstraints, tphPositions);
System.out.println(methodConstraints);
assertFalse(classConstraints.isEmpty());
assertTrue(classConstraints.size() == 3);
assertFalse(methodConstraints.isEmpty());
assertTrue(methodConstraints.size()==9);
}
public void testPositionConverter() {
HashMap<String, Boolean> allTphsOld = new HashMap<>();
List<MethodAndTPH> listOfMethodsAndTphs = new ArrayList<>();
allTphsOld.put("A", true);
allTphsOld.put("B", false);
MethodAndTPH m1 = new MethodAndTPH("m1");
m1.getTphs().add("A");
MethodAndTPH bla = new MethodAndTPH("bla");
MethodAndTPH blubb = new MethodAndTPH("blubb");
// blubb.getTphs().add("A");
listOfMethodsAndTphs.add(bla);
listOfMethodsAndTphs.add(blubb);
listOfMethodsAndTphs.add(m1);
HashMap<String, PairTphMethod<PositionFinder.Position, String>> allTphsNew = FamilyOfGeneratedGenerics.positionConverter(allTphsOld, listOfMethodsAndTphs);
System.out.println(allTphsNew);
//what to do if two (or more) methods contain the same TPH?
//is that possible at all, or are the TPHs always initialized distinctly and only mapped at the end?
//rework or leave as is?
assertTrue(allTphsNew.get("A").fst.equals(PositionFinder.Position.METHOD));
assertTrue(allTphsNew.get("B").fst.equals(PositionFinder.Position.FIELD));
}
public void testFirstTransitiveSubtypeForMethodTypes(){
}
}

@ -1,30 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.genericsGenerator.GeneratedGenericsFinder;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import junit.framework.TestResult;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.List;
public class MethodsTest {
public static final String rootDirectory = System.getProperty("user.dir")+"/src/test/resources/insertGenericsJav/";
public static final String fileDirectory = rootDirectory + "TestGGFinder.jav";
@Test
public void testVisit(ClassOrInterface coi) throws IOException, ClassNotFoundException {
JavaTXCompiler compiler = new JavaTXCompiler(new File(fileDirectory));
SourceFile sf = compiler.sourceFiles.get(compiler.sourceFiles.keySet());
List<ResultSet> results = compiler.typeInference();
GeneratedGenericsFinder ggf = new GeneratedGenericsFinder(sf, results);
ClassOrInterface coiHere = coi;
ggf.visit(coiHere);
//assert results.size()>0;
}
}

@ -1,63 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.*;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class TestExample42 {
public List<TPHConstraint> fillConstraintsList() {
List<TPHConstraint> cs = new ArrayList<>();
cs.add(new TPHConstraint("M", "N", Relation.EXTENDS));
cs.add(new TPHConstraint("N", "Z", Relation.EXTENDS));
cs.add(new TPHConstraint("Q", "K", Relation.EXTENDS));
cs.add(new TPHConstraint("K", "P", Relation.EXTENDS));
cs.add(new TPHConstraint("W", "M", Relation.EXTENDS));
cs.add(new TPHConstraint("Z", "V", Relation.EXTENDS));
return cs;
}
public HashMap<String, PairTphMethod<PositionFinder.Position, String>> fillPosOfTphs() {
HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs = new HashMap<>();
// TPHs "U" und "L" auskommentiert, da nach Vorgaben L zu Z umbenannt und U als void interpretiert wird
PairTphMethod<PositionFinder.Position, String> posOfK = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
// PairTphMethod<PositionFinder.Position, String> posOfL = new PairTphMethod<>(PositionFinder.Position.METHOD, "id");
PairTphMethod<PositionFinder.Position, String> posOfM = new PairTphMethod<>(PositionFinder.Position.METHOD, "id");
PairTphMethod<PositionFinder.Position, String> posOfN = new PairTphMethod<>(PositionFinder.Position.METHOD, "id");
PairTphMethod<PositionFinder.Position, String> posOfP = new PairTphMethod<>(PositionFinder.Position.METHOD, "setA");
PairTphMethod<PositionFinder.Position, String> posOfQ = new PairTphMethod<>(PositionFinder.Position.METHOD, "setA");
// PairTphMethod<PositionFinder.Position, String> posOfU = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfV = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfW = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
PairTphMethod<PositionFinder.Position, String> posOfZ = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
posOfTphs.put("K", posOfK);
// posOfTphs.put("L", posOfL);
posOfTphs.put("M", posOfM);
posOfTphs.put("N", posOfN);
posOfTphs.put("P", posOfP);
posOfTphs.put("Q", posOfQ);
// posOfTphs.put("U", posOfU);
posOfTphs.put("V", posOfV);
posOfTphs.put("W", posOfW);
posOfTphs.put("Z", posOfZ);
return posOfTphs;
}
@Test
public void genericTest() {
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(fillConstraintsList(),fillPosOfTphs());
System.out.println("ClassConstraints: " + classConstraints);
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(fillConstraintsList(),classConstraints,fillPosOfTphs());
System.out.println("MethodConstraints: " + methodConstraints);
List<TPHConstraint> testCons;
}
}

@ -1,61 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.*;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class TestExample42_allInOneMethod {
public List<TPHConstraint> fillConstraintsList() {
List<TPHConstraint> cs = new ArrayList<>();
cs.add(new TPHConstraint("M", "N", Relation.EXTENDS));
cs.add(new TPHConstraint("N", "Z", Relation.EXTENDS));
cs.add(new TPHConstraint("Q", "K", Relation.EXTENDS));
cs.add(new TPHConstraint("K", "P", Relation.EXTENDS));
cs.add(new TPHConstraint("W", "M", Relation.EXTENDS));
cs.add(new TPHConstraint("Z", "V", Relation.EXTENDS));
return cs;
}
public HashMap<String, PairTphMethod<PositionFinder.Position, String>> fillPosOfTphs() {
HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs = new HashMap<>();
// TPHs "U" und "L" auskommentiert, da nach Vorgaben L zu Z umbenannt und U als void interpretiert wird
PairTphMethod<PositionFinder.Position, String> posOfK = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
// PairTphMethod<PositionFinder.Position, String> posOfL = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfM = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfN = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfP = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfQ = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
// PairTphMethod<PositionFinder.Position, String> posOfU = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfV = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfW = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
PairTphMethod<PositionFinder.Position, String> posOfZ = new PairTphMethod<>(PositionFinder.Position.METHOD, "sameMethod");
posOfTphs.put("K", posOfK);
// posOfTphs.put("L", posOfL);
posOfTphs.put("M", posOfM);
posOfTphs.put("N", posOfN);
posOfTphs.put("P", posOfP);
posOfTphs.put("Q", posOfQ);
// posOfTphs.put("U", posOfU);
posOfTphs.put("V", posOfV);
posOfTphs.put("W", posOfW);
posOfTphs.put("Z", posOfZ);
return posOfTphs;
}
@Test
public void genericTest() {
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(fillConstraintsList(),fillPosOfTphs());
System.out.println("ClassConstraints: " + classConstraints);
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(fillConstraintsList(),classConstraints,fillPosOfTphs());
System.out.println("MethodConstraints: " + methodConstraints);
List<TPHConstraint> testCons;
}
}

@ -1,28 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
public class TestTransitiveClosure {
public List<TPHConstraint> fillList() {
List<TPHConstraint> list = new ArrayList<>();
list.add(new TPHConstraint("A", "B", Relation.EXTENDS));
list.add(new TPHConstraint("B", "C", Relation.EXTENDS));
list.add(new TPHConstraint("C", "D", Relation.EXTENDS));
return list;
}
@Test
public void genericTest() {
List<TPHConstraint> testCons = FamilyOfGeneratedGenerics.buildTransitiveClosure(fillList());
System.out.println(testCons);
}
}

@ -1,92 +0,0 @@
package insertGenerics;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typedeployment.TypeInsert;
import de.dhbwstuttgart.typedeployment.TypeInsertFactory;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class TryTest {
public static final String rootDirectory = System.getProperty("user.dir")+"/src/test/resources/insertGenericsJav/";
@Test
public void ggFinder() throws IOException, ClassNotFoundException {
execute(new File(rootDirectory+"TestGGFinder.jav"));
}
private static class TestResultSet{
}
public TestResultSet execute(File fileToTest) throws IOException, ClassNotFoundException {
//filesToTest.add(new File(rootDirectory+"fc.jav"));
//filesToTest.add(new File(rootDirectory+"Lambda.jav"));
//filesToTest.add(new File(rootDirectory+"Lambda2.jav"));
//filesToTest.add(new File(rootDirectory+"Lambda3.jav"));
//filesToTest.add(new File(rootDirectory+"Vector.jav"));
//filesToTest.add(new File(rootDirectory+"Generics.jav"));
//filesToTest.add(new File(rootDirectory+"MethodsEasy.jav"));
//filesToTest.add(new File(rootDirectory+"Matrix.jav"));
//filesToTest.add(new File(rootDirectory+"Import.jav"));
// //filesToTest.add(new File(rootDirectory+"Faculty.jav"));
// //filesToTest.add(new File(rootDirectory+"mathStruc.jav"));
// //filesToTest.add(new File(rootDirectory+"test.jav"));
JavaTXCompiler compiler = new JavaTXCompiler(fileToTest);
for(File f : compiler.sourceFiles.keySet()){
SourceFile sf = compiler.sourceFiles.get(f);
// System.out.println(ASTTypePrinter.print(sf));
// System.out.println("---------------------------1");
// System.out.println(ASTPrinter.print(sf));
// System.out.println("---------------------------2");
}
List<ResultSet> results = compiler.typeInference();
List<GenericGenratorResultForSourceFile> simplifyResultsForAllSourceFiles = compiler.getGeneratedGenericResultsForAllSourceFiles(results);
//compiler.generateBytecode(rootDirectory+"xxx.class", results, simplifyResultsForAllSourceFiles);
for(File f : compiler.sourceFiles.keySet()){
SourceFile sf = compiler.sourceFiles.get(f);
System.out.println(ASTTypePrinter.print(sf));
// System.out.println("---------------------------3");
System.out.println(ASTPrinter.print(sf));
// System.out.println("---------------------------4");
//List<ResultSet> results = compiler.typeInference(); PL 2017-10-03 moved before the for loop
assert results.size()>0;
Set<String> insertedTypes = new HashSet<>();
for(ResultSet resultSet : results){
Set<TypeInsert> result = TypeInsertFactory.createTypeInsertPoints(sf, resultSet, results, simplifyResultsForAllSourceFiles);
assert result.size()>0;
String content = readFile(f.getPath(), StandardCharsets.UTF_8);
for(TypeInsert tip : result){
insertedTypes.add(tip.insert(content));
}
}
for(String s : insertedTypes){
System.out.println("---------------------------51");
System.out.println(s);
System.out.println("---------------------------52");
}
}
return new TestResultSet();
}
static String readFile(String path, Charset encoding)
throws IOException
{
byte[] encoded = Files.readAllBytes(Paths.get(path));
return new String(encoded, encoding);
}
}

@ -1,62 +0,0 @@
package typeinference;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSetBuilder;
import de.dhbwstuttgart.typeinference.constraints.OderConstraint;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import org.junit.Test;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class ConstraintSetTests {
@Test
public void cartesianProductTestSingleConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 1;
}
@Test
public void cartesianProductTestSingleOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 2;
}
@Test
public void cartesianProductTestTwoOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 4;
}
@Test
public void cartesianProductTestThreeOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 4;
ConstraintSetBuilder builder2 = new ConstraintSetBuilder();
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result2 = builder2.build().cartesianProductParallel().collect(Collectors.toList());
assert result2.stream().map( a -> a.stream().map(p -> p.TA1.toString()).reduce("", (x, y)-> x+" "+y)).collect(Collectors.toSet()).size() == 8;
assert result2.size() == 8;
}
public Pair generatePair(){
return new Pair(TypePlaceholder.fresh(new NullToken()), TypePlaceholder.fresh(new NullToken()));
}
}

@ -1,62 +0,0 @@
package typeinference;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSetBuilder;
import de.dhbwstuttgart.typeinference.constraints.OderConstraint;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import org.junit.Test;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class UnifyConstraintSetTests {
@Test
public void cartesianProductTestSingleConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 1;
}
@Test
public void cartesianProductTestSingleOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 2;
}
@Test
public void cartesianProductTestTwoOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 4;
}
@Test
public void cartesianProductTestThreeOderConstraint(){
ConstraintSetBuilder builder = new ConstraintSetBuilder();
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()))));
List<Set<Pair>> result = builder.build().cartesianProductParallel().collect(Collectors.toList());
assert result.size() == 4;
ConstraintSetBuilder builder2 = new ConstraintSetBuilder();
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
builder2.addOderConstraint(new OderConstraint(Set.of(Set.of(generatePair()), Set.of(generatePair()))));
List<Set<Pair>> result2 = builder2.build().cartesianProductParallel().collect(Collectors.toList());
assert result2.stream().map( a -> a.stream().map(p -> p.TA1.toString()).reduce("", (x, y)-> x+" "+y)).collect(Collectors.toSet()).size() == 8;
assert result2.size() == 8;
}
public Pair generatePair(){
return new Pair(TypePlaceholder.fresh(new NullToken()), TypePlaceholder.fresh(new NullToken()));
}
}

@ -1,4 +0,0 @@
package unify;
public class RuleSetTest {
}

@ -1,47 +0,0 @@
package unify;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.unify2.TypeUnify;
import de.dhbwstuttgart.unify2.model.UnifyConstraintSet;
import de.dhbwstuttgart.unify2.model.UnifyOderConstraint;
import org.junit.Test;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
public class TypeUnifyTest {
@Test
public void emptyInput(){
UnifyConstraintSet empty = new UnifyConstraintSet(new HashSet<>());
TypeUnify.unifyOrConstraints(empty, emptyFC());
}
@Test
public void singleOrConstraintOnlyTPHs(){
Set<UnifyPair> pairs = Set.of(new UnifyPair(PlaceholderType.freshPlaceholder(), PlaceholderType.freshPlaceholder(), PairOperator.EQUALSDOT));
UnifyOderConstraint orConstraint = new UnifyOderConstraint(Set.of(pairs));
UnifyConstraintSet input = new UnifyConstraintSet(Set.of(orConstraint));
Optional<Set<UnifyPair>> res = TypeUnify.unifyOrConstraints(input, emptyFC());
assert res.isPresent();
}
@Test
public void unifyTest1(){
PlaceholderType tph1 = PlaceholderType.freshPlaceholder();
PlaceholderType tph2 = PlaceholderType.freshPlaceholder();
UnifyPair p1 = new UnifyPair(tph1, tph2, PairOperator.SMALLERDOT);
Set<UnifyPair> pairs = Set.of(p1);
UnifyOderConstraint orConstraint = new UnifyOderConstraint(Set.of(pairs));
UnifyConstraintSet input = new UnifyConstraintSet(Set.of(orConstraint));
Optional<Set<UnifyPair>> res = TypeUnify.unifyOrConstraints(input, emptyFC());
assert res.isPresent();
}
private FiniteClosure emptyFC(){
return new FiniteClosure(new HashSet<>(), null);
}
}

@ -1,11 +0,0 @@
class Cycle{
<A> A und(A a, A b){
return a;
}
m(a){
return und(m(m(a)), a);
}
}

@ -1,5 +0,0 @@
class Twice{
m(x, f){
return f.apply(f.apply(x));
}
}

@ -1,17 +0,0 @@
class Generics{
a;
id(b) {
var c = b;
return c;
}
setA(x) {
a = x;
return a;
}
m(x,y) {
x = id(y);
}
}