Merge branch 'refactoring' into bytecode

JanUlrich 2016-04-12 10:28:20 +02:00
commit 22393888f1
120 changed files with 4761 additions and 7192 deletions

View File

@ -1,6 +1,7 @@
eclipse.preferences.version=1
encoding//src/de/dhbwstuttgart/core/MyCompiler.java=UTF-8
encoding//src/de/dhbwstuttgart/syntaxtree/statement/LambdaExpression.java=UTF-8
encoding//src/de/dhbwstuttgart/typeinference/SingleConstraint.java=UTF-8
encoding//src/de/dhbwstuttgart/typeinference/UndConstraint.java=UTF-8
encoding//src/de/dhbwstuttgart/typeinference/UnifySingleConstraint.java=UTF-8
encoding//src/de/dhbwstuttgart/typeinference/UnifyUndConstraint.java=UTF-8
encoding/<project>=UTF-8

View File

@ -2,6 +2,14 @@ package de.dhbwstuttgart.myexception;
public class NotImplementedException extends RuntimeException {
public NotImplementedException(String message) {
super(message);
}
public NotImplementedException() {
super("Nicht implementiert");
}
/**
*
*/
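The added String constructor lets callers attach a type-specific message, as the UnifyTypeFactory hunk further down does. A minimal, self-contained sketch of that assumed usage, with the exception class repeated here as a stub so the snippet compiles on its own:

// Sketch only: stub copy of the class above plus a hypothetical call site.
class NotImplementedException extends RuntimeException {
    public NotImplementedException(String message) { super(message); }
    public NotImplementedException() { super("Nicht implementiert"); }
}

class MessageDemo {
    public static void main(String[] args) {
        try {
            // mirrors UnifyTypeFactory.convert(Type) reporting the unconvertible type
            throw new NotImplementedException("Der Typ Foo kann nicht umgewandelt werden");
        } catch (NotImplementedException e) {
            System.out.println(e.getMessage());
        }
    }
}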

View File

@ -1693,7 +1693,7 @@ assignment :lefthandside assignmentoperator assignmentexpr
{
de.dhbwstuttgart.logger.Logger.getLogger("parser").debug("\nParser --> Zuweisung1!\n", Section.PARSER);
Assign Ass = new Assign($1.getOffset(),$1.getVariableLength());
LocalOrFieldVar LOFV = new LocalOrFieldVar($1.getOffset(),$1.getVariableLength());
LocalOrFieldVarOrClassname LOFV = new LocalOrFieldVarOrClassname($1.getOffset(),$1.getVariableLength());
LOFV.set_UsedId($1);
//auskommentiert von Andreas Stadelmeier (a10023) LOFV.setType(TypePlaceholder.fresh());
//auskommentiert von Andreas Stadelmeier (a10023) Ass.setType(TypePlaceholder.fresh());
@ -1717,7 +1717,7 @@ assignment :lefthandside assignmentoperator assignmentexpr
| lefthandside assignmentoperator classinstancecreationexpression
{
Assign Ass =new Assign($1.getOffset(),$1.getVariableLength());
LocalOrFieldVar LOFV = new LocalOrFieldVar($1.getOffset(),$1.getVariableLength());
LocalOrFieldVarOrClassname LOFV = new LocalOrFieldVarOrClassname($1.getOffset(),$1.getVariableLength());
LOFV.set_UsedId($1);
//auskommentiert von Andreas Stadelmeier (a10023) LOFV.setType(TypePlaceholder.fresh());
//auskommentiert von Andreas Stadelmeier (a10023) Ass.setType(TypePlaceholder.fresh());
@ -1939,7 +1939,7 @@ methodinvocation:
rec = new Receiver(INSTVA);
}
else if ($1.get_Name().size() == 2) {
LocalOrFieldVar LOFV = new LocalOrFieldVar($1.getOffset(),$1.getVariableLength());
LocalOrFieldVarOrClassname LOFV = new LocalOrFieldVarOrClassname($1.getOffset(),$1.getVariableLength());
$1.removeLast();
LOFV.set_UsedId($1);
//auskommentiert von Andreas Stadelmeier (a10023) LOFV.setType(TypePlaceholder.fresh());
@ -1969,7 +1969,7 @@ methodinvocation:
rec = new Receiver(INSTVA);
}
else if ($1.get_Name().size() == 2) {
LocalOrFieldVar LOFV = new LocalOrFieldVar($1.getOffset(),$1.getVariableLength());
LocalOrFieldVarOrClassname LOFV = new LocalOrFieldVarOrClassname($1.getOffset(),$1.getVariableLength());
$1.removeLast();
LOFV.set_UsedId($1);
//auskommentiert von Andreas Stadelmeier (a10023) LOFV.setType(TypePlaceholder.fresh());
@ -2105,7 +2105,7 @@ postfixexpression :primary
$$ = INSTVA;
}
else {
LocalOrFieldVar Postincexpr = new LocalOrFieldVar($1.getOffset(),$1.getVariableLength());
LocalOrFieldVarOrClassname Postincexpr = new LocalOrFieldVarOrClassname($1.getOffset(),$1.getVariableLength());
Postincexpr.set_UsedId($1);
//auskommentiert von Andreas Stadelmeier (a10023) Postincexpr.setType(TypePlaceholder.fresh());
$$=Postincexpr;

View File

@ -49,7 +49,6 @@ import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.unify.FC_TTO;
import de.dhbwstuttgart.typeinference.unify.Unify;
import org.apache.commons.bcel6.generic.*;
@ -170,59 +169,30 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
{
this.superif = superif;
}
// ino.attribute.superclassid.23014.decldescription type=line
// private Status status;
// ino.end
// ino.attribute.superclassid.23014.declaration
public UsedId superclassid = (SourceFile.READ_OBJECT_SUPERCLASSES_FROM_JRE?UsedId.createFromQualifiedName("Object",-1):null);
// ino.end
// ino.attribute.class_block.23020.decldescription type=line
// private Class java;
// ino.end
// ino.attribute.class_block.23020.declaration
private Block class_block;
// ino.end
// ino.attribute.paralist.23023.declaration
//private Menge<Type> paralist = new Menge<Type>(); // Parameterliste 'class xy<para1, para2,...>{}' wird gespeichert
// ino.end
// ino.attribute.parahash.23026.declaration
private Hashtable<String,String> parahash = new Hashtable<String,String>(); // parametrisierten Attrib. werden mit den Paramet.aus paralist verk.
// ino.end
// ino.attribute.isFirstLocalVarDecl.23029.declaration
public static boolean isFirstLocalVarDecl; //Hilfsvariable fuer Offsets, hoth
// ino.end
//private Menge<GenericTypeVar> genericClassParameters=new Menge<GenericTypeVar>();
// ino.attribute.containedTypes.23032.decldescription type=line
public UsedId superclassid = (SourceFile.READ_OBJECT_SUPERCLASSES_FROM_JRE?UsedId.createFromQualifiedName("Object",-1):null);
private Block class_block;
private Hashtable<String,String> parahash = new Hashtable<String,String>(); // parametrisierten Attrib. werden mit den Paramet.aus paralist verk.
public static boolean isFirstLocalVarDecl; //Hilfsvariable fuer Offsets, hoth
// PL 05-07-30 eingefuegt. Vektor aller Typdeklarationen, die in der Klasse
// vorkommen. Wird in der Studienarbeit von Andreas Stadelmeier nur für Verifizierung der Tests eingesetzt.
// ino.end
// ino.attribute.containedTypes.23032.declaration
private Menge<Type> containedTypes = new Menge<Type>();
// ino.end
// ino.attribute.usedIdsToCheck.23035.declaration
private Menge<UsedId> usedIdsToCheck = new Menge<UsedId>();
// ino.end
private TypeAssumptions typeAssumptions = null;//muss mit null Initialisiert werden. Darf nur über getTypeAssumptions abgerufen werden.
// ino.attribute.parserlog.23038.declaration
//protected Logger parselog = Logger.getLogger("parser");
// ino.end
protected Logger typinferenzLog = Logger.getLogger(Class.class.getName());
private SyntaxTreeNode parent;
private Menge<Field> fielddecl = new Menge<Field>();
private GenericDeclarationList genericClassParameters;
private int offset;
private Type superClass;
private RefType superClass;
// ino.method.Class.23041.definition
public Class(String name, int offset)
// ino.end
// ino.method.Class.23041.body
{
this.name = name;
if(name.equals("java.lang.Object")){
@ -242,7 +212,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
* @param modifiers
* @param supertypeGenPara - Eine Liste von Namen, welche die Generischen Parameter der Klasse darstellen.
*/
public Class(String name, Type superClass, Modifiers modifiers, Menge<String> supertypeGenPara) {
public Class(String name, RefType superClass, Modifiers modifiers, Menge<String> supertypeGenPara) {
this(name,superClass,modifiers,0);
if(supertypeGenPara == null)return;
Menge<GenericTypeVar> gtvs = new Menge<>();
@ -263,7 +233,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
this.setGenericParameter(new GenericDeclarationList(gtvs,0));
}
public Class(String name, Type superClass, Modifiers mod, int offset){
public Class(String name, RefType superClass, Modifiers mod, int offset){
this(name,mod,offset);
if(superClass == null)this.superClass = new Class("java.lang.Object",-1).getType();
else this.superClass = superClass;
@ -295,14 +265,14 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
public Class(String name2, Modifiers object, ClassBody classBody,
Menge<Type> containedTypes2, Menge<Type> typeMenge,
Menge<Type> paraMenge, int offset2) {
this(name2, object, classBody, containedTypes2,(Type)null, typeMenge, paraMenge, offset2);
this(name2, object, classBody, containedTypes2,(RefType)null, typeMenge, paraMenge, offset2);
}
private static Menge<Type> usedIdToRefType(Menge<UsedId> superif2) {
Menge<Type> ret = new Menge<>();
for(UsedId id : superif2)ret.add(usedIdToRefType(id));
return ret;
}
private static Type usedIdToRefType(UsedId superclass2) {
private static RefType usedIdToRefType(UsedId superclass2) {
RefType ret = new RefType(superclass2.getSimpleName(), null, superclass2.getOffset());
ret.set_ParaList(superclass2.get_ParaList());
return ret;
@ -316,7 +286,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
// ino.end
// ino.method.Class.23047.definition
public Class(String name, Modifiers mod, ClassBody cb, Menge<Type> ct,
Type superclass, Menge<Type> Menge, Menge<? extends Type> paralist, int offset)
RefType superclass, Menge<Type> Menge, Menge<? extends Type> paralist, int offset)
// ino.end
// ino.method.Class.23047.body
{
@ -627,7 +597,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
*/
// ino.end
// ino.method.TRProg.23110.definition
public ConstraintsSet typeReconstruction(FC_TTO supportData, TypeAssumptions globalAssumptions)
public ConstraintsSet typeReconstruction(TypeAssumptions globalAssumptions)
// ino.end
// ino.method.TRProg.23110.body
{
@ -639,8 +609,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
//////////////////////////////
inferencelog.info("Rufe TRStart()...", Section.TYPEINFERENCE);
typinferenzLog.debug("Erstellte FiniteClosure: "+supportData, Section.TYPEINFERENCE);
//////////////////////////////
//////////////////////////////
// Ab hier ...
// @author A10023 - Andreas Stadelmeier:
//////////////////////////////
@ -673,7 +642,8 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
//ConstraintsSet oderConstraints = this.TYPE(this.getMethodList(), fieldInitializers, assumptions);
this.superClass = this.superClass.TYPE(assumptions, this);
//Gibt es hier eine ClassCastException stimmt etwas grundsätzlich nicht!
this.superClass = (RefType)this.superClass.TYPE(assumptions, this);
for(Field f:this.getFields()){
oderConstraints.add(f.TYPE(assumptions));
@ -1030,7 +1000,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
* Die Super Klasse dieser Klasse.
* @return null für Klasse Object
*/
public Type getSuperClass(){
public RefType getSuperClass(){
return this.superClass;
}
@Override

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ConstructorAssumption;
import de.dhbwstuttgart.typeinference.assumptions.MethodAssumption;

View File

@ -21,8 +21,8 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.OderConstraint;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.FieldAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
@ -148,13 +148,13 @@ public class FieldDeclaration extends Field{
}
*/
SingleConstraint c1 = new SingleConstraint(thisType, thisType);
UndConstraint c1 = ConstraintsSet.createSingleConstraint(thisType, thisType);
ret.add(c1); //Damit die TypVariable des Felds in den Constraints auftaucht
if(this.wert!=null){
//Falls bei der Deklaration ein Wert zugewiesen wird, verhält sich das Constraintserzeugen wie bei dem Assign-Statement:
ret.add(this.wert.TYPEExpr(localAssumptions));
ret.add(new SingleConstraint(this.wert.getType().TYPE(localAssumptions,this), thisType));
ret.add(ConstraintsSet.createSingleConstraint(this.wert.getType().TYPE(localAssumptions,this), thisType));
}
return ret;
}
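The call sites above (and the analogous ones in Method, Assign, Block and IfStmt below) switch from constructing SingleConstraint directly to a static factory on ConstraintsSet that yields an UndConstraint. A minimal sketch of what such a factory presumably looks like, using hypothetical stub types rather than the real project classes:

// Sketch with hypothetical stubs; only the factory shape is taken from the diff.
import java.util.ArrayList;

class SketchType {}

class SketchPair {
    final SketchType lhs, rhs;
    SketchPair(SketchType lhs, SketchType rhs) { this.lhs = lhs; this.rhs = rhs; }
}

class SketchUndConstraint extends ArrayList<SketchPair> {}

class SketchConstraintsSet {
    // replaces "new SingleConstraint(a, b)" at the call sites
    static SketchUndConstraint createSingleConstraint(SketchType a, SketchType b) {
        SketchUndConstraint c = new SketchUndConstraint();
        c.add(new SketchPair(a, b)); // a single "a <. b" pair wrapped as an und-constraint
        return c;
    }
}

Centralizing construction this way lets ConstraintsSet decide the concrete constraint representation without touching every TYPE method.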

View File

@ -46,9 +46,9 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeInsertable;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.MethodAssumption;
import de.dhbwstuttgart.typeinference.assumptions.ParameterAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
@ -499,7 +499,7 @@ public class Method extends Field implements IItemWithOffset, TypeInsertable
ret.add(this.block.TYPEStmt(localAss));
// eine Verknüpfung mit der Type Assumption aus dem Assumption Set
// und dem ermittelten Typ der Methode:
ret.add(new SingleConstraint(this.block.getType().TYPE(localAss, this), this.returntype.TYPE(localAss, this)));
ret.add(ConstraintsSet.createSingleConstraint(this.block.getType().TYPE(localAss, this), this.returntype.TYPE(localAss, this)));
return ret;
}

View File

@ -0,0 +1,26 @@
package de.dhbwstuttgart.syntaxtree;
import de.dhbwstuttgart.typeinference.Menge;
/**
* Dieser SyntaxTreeNode kann anstelle von null in einem Syntaxbaum eingesetzt werden.
* Vorsicht: Als Offset wird dann immer 0 zurück gegeben.
*/
public class NullSyntaxTreeNode extends SyntaxTreeNode {
@Override
public int getOffset() {
return 0;
}
@Override
public int getVariableLength() {
return 0;
}
@Override
public Menge<? extends SyntaxTreeNode> getChildren() {
return new Menge<>();
}
}
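NullSyntaxTreeNode is a Null Object: code that only needs an offset or a child list can be handed this node instead of null, which is how UnifyTypeFactory's NULL_NODE constant is used below. A compilable sketch of the pattern with stubbed-down stand-in types:

// Sketch only: SyntaxTreeNode is reduced to the one method the example needs.
abstract class SketchSyntaxTreeNode {
    abstract int getOffset();
}

class SketchNullSyntaxTreeNode extends SketchSyntaxTreeNode {
    @Override
    int getOffset() { return 0; } // the null node always reports offset 0
}

class NullNodeDemo {
    static int offsetOf(SketchSyntaxTreeNode parent) {
        return parent.getOffset(); // no null check needed at the call site
    }
    public static void main(String[] args) {
        System.out.println(offsetOf(new SketchNullSyntaxTreeNode())); // prints 0
    }
}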

View File

@ -9,6 +9,8 @@ import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Set;
import java.util.function.Function;
import de.dhbwstuttgart.typeinference.Menge;
@ -23,6 +25,7 @@ import de.dhbwstuttgart.myexception.JVMCodeException;
import de.dhbwstuttgart.myexception.SCClassException;
import de.dhbwstuttgart.myexception.SCException;
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.misc.DeclId;
import de.dhbwstuttgart.syntaxtree.misc.UsedId;
import de.dhbwstuttgart.syntaxtree.modifier.Modifiers;
@ -37,29 +40,27 @@ import de.dhbwstuttgart.typeinference.ByteCodeResult;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.FunNInterface;
import de.dhbwstuttgart.typeinference.FunNMethod;
import de.dhbwstuttgart.typeinference.KomplexeMenge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResults;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.UnifyConstraintsSet;
import de.dhbwstuttgart.typeinference.assumptions.ClassAssumption;
import de.dhbwstuttgart.typeinference.assumptions.MethodAssumption;
import de.dhbwstuttgart.typeinference.assumptions.ParameterAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.FC_TTO;
import de.dhbwstuttgart.typeinference.unify.Unifier;
import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
// ino.class.SourceFile.21355.declaration
public class SourceFile
extends SyntaxTreeNode
// ino.end
// ino.class.SourceFile.21355.body
{
// ino.attribute.LOAD_BASIC_ASSUMPTIONS_FROM_JRE.21358.decldescription type=javadoc
/**
@ -170,17 +171,13 @@ public class SourceFile
this.KlassenVektor = classDefinitions;
}
// ino.attribute.imports.21382.decldescription type=javadoc
/**
* HOTI 4.5.06
* Beinhaltet alle Imports des aktuell geparsten Files
* in Form einer UsedId
*/
// ino.end
// ino.attribute.imports.21382.declaration
private ImportDeclarations imports=new ImportDeclarations();
// ino.end
// ino.attribute.baseTypeTranslationTable.21385.decldescription type=javadoc
/**
* Table zum Übersetzen der nicht implementierten Base-Types:
* Überall im Compiler wird statt bspw. int Integer verwendet
@ -189,24 +186,13 @@ public class SourceFile
* der JRE gelieferten Base-Typen (int,char, etc) und die Objekt-
* Typen umwandeln können
*/
// ino.end
// ino.attribute.baseTypeTranslationTable.21385.declaration
private Hashtable<String,String> baseTypeTranslationTable;
// ino.end
// ino.method.addElement.21394.defdescription type=javadoc
/**
* Fuegt ein neues Element (Interface oder Klasse) hinzu.
* @param c
*/
// ino.end
// ino.method.addElement.21394.definition
public void addElement(AClassOrInterface e)
// ino.end
// ino.method.addElement.21394.body
{
if (e instanceof Class) {
KlassenVektor.addElement((Class) e);
@ -214,447 +200,8 @@ public class SourceFile
InterfaceVektor.addElement((Interface) e);
}
}
// ino.end
// ino.method.codegen.21397.defdescription type=javadoc
/**
* Startet die Bytecodegenerierung fuer alle in der Datei
* enthaltenen Klassen und Interfaces.
*
// ino.end
// ino.method.codegen.21397.definition
public Menge<ClassFile> codegen(ResultSet result)
throws JVMCodeException
// ino.end
// ino.method.codegen.21397.body
{
Menge<ClassFile> ret = new Menge<ClassFile>();
codegenlog.info("Anzahl der Interfaces: "
+ Integer.toString(InterfaceVektor.size()));
for(int i = 0; i < InterfaceVektor.size(); i++) {
InterfaceVektor.elementAt(i).codegen(result);
}
codegenlog.info("Anzahl der Klassen: "
+ Integer.toString(KlassenVektor.size()));
for(int i = 0; i < KlassenVektor.size(); i++) {
ret.add(KlassenVektor.elementAt(i).codegen(result));
}
return ret;
}
// ino.end
*/
// ino.method.createPairFromClassAndSuperclass.21400.defdescription type=javadoc
/**
* Erstellt ein Typ-Paar, welches im 1. Durchlauf in die Menge der Finite Closure
* aufgenommen wird Input: Klassenname, Name der Superklasse, ParameterDerKlasse,
* Parameter der Superklasse
* @return
*/
// ino.end
// ino.method.createPairFromClassAndSuperclass.21400.definition
private Pair createPairFromClassAndSuperclass(Class baseClass, Type superclass, Menge classParaOrg, Menge superclassParaOrg, TypeAssumptions ass)
// ino.end
// ino.method.createPairFromClassAndSuperclass.21400.body
{
// Paar erstellen
if(classParaOrg!=null && classParaOrg.size()==0){
classParaOrg=null;
}
if(superclassParaOrg!=null && superclassParaOrg.size()==0){
superclassParaOrg=null;
}
/*
Pair P = new Pair(
new RefType( className.toString(), classParaOrg,-1),
new RefType( superclassName.toString(), superclassParaOrg,-1)
);
*/
Pair P = new Pair(baseClass.getType().TYPE(ass, baseClass), superclass.TYPE(ass, baseClass));
//PL 04-12-29 freshe Variablen ANFANG
RefType r1 = (RefType)P.getTA1Copy();
RefType r2 = (RefType)P.getTA2Copy();
r1 = (RefType) r1.TYPE(ass, baseClass);
r2 = (RefType) r2.TYPE(ass, baseClass);
// #JB# 05.04.2005
// ###########################################################
Hashtable<JavaClassName,Type> substHash = new Hashtable<JavaClassName,Type>(); //fuer jedes Paar komplett neue Variablen
Unify.varSubst(r1, substHash);
Unify.varSubst(r2, substHash);
// ###########################################################
P = new Pair(r1, r2);
//PL 04-12-29 freshe Variablen ENDE
//HIER AUSKOMMENTIERT, SOLLTE MAN AM ENDE WIEDER DAZU NEHMEN PL 04-12-28
// gleiches Paar aufnehmen
//vFC.add( new Pair( P.getTA1Copy(), P.getTA1Copy() ) );
return(P);
}
// ino.end
// ino.method.makeFC.21403.defdescription type=javadoc
/**
* Erstellt die Finite Closure
* @return FC_TTO-Object, welches die Finite Closure repräsentiert
*/
public FC_TTO makeFC( TypeAssumptions ass )
{
// Menge FC bilden
Menge<Pair> vFC = new Menge<Pair>(); // Menge FC
TypeAssumptions globalAssumptions = this.makeBasicAssumptionsFromJRE(imports, false);
globalAssumptions.add(this.getPublicFieldAssumptions());
// 1. Menge <= in FC aufnehmen --> Iteration ueber alle Klassen
Menge<Type> ignoreTypes = new Menge<>(); //Enthält die Typen, welche nicht in der FC als Supertypen enthalten sein sollen.
ignoreTypes.add(new RefType("Long",null,-1).TYPE(globalAssumptions, parent));
ignoreTypes.add(new RefType("Float",null,-1).TYPE(globalAssumptions, parent));
ignoreTypes.add(new RefType("Double",null,-1).TYPE(globalAssumptions, parent));
ignoreTypes.add(new RefType("String",null,-1).TYPE(globalAssumptions, parent));
ignoreTypes.add(new RefType("Integer",null,-1).TYPE(globalAssumptions, parent));
ignoreTypes.add(new RefType("Object",null,-1).TYPE(globalAssumptions, parent));
Menge<Class> basicAssumptionsClassMenge = new Menge<>(); //die Klassen aus den BasicAssumptions und den Importierten Klassen
for(ClassAssumption cAss : ass.getClassAssumptions()){
Type t1 = cAss.getAssumedClass().getType();
Type t2 = cAss.getAssumedClass().getSuperClass();
if(t2 != null){
Pair p = new Pair(t1, t2);
//System.out.println("FCPair: "+p);
if(! t1.equals(t2)){//Um FC_TTO darf kein T <. T stehen.
Type superTypeFromAssumptions = ass.getTypeFor(t2, t2); //In den Assumptions den SuperTyp nachschlagen
if(superTypeFromAssumptions != null && ! ignoreTypes.contains(superTypeFromAssumptions)){//Die Superklasse eines Typs nur anfügen, wenn er auch in den Assumptions vorkommt.
vFC.add(p);
}
basicAssumptionsClassMenge.add(cAss.getAssumedClass());//Klasse ohne die Superklasse anfügen
}else{
//System.out.println("Wurde nicht aufgenommen");
}
}
}
for( int i = 0; i < KlassenVektor.size(); i++ )
{
Class tempKlasse = KlassenVektor.elementAt(i);
inferencelog.debug("Verarbeite "+tempKlasse.getName(), Section.TYPEINFERENCE);
//TODO: SuperKlasse erstellen, dies sollte am besten beim Konstruktoraufruf von Class geschehen. Diese kann dann mit getSuperClass abgefragt werden.
if( tempKlasse.superclassid != null ) { // Klasse hat Superklasse
Pair P=createPairFromClassAndSuperclass(tempKlasse,tempKlasse.getSuperClass(),tempKlasse.get_ParaList(),tempKlasse.superclassid.get_ParaList(), globalAssumptions);
vFC.add( P );
}
if(tempKlasse.getSuperInterfaces()!=null){
Iterator<Type> interfaceIterator=tempKlasse.getSuperInterfaces().iterator();
while(interfaceIterator.hasNext()){
RefType intf=(RefType) interfaceIterator.next();
Pair P=createPairFromClassAndSuperclass(tempKlasse,intf,tempKlasse.get_ParaList(),intf.get_ParaList(),globalAssumptions);
vFC.add( P );
}
}
} // Schleifenende durch Klassenvektor
for(int i=0; i<InterfaceVektor.size();i++){
Interface intf= InterfaceVektor.get(i);
if(intf.getSuperInterfaces()!=null){
Iterator<Type> interfaceIterator=intf.getSuperInterfaces().iterator();
while(interfaceIterator.hasNext()){
RefType superintf=(RefType) interfaceIterator.next();
Pair P=createPairFromClassAndSuperclass(intf,superintf,intf.getParaList(), superintf.get_ParaList(),globalAssumptions);
vFC.add( P );
}
}
}
Menge tto = (Menge)vFC.clone();
Unify.printMenge( "FC", vFC, 6 );
/* z.B.
*******************************
Menge FC = {
(Vektor< A >, Vektor< A >),
(Vektor< A >, AbstractList< A >),
(Matrix< A >, Matrix< A >),
(Matrix< A >, Vektor< Vektor< A > >),
(ExMatrix< A >, ExMatrix< A >),
(ExMatrix< A >, Matrix< A >) }
*******************************
ODER
*******************************
Menge FC = {
(BB< A >, BB< A >),
(BB< A >, CC< A >),
(AA< A, B >, AA< A, B >),
(AA< A, B >, BB< DD< B, A > >) }
*******************************
*/
// 2. Regel 2 der Huellendefinition "eingeschraenkt" anwenden
// d.h. sinnvolle Substitutionen suchen (nicht alle)
boolean bPaarHinzu = true;
while( bPaarHinzu )
{
bPaarHinzu = false; //PL 04-12-29 nur wenn hinzugefuegt auf true setzen
// konkret: rechte Seite von FC nach Typkonstruktoren in der Parameterliste durchsuchen
for( int n = 0; n < vFC.size(); n++ )
{
// Elemente in FC können nur Pair's sein --> Cast ohne Abfrage
Pair PTypKonst = vFC.elementAt(n);
// Parameter des rechten Typausdrucks des betrachteten Paars extrahieren
Menge<Type> vPara = ((RefType)(PTypKonst.TA2)).get_ParaList();
Integer Subst = null; // Substitution
int nSubstStelle = 0;
inferencelog.debug("nSubstStelleStart" + nSubstStelle + " " + n, Section.FINITECLOSURE);
// Parameter durchlaufen und nach Typkonstruktor suchen
// #JB# 17.05.2005
// ###########################################################
if(vPara!=null){
// ###########################################################
for( ; nSubstStelle < vPara.size(); nSubstStelle++ )
{
inferencelog.debug("nSubstStelle" + nSubstStelle, Section.FINITECLOSURE);
if( vPara.elementAt(nSubstStelle) instanceof RefType && ((RefType)vPara.elementAt(nSubstStelle)).get_ParaList() != null )
{
// Typkonstruktor gefunden -> wird nun als Substitution verwendet
Subst = 1;//new RefType( (RefType)vPara.elementAt(nSubstStelle) ,-1);
inferencelog.debug( "Ausgangstyp:" + ((RefType)PTypKonst.TA2).getName() , Section.FINITECLOSURE);
inferencelog.debug( "RefType = " + ((RefType)vPara.elementAt(nSubstStelle)).getName() , Section.FINITECLOSURE);
break; // Einschraenkung - nur fuer ein RefType wird eine Substitution gesucht
}
}
// ###########################################################
}
// ###########################################################
if( Subst != null )
{
// Rechter Typ hat einen Typkonstruktor --> sinvolles neues Paar bilden
// d.h. Rechter Typ auf linker Paarseite suchen
// System.out.println("Subststelle = " + nSubstStelle );
for( int t = 0; t < vFC.size(); t++ )
{
Pair PSuchen = vFC.elementAt(t);
if( ((RefType)(PTypKonst.TA2)).getTypeName().equals( ((RefType)PSuchen.TA1).getTypeName() ) )
{
inferencelog.debug(" gefundener Typ links: " + ((RefType)(PSuchen.TA1)).getName(), Section.FINITECLOSURE );
inferencelog.debug(" gefundener Typ rechts: " + ((RefType)(PSuchen.TA2)).getName() , Section.FINITECLOSURE);
// Paar gefunden, das als linken Typ den gleichen Typen enthält, der als Parameter einen Typkonstruktor hat
// Substitution
//Pair P = new Pair( PSuchen.getTA1Copy( ), PSuchen.getTA2Copy( ) );
//linker Typterm bleibt gleich
//rechter Typterm wird aussen auf den Supertyp gesetzt.
//restliches FC erfolgt ueber die Transitivitaet
//siehe im unteren Teil
Pair P = new Pair( PTypKonst.getTA1Copy( ), PSuchen.getTA2Copy( ) );
// System.out.println(" Subst " + Subst.getName() );
// System.out.println(" Vor: P = " + P.toString() + P.TA1 );
// System.out.println(" Vor: PSuchen = " + PSuchen.toString() + PSuchen.TA1 );
// Parameter, der substituiert wird, sollte TV sein ???
//TypePlaceholder TV = null;
// if( ((RefType)P.TA1).isTV( nSubstStelle ) )
// try
// {
// TV = new TypePlaceholder( ((RefType)P.TA1).getParaN( nSubstStelle ) );
// }
// catch( Exception E )
// {
// continue;
// }
// else
// continue;
//es werden alle Parameter in einem Typeterm, der
//der Argumente hat ersetzt PL 04-12-28
Hashtable<JavaClassName,Type> hts = new Hashtable<JavaClassName,Type>();
//for(int u = nSubstStelle; u < vPara.size(); u++) {
for(int u = 0; u < vPara.size(); u++) {
try {
// #JB# 05.04.2005
// ###########################################################
//TV = new TypePlaceholder( ((RefType)PSuchen.TA1).getParaN(u) );
//System.out.println("TV_Name: " + u + TV.Type2String());
// ###########################################################
inferencelog.debug("Typterm_Name: " + vPara.elementAt(u), Section.FINITECLOSURE);
inferencelog.debug("Typterm_Name: " + ((Type)vPara.elementAt(u)).Type2String(), Section.FINITECLOSURE);
hts.put(new JavaClassName(((RefType)PSuchen.TA1).getParaN(u)), vPara.elementAt(u));
}
catch( Exception E ) {
inferencelog.error(E.getMessage(), Section.FINITECLOSURE);
//FIXME Throw Exception or Error instead of exiting!
System.exit(0);
}
// Subst( P,
// 2,
// TV,
// new RefType( (RefType)vPara.elementAt(u) ),
// false ); // rechte Seite substituieren
//Es genuegt die rechte Seite zu substituieren, da
//die linke Seite ein Typterm ausschließlich mit
//Typvariablen ist
}
//Unify.SubstHashtableGeneric(((RefType)P.TA1), hts); //funktioniert nicht
Unify.SubstHashtableGeneric(((RefType)P.TA2), hts); //funktioniert nicht
// System.out.println(" TV!!!= " + TV.getName() );
//Subst( P, 1, TV, Subst, false ); // linke Seite substituieren
//Subst( P, 2, TV, Subst, false ); // rechte Seite substituieren
// System.out.println(" nach Subst: P = " + P.toString() );
// System.out.println(" Nach: PSuchen = " + PSuchen.toString() );
// System.out.println(" Nach: " + P.toString() );
// Paar einfuegen, falls noch nicht vorhanden
// System.out.println("Paar alt:" + PSuchen.toString() );
// System.out.println("Paar neu:" + P.toString() );
if( !P.isInMenge( vFC ) )
{
vFC.add( P );
Unify.printMenge( "FC", vFC, 6 );
bPaarHinzu = true;
}
//PL 04-12-29
// else //unnoetig, da am Anfang bereits false gesetzt
// {
// bPaarHinzu = false;
// }
}
}
} // end if: Substitution gefunden???
} // end for: Typkonstruktor suchen
// Transitivitaet berechnen
for( int u = 0; u < vFC.size(); u++ )
{
Pair PTemp = vFC.elementAt(u);
// falls rechtes Paar = RefType
if( PTemp.TA2 instanceof RefType )
{
RefType R = (RefType)PTemp.TA2;
// rechte Seite auf linker Seite suchen
for( int e = 0; e < vFC.size(); e++ )
{
Pair PSuch = vFC.elementAt(e);
// als linke Paarseite theortisch nur RefType's moeglich --> Cast
RefType RSuch = (RefType)PSuch.TA1;
//if( R.getName().equals(RSuch.getName()) )
if (R.is_Equiv(RSuch, new Hashtable<JavaClassName,Type>())) //eingefuegt PL 05-01-07
{
// Paar einfuegen, falls noch nicht vorhanden
RefType L1 = (RefType)PTemp.getTA1Copy();
RefType L2 = (RefType)PTemp.getTA2Copy();
RefType R1 = (RefType)PSuch.getTA1Copy();
RefType R2 = (RefType)PSuch.getTA2Copy();
//zunaechst Variablen disjunkt machen ANFANG
// #JB# 05.04.2005
// ###########################################################
Hashtable<JavaClassName,Type> substHash1 = new Hashtable<JavaClassName,Type>();
Unify.varSubst(L1, substHash1);
Unify.varSubst(L2, substHash1);
Hashtable<JavaClassName,Type> substHash2 = new Hashtable<JavaClassName,Type>();
Unify.varSubst(R1, substHash2);
Unify.varSubst(R2, substHash2);
// ###########################################################
//zunaechst Variablen disjunkt machen ENDE
//Variablen so umbennen, dass transitiver Abschluss richtige
//Namen hat ANFANG
// #JB# 05.04.2005
// ###########################################################
Hashtable<JavaClassName,Type> h = new Hashtable<JavaClassName,Type>();
L2.Equiv2Equal(R1, h);
Hashtable<JavaClassName,Type> substHash3 = h;
Unify.varSubst(L1, substHash3);
Unify.varSubst(R2, substHash3);
// ###########################################################
//Variablen so umbennen, dass transitiver Abschluss richitge
//Namen hat ENDE
//Pair P = new Pair( (RefType)PTemp.TA1, (RefType)PSuch.TA2 );
Pair P = new Pair(L1, R2);
if( !P.isInMenge( vFC ) )
{
vFC.add( P );
bPaarHinzu = true;
}
else
{
bPaarHinzu = false;
}
}
} // end for: linke Seite suchen
} // end if: Element ist RefType
} // end for: Transitivitäten berechnen
//PL HIER REFLEXIVE HUELLE EINFUEGEN
// 05-01-07
} // Ende WHILE
/* z.B.
*******************************
Menge nach trans: FC = {
(Vektor< A >, Vektor< A >),
(Vektor< A >, AbstractList< A >),
(Matrix< A >, Matrix< A >),
(Matrix< A >, Vektor< Vektor< A > >),
(ExMatrix< A >, ExMatrix< A >),
(ExMatrix< A >, Matrix< A >),
(Vektor< Vektor< A > >, Vektor< Vektor< A > >),
(Vektor< Vektor< A > >, AbstractList< Vektor< A > >),
(Matrix< A >, AbstractList< Vektor< A > >),
(ExMatrix< A >, Vektor< Vektor< A > >),
(ExMatrix< A >, AbstractList< Vektor< A > >) }
ODER
*******************************
Menge nach trans: FC = {
(BB< A >, BB< A >),
(BB< A >, CC< A >),
(AA< A, B >, AA< A, B >),
(AA< A, B >, BB< DD< B, A > >),
(BB< DD< B, A > >, BB< DD< B, A > >),
(BB< DD< B, A > >, CC< DD< B, A > >),
(AA< A, B >, CC< DD< B, A > >) }
*******************************
******************************* */
// printMenge( "nach trans: FC", vFC, 6 );
Menge<Class> KlassenVektorunImportierteKlassen = new Menge<>();
KlassenVektorunImportierteKlassen.addAll(basicAssumptionsClassMenge);
KlassenVektorunImportierteKlassen.addAll(KlassenVektor);
FC_TTO fctto = new FC_TTO(vFC, tto, KlassenVektorunImportierteKlassen);
return fctto;
}
public TypeAssumptions getPublicFieldAssumptions(){
TypeAssumptions publicAssumptions = new TypeAssumptions(null);
//Alle PublicAssumptions der in dieser SourceFile enthaltenen Klassen sammeln:
for(Class klasse : KlassenVektor){
publicAssumptions.add(klasse.getPublicFieldAssumptions());
}
return publicAssumptions;
}
/////////////////////////////////////////////////////////////////////////
// TypeReconstructionAlgorithmus
/////////////////////////////////////////////////////////////////////////
@ -691,63 +238,66 @@ public class SourceFile
typinferenzLog.debug("Von JRE erstellte Assumptions: "+importAssumptions, Section.TYPEINFERENCE);
//FiniteClosure generieren:
FC_TTO finiteClosure = this.makeFC(globalAssumptions);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(globalAssumptions);
typinferenzLog.debug("FiniteClosure: \n"+finiteClosure, Section.TYPEINFERENCE);
ConstraintsSet oderConstraints = new ConstraintsSet();
//Alle Constraints der in dieser SourceFile enthaltenen Klassen sammeln:
for(Class klasse : KlassenVektor){
oderConstraints.add(klasse.typeReconstruction(finiteClosure, globalAssumptions));
oderConstraints.add(klasse.typeReconstruction(globalAssumptions));
}
/*////////////////
* Paare in MPairs umwandeln
* (Wird zunächst mal weggelassen. Constraints werden erst beim Unifizieren umgewandelt
*/////////////////
//UnifyTypeFactory.convert(oderConstraints);
////////////////
//Karthesisches Produkt bilden:
//Typen in UnifyTypen umwandeln:
////////////////
UnifyConstraintsSet unifyConstraints = UnifyTypeFactory.convert(oderConstraints);
//Unmögliche ConstraintsSets aussortieren durch Unifizierung
Unifier unifier = (pairs)->{
Menge<Menge<Pair>> retValue = new Menge<>();
retValue = Unify.unify(pairs, finiteClosure);
Function<Menge<UnifyPair>,Menge<Menge<UnifyPair>>> unifier = (pairs)->{
Menge<Menge<UnifyPair>> retValue = new Menge<>();
Set<Set<UnifyPair>> unifiedPairs = new Unify().unify(pairs, finiteClosure);
return retValue;};
//oderConstraints.filterWrongConstraints(unifier);
//oderConstraints.unifyUndConstraints(unifier); //rausgeworfen für Tests (08.12.2015)
typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
typinferenzLog.debug("Übriggebliebene Konvertierte Konstraints:\n"+unifyConstraints+"\n", Section.TYPEINFERENCE);
////////////////
//Karthesisches Produkt bilden:
////////////////
Set<Set<UnifyPair>> xConstraints = unifyConstraints.cartesianProduct();
typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
//Die Constraints in Pair's umwandeln (Karthesisches Produkt bilden):
Menge<Menge<Pair>> xConstraints = oderConstraints.cartesianProduct();
typinferenzLog.debug("Finite Closure: "+finiteClosure, Section.TYPEINFERENCE);
typinferenzLog.debug("Karthesisches Produkt der Constraints: "+xConstraints, Section.TYPEINFERENCE);
finiteClosure.generateFullyNamedTypes(globalAssumptions);
//finiteClosure.generateFullyNamedTypes(globalAssumptions);
//////////////////////////////
// Unifizierung der Constraints:
//////////////////////////////
boolean unifyFail = true;
for(Menge<Pair> constraints : xConstraints){
for(Set<UnifyPair> constraints : xConstraints){
//Alle durch das Karthesische Produkt entstandenen Möglichkeiten durchgehen:
Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
//Alle FunN-Typen werden per clone-methode in RefTypes verwandelt. (Die clone Methode in FunN darf nicht überschrieben werden.
for(Pair p : constraints){
if(p.TA1 instanceof FunN){
p.TA1 = p.TA1.clone();
}
if(p.TA2 instanceof FunN){
p.TA2 = p.TA2.clone();
}
}
//Erst die Unifizierung erstellen:
Menge<Pair> constraintsClone = (Menge<Pair>)constraints.clone();
//Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
//IDEE: Man bildet Zusammenhangskomponenten von Paaren, die gemeinsame Variablen haben
// und unifizert nur die Zusammenhangskomponenten in Schritten 1 - 5
/*
//Schritt 1: Alle Variablen in den Paaren von Elementen einsammeln
Menge<Menge<TypePlaceholder>> constraintsclonevars = constraintsClone.stream().map(p -> {Menge<TypePlaceholder> TPHs = new Menge<>();
Menge<Menge<TypePlaceholder>> constraintsclonevars = constraints.stream().map(p -> {Menge<TypePlaceholder> TPHs = new Menge<>();
TPHs.addAll(p.TA1.getInvolvedTypePlaceholder());
TPHs.addAll(p.TA2.getInvolvedTypePlaceholder());
return TPHs;}
@ -762,17 +312,17 @@ public class SourceFile
//Schritt 3: Umwandlung der Indizes in die zugehoerigen Elemente
// In streamconstraintsclone sind die Mengen von Paar enthalten die unifiziert werden muessen
Stream<Menge<Pair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
Stream<Menge<MPair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
.map(i -> constraintsClone.elementAt(i))
.<Menge<Pair>>collect(Menge::new, Menge::add, Menge::addAll));
.<Menge<MPair>>collect(Menge::new, Menge::add, Menge::addAll));
//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
//System.out.println();
//Schritt 4: Unifikation
Menge<Menge<Menge<Pair>>> vecunifyResult =
Set<Set<Set<MPair>>> vecunifyResult =
//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
//DEBUG-Variante
streamconstraintsclone.map(x ->
{ Menge<Menge<Pair>> z = Unify.unify(x, finiteClosure);
{ Set<Set<MPair>> z = new Unify().unify(x, finiteClosure);
return z;
}
).collect(Menge::new, Menge::add, Menge::addAll);
@ -785,19 +335,19 @@ public class SourceFile
//Schritt 5: Bildung des cartesischen Produkts
//sollte wieder entfernt werden: Weiterarbeit mit:
//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
Menge<Menge<Pair>> cardprodret_start = new Menge<>();
Set<Set<Pair>> cardprodret_start = new Menge<>();
cardprodret_start.add(new Menge<Pair>());
//cart. Produkt mit Linkverschiebung
Menge<Menge<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
Menge<Menge<Pair>> cardprodret= new Menge<>();
Set<Set<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
Set<Set<Pair>> cardprodret= new Menge<>();
if (y.size() > 0) {
//System.out.println(y);
//Menge<Menge<Pair>> cardprodretold = x;
//cardprodret = new Menge<>();
for(int j = 0; j < x.size(); j++) {
for (int k = 0; k < y.size(); k++){
Menge<Pair> help = new Menge<>();
Set<Pair> help = new Menge<>();
help.addAll(y.elementAt(k));
help.addAll(x.elementAt(j));
cardprodret.add(help);
@ -808,10 +358,23 @@ public class SourceFile
return new Menge<>(); //kein unifiziertes Ergebnis, damit wird das Geseamtergebnis []
return cardprodret;
});
*/
typinferenzLog.debug("\nUnifiziere Constraints:\n"+constraints, Section.TYPEINFERENCE);
Set<Set<UnifyPair>> unifyResult = new Unify().unify(constraints, finiteClosure);
Menge<Menge<Pair>> convertedResult = unifyResult.parallelStream().<Menge<Pair>>map((Set<UnifyPair> resultSet)->{
Menge<Pair> innerConvert = resultSet.stream().map((UnifyPair mp)->UnifyTypeFactory.convert(mp))
.collect(Menge<Pair>::new, Menge::add, Menge::addAll);
return innerConvert;
}).collect(Menge::new, Menge::add, Menge::addAll);
Menge<Pair> convertedConstraints = constraints.stream().map(
(UnifyPair mp)->{return UnifyTypeFactory.convert(mp);}
).collect(Menge<Pair>::new, Menge::add, Menge::addAll);
//Dann den Ergebnissen anfügen
typinferenzLog.debug("\nErgebnis der Unifizierung:\n"+unifyResult, Section.TYPEINFERENCE);
result.addAll(unifyResult);
//result.addAll(convertedResult);
typinferenzLog.debug("\nJavaFiles:\n", Section.TYPEINFERENCE);
@ -821,10 +384,10 @@ public class SourceFile
//für jede Klasse in diesem SourceFile gilt das selbe ResultSet:
for(Class klasse : this.KlassenVektor){
//Der Unifikationsalgorithmus kann wiederum auch mehrere Lösungen errechnen, diese werden im folgenden durchlaufen:
for(Menge<Pair> resultSet : result){
for(Menge<Pair> resultSet : convertedResult){
unifyFail = false; //Ein Unifiziertes Ergebnis ist entstanden (es kann auch leer sein, das bedeutet nur, dass die Constraints mindestens in einem Fall Sinn ergaben)
//Add Result set as a new ReconstructionResult to ret:
TypeinferenceResultSet reconstructionResult = new TypeinferenceResultSet(klasse, constraints, new ResultSet(resultSet));
TypeinferenceResultSet reconstructionResult = new TypeinferenceResultSet(klasse, convertedConstraints, new ResultSet(resultSet));
ret.add(reconstructionResult);
//ResultSet res = new ResultSet(resultSet);
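The rewritten inference path in this file now runs roughly as follows: build a FiniteClosure via UnifyTypeFactory.generateFC, convert the collected ConstraintsSet into unify-model constraints, form the cartesian product of the or-alternatives, unify each alternative with new Unify().unify(...), and convert the UnifyPair results back into Pair/ResultSet objects. A minimal sketch of that pipeline shape, with plain JDK collections and a placeholder unify step standing in for the project classes:

import java.util.*;

class PipelineSketch {
    // placeholder for new Unify().unify(pairs, finiteClosure); here every input "solves" itself
    static Set<Set<String>> unify(Set<String> pairs) {
        return Collections.singleton(pairs);
    }

    public static void main(String[] args) {
        // each inner list is one or-constraint with its alternative und-constraints
        List<List<Set<String>>> oderConstraints = List.of(
                List.of(Set.of("a <. b"), Set.of("a <. c")),
                List.of(Set.of("b <. Object")));

        // cartesian product of the alternatives
        List<Set<String>> cartesian = new ArrayList<>();
        cartesian.add(new HashSet<>());
        for (List<Set<String>> alternatives : oderConstraints) {
            List<Set<String>> next = new ArrayList<>();
            for (Set<String> partial : cartesian) {
                for (Set<String> alternative : alternatives) {
                    Set<String> merged = new HashSet<>(partial);
                    merged.addAll(alternative);
                    next.add(merged);
                }
            }
            cartesian = next;
        }

        // unify each alternative separately; each result would become one ResultSet
        for (Set<String> constraints : cartesian) {
            System.out.println(unify(constraints));
        }
    }
}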

View File

@ -55,7 +55,7 @@ public class ASTFactory {
return new Constructor(method, superClass);
}
public static Class createClass(String className, Type type, Modifiers modifiers, Menge supertypeGenPara, SourceFile parent) {
public static Class createClass(String className, RefType type, Modifiers modifiers, Menge supertypeGenPara, SourceFile parent) {
// TODO bytecode createClass
//String name, RefType superClass, Modifiers modifiers, Menge<String> supertypeGenPara
Class generatedClass = new Class(className, type, modifiers, supertypeGenPara);

View File

@ -3,7 +3,7 @@ package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.Pair.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
public class UnifyPairMengenBuilder {

View File

@ -1,8 +1,10 @@
package de.dhbwstuttgart.syntaxtree.factory;
import java.util.HashSet;
import java.util.logging.Logger;
import de.dhbwstuttgart.myexception.NotImplementedException;
import de.dhbwstuttgart.syntaxtree.NullSyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.type.ObjectType;
@ -11,43 +13,66 @@ import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.syntaxtree.type.WildcardType;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.EinzelElement;
import de.dhbwstuttgart.typeinference.KomplexeMenge;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.OderConstraint;
import de.dhbwstuttgart.typeinference.OderMenge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.UndMenge;
import de.dhbwstuttgart.typeinference.UnifyConstraintsSet;
import de.dhbwstuttgart.typeinference.UnifyOderConstraint;
import de.dhbwstuttgart.typeinference.UnifyUndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.ClassAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SimpleType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class UnifyTypeFactory {
public class UnifyTypeFactory {
private final static NullSyntaxTreeNode NULL_NODE = new NullSyntaxTreeNode();
public static FiniteClosure generateFC(TypeAssumptions fromAss){
HashSet<MPair> pairs = new HashSet<>();
HashSet<UnifyPair> pairs = new HashSet<>();
for(ClassAssumption cAss : fromAss.getClassAssumptions()){
UnifyType tl = UnifyTypeFactory.convert(cAss.getAssumedClass().getType());
Type superClass = cAss.getAssumedClass().getSuperClass();
RefType superClass = cAss.getAssumedClass().getSuperClass();
if(superClass != null){
UnifyType tr = UnifyTypeFactory.convert(superClass);
pairs.add(smaller(tl, tr));
pairs.add(generateSmallerPair(tl, tr));
}
}
return new FiniteClosure(pairs);
}
public static MPair smaller(UnifyType tl, UnifyType tr){
return new MPair(tl, tr,MPair.PairOperator.SMALLER);
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr){
return new UnifyPair(tl, tr, PairOperator.SMALLER);
}
public static UnifyType convert(Type t){
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
if(t instanceof GenericTypeVar){
return UnifyTypeFactory.convert((GenericTypeVar)t);
}else if(t instanceof RefType){
return UnifyTypeFactory.convert((RefType)t);
}else if(t instanceof TypePlaceholder){
return UnifyTypeFactory.convert((TypePlaceholder)t);
}else if(t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert((ExtendsWildcardType)t);
}else if(t instanceof SuperWildcardType){
return UnifyTypeFactory.convert((SuperWildcardType)t);
}
System.out.println("Der Typ "+t+" kann nicht umgewandelt werden");
throw new NotImplementedException();
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
public static UnifyType convert(RefType t){
@ -57,9 +82,9 @@ public class UnifyTypeFactory {
for(Type pT : t.getParaList()){
params.add(UnifyTypeFactory.convert(pT));
}
ret = new SimpleType(t.get_Name(),params.toArray());
ret = new ReferenceType(t.get_Name(),new TypeParams(params));
}else{
ret = new SimpleType(t.get_Name());
ret = new ReferenceType(t.get_Name());
}
return ret;
}
@ -77,6 +102,73 @@ public class UnifyTypeFactory {
}
public static UnifyType convert(GenericTypeVar t){
return new SimpleType(t.get_Name());
return new ReferenceType(t.get_Name());
}
public static UnifyConstraintsSet convert(ConstraintsSet constraints) {
UnifyConstraintsSet ret = new UnifyConstraintsSet();
for(OderConstraint oC : constraints.getOderConstraints()){
ret.add(UnifyTypeFactory.convert(oC));
}
return ret;
}
public static UnifyOderConstraint convert(OderConstraint set) {
UnifyOderConstraint ret = new UnifyOderConstraint();
for(UndConstraint oC : set.getUndConstraints()){
ret.addConstraint(UnifyTypeFactory.convert(oC));
}
return ret;
}
public static UnifyUndConstraint convert(UndConstraint set) {
UnifyUndConstraint ret = new UnifyUndConstraint();
for(EinzelElement<Pair> oC : set.getPairs()){
ret.add(UnifyTypeFactory.convert(oC));
}
return ret;
}
public static UnifyPair convert(EinzelElement<Pair> p) {
return convert(p.getItem());
}
public static UnifyPair convert(Pair p) {
if(!p.OperatorSmaller())throw new NotImplementedException();
UnifyPair ret = generateSmallerPair(UnifyTypeFactory.convert(p.TA1)
, UnifyTypeFactory.convert(p.TA2));
return ret;
}
public static Pair convert(UnifyPair mp) {
Type tl = UnifyTypeFactory.convert(mp.getLhsType());
Type tr = UnifyTypeFactory.convert(mp.getRhsType());
return new Pair(tl, tr, mp.getPairOp());
}
public static Type convert(ReferenceType t) {
return new RefType(t.getName(),null,0);
}
public static Type convert(SuperType t) {
RefType innerType = new RefType(t.getSuperedType().getName(),NULL_NODE,0);
return new SuperWildcardType(innerType);
}
public static Type convert(ExtendsType t) {
RefType innerType = new RefType(t.getExtendedType().getName(),NULL_NODE,0);
return new ExtendsWildcardType(innerType);
}
public static Type convert(PlaceholderType t) {
return TypePlaceholder.getInstance(t.getName());
}
public static Type convert(UnifyType t) {
if(t instanceof ReferenceType)return convert((ReferenceType) t);
if(t instanceof SuperType)return convert((SuperType) t);
if(t instanceof ExtendsType)return convert((ExtendsType) t);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t);
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
}
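UnifyTypeFactory now bridges in both directions: AST types (RefType, TypePlaceholder, wildcards, GenericTypeVar) are mapped onto the unify model (ReferenceType, PlaceholderType, ExtendsType, SuperType), and unification results are mapped back. A compilable sketch of the instanceof-dispatch round trip with hypothetical stand-in types:

// Sketch only: the *Sketch types stand in for the project's AST and unify-model classes.
class AstTypeSketch {}
class AstRefSketch extends AstTypeSketch {
    final String name;
    AstRefSketch(String name) { this.name = name; }
}
class AstPlaceholderSketch extends AstTypeSketch {
    final String name;
    AstPlaceholderSketch(String name) { this.name = name; }
}

class UnifyTypeSketch {}
class ReferenceTypeSketch extends UnifyTypeSketch {
    final String name;
    ReferenceTypeSketch(String name) { this.name = name; }
}
class PlaceholderTypeSketch extends UnifyTypeSketch {
    final String name;
    PlaceholderTypeSketch(String name) { this.name = name; }
}

class FactorySketch {
    // mirrors convert(Type): dispatch on the runtime type, fail loudly otherwise
    static UnifyTypeSketch convert(AstTypeSketch t) {
        if (t instanceof AstRefSketch) return new ReferenceTypeSketch(((AstRefSketch) t).name);
        if (t instanceof AstPlaceholderSketch) return new PlaceholderTypeSketch(((AstPlaceholderSketch) t).name);
        throw new UnsupportedOperationException("Der Typ " + t + " kann nicht umgewandelt werden");
    }

    // mirrors convert(UnifyType): the reverse direction used on unification results
    static AstTypeSketch convert(UnifyTypeSketch t) {
        if (t instanceof ReferenceTypeSketch) return new AstRefSketch(((ReferenceTypeSketch) t).name);
        if (t instanceof PlaceholderTypeSketch) return new AstPlaceholderSketch(((PlaceholderTypeSketch) t).name);
        throw new UnsupportedOperationException("Der Typ " + t + " kann nicht umgewandelt werden");
    }

    public static void main(String[] args) {
        AstTypeSketch original = new AstRefSketch("Integer");
        AstTypeSketch roundTripped = convert(convert(original)); // AST -> unify model -> AST
        System.out.println(roundTripped instanceof AstRefSketch); // true
    }
}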

View File

@ -2,7 +2,6 @@ package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.unify.FC_TTO;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
@ -14,18 +13,16 @@ public class Unify_FC_TTO_Builder {
public void AddInheritance(Type t1, Type t2) {
if(t1 instanceof RefType)
classes.add(new Class(t1.get_Name(), t1.getOffset()));
if(!classes.stream().anyMatch(x -> x.getName().equals(t1.getName())))
classes.add(new Class(t1.get_Name(), t1.getOffset()));
if(t2 instanceof RefType)
classes.add(new Class(t2.get_Name(), t2.getOffset()));
if(!classes.stream().anyMatch(x -> x.getName().equals(t2.getName())))
classes.add(new Class(t2.get_Name(), t2.getOffset()));
fc.add(new Pair(t1, t2));
}
public FC_TTO Get_FC_TTO() {
return new FC_TTO(fc, (Menge<?>) fc.clone(), classes);
}
public void clear() {
fc = new Menge<Pair>();
classes = new Menge<Class>();

View File

@ -15,7 +15,6 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -7,7 +7,6 @@ import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.statement.Expr;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
// ino.class.AndOp.24101.declaration

View File

@ -18,7 +18,6 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -19,54 +19,32 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.OderConstraint;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.Unify;
// ino.class.Operator.24257.declaration
public abstract class Operator extends SyntaxTreeNode
// ino.end
// ino.class.Operator.24257.body
{
// ino.attribute.offset.24261.declaration
private int offset;
// ino.end
// ino.attribute.variableLength.24264.declaration
private int variableLength;
// ino.end
// ino.method.Operator.24267.definition
public Operator(int offset,int variableLength)
// ino.end
// ino.method.Operator.24267.body
{
this.offset=offset;
this.variableLength=variableLength;
}
// ino.end
// ino.method.getOffset.24270.definition
public int getOffset()
// ino.end
// ino.method.getOffset.24270.body
{
return offset;
}
// ino.end
// ino.method.getVariableLength.24273.definition
public int getVariableLength()
// ino.end
// ino.method.getVariableLength.24273.body
{
return variableLength;
}
// ino.end
/**

View File

@ -34,7 +34,6 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.Unify;
@ -108,8 +107,8 @@ public class Assign extends Expr
ret.add(expr2.TYPEExpr(assumptions));
//this.setTypeVariable( TypePlaceholder.fresh(this));
this.setType(TypePlaceholder.fresh(this));
ret.add(new SingleConstraint(expr2.getType().TYPE(assumptions, this), expr1.getType().TYPE(assumptions, this))); //expr2.type <. expr1.type
ret.add(new SingleConstraint(expr1.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
ret.add(ConstraintsSet.createSingleConstraint(expr2.getType().TYPE(assumptions, this), expr1.getType().TYPE(assumptions, this))); //expr2.type <. expr1.type
ret.add(ConstraintsSet.createSingleConstraint(expr1.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
return ret;
}

View File

@ -38,12 +38,10 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.OderConstraint;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -32,12 +32,10 @@ import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
@ -188,8 +186,8 @@ public class Block extends Statement
}
else {
TypePlaceholder tph = TypePlaceholder.fresh(this);
ret.add(new SingleConstraint(this.getType().TYPE(assumptions, this), tph));
ret.add(new SingleConstraint(stmt.getType().TYPE(assumptions, this), tph));
ret.add(ConstraintsSet.createSingleConstraint(this.getType().TYPE(assumptions, this), tph));
ret.add(ConstraintsSet.createSingleConstraint(stmt.getType().TYPE(assumptions, this), tph));
this.setType(tph);
}
}

View File

@ -24,7 +24,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -25,7 +25,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -24,7 +24,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -27,7 +27,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -22,7 +22,6 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -33,7 +33,6 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;

View File

@ -44,11 +44,9 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.MUB;
import de.dhbwstuttgart.typeinference.unify.Unify;
@ -142,10 +140,10 @@ public class IfStmt extends Statement
ret.add(this.then_block.TYPEStmt(assumptions));
if(else_block!=null){
ret.add(this.else_block.TYPEStmt(assumptions));
if(!(else_block.getType() instanceof Void))ret.add(new SingleConstraint(else_block.getType().TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
if(!(else_block.getType() instanceof Void))ret.add(ConstraintsSet.createSingleConstraint(else_block.getType().TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
}
ret.add(new SingleConstraint(expr.getType().TYPE(assumptions, this),new RefType("Boolean",this,0).TYPE(assumptions, this))); //(expressionDesIfStmt)<.boolean
if(!(then_block.getType() instanceof Void))ret.add(new SingleConstraint(then_block.getType().TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
ret.add(ConstraintsSet.createSingleConstraint(expr.getType().TYPE(assumptions, this),new RefType("Boolean",this,0).TYPE(assumptions, this))); //(expressionDesIfStmt)<.boolean
if(!(then_block.getType() instanceof Void))ret.add(ConstraintsSet.createSingleConstraint(then_block.getType().TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
if(then_block.getType() instanceof Void &&
(else_block == null || else_block.getType() instanceof Void))this.setType(new Void(this,this.getOffset()));
return ret;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -41,13 +41,11 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.OderConstraint;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.Typeable;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ParameterAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
/**
* @author A10023 - Andreas Stadelmeier
@ -199,7 +197,7 @@ public class LambdaExpression extends Expr{
}else{
this.lambdaType = new FunN(retType, modifiedParamTypes);
}
ret.add(new SingleConstraint(lambdaType.TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
ret.add(ConstraintsSet.createSingleConstraint(lambdaType.TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
return ret;
}

View File

@ -3,7 +3,6 @@ package de.dhbwstuttgart.syntaxtree.statement;
// ino.end
import de.dhbwstuttgart.myexception.JVMCodeException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
// ino.class.Literal.25490.declaration
public abstract class Literal extends Expr

View File

@ -28,13 +28,11 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ClassAssumption;
import de.dhbwstuttgart.typeinference.assumptions.FieldAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -27,7 +27,6 @@ import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeInsertable;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.LocalVarAssumption;
@ -35,7 +34,6 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
@ -372,7 +370,7 @@ public class LocalVarDecl extends Statement implements TypeInsertable
}
assumptions.addAssumption(new LocalVarAssumption(this, this.getType())); //Bevor der Typ auf Void gesetzt wird.
//if(this.getType() == null)throw new DebugException("Parser Post Processing nicht aufgerufen");
ret.add(new SingleConstraint(this.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
ret.add(ConstraintsSet.createSingleConstraint(this.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
//assumptions.remove(null); // falls Variable mit diesem Namen bereits vorhanden.
this.setReturnType(new Void(this,0)); //Return typ einer Variablendeklaration ist Void
return ret;

View File

@ -24,7 +24,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -22,7 +22,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -34,13 +34,11 @@ import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.ConstructorAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -24,7 +24,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -22,7 +22,6 @@ import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -33,7 +33,6 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -28,7 +28,6 @@ import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;

View File

@ -24,10 +24,8 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
@ -96,7 +94,7 @@ public class Return extends Statement
ret.add(this.retexpr.TYPEExpr(assumptions));
//this.setTypeVariable(TypePlaceholder.fresh("Return Type"));
this.setType(TypePlaceholder.fresh(this));
ret.add(new SingleConstraint(retexpr.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
ret.add(ConstraintsSet.createSingleConstraint(retexpr.getType().TYPE(assumptions, this), this.getType().TYPE(assumptions, this)));
return ret;
}

View File

@ -32,7 +32,6 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ConstructorAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -32,7 +32,6 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ConstructorAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -24,7 +24,6 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.assumptions.ConstructorAssumption;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;

View File

@ -28,11 +28,10 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
import de.dhbwstuttgart.typeinference.unify.Unify;
@ -113,7 +112,7 @@ public class WhileStmt extends Statement
public ConstraintsSet TYPEStmt(TypeAssumptions assumptions) {
ConstraintsSet ret = new ConstraintsSet();
ret.add(expr.TYPEExpr(assumptions));
SingleConstraint exprMustBeBool = new SingleConstraint(expr.getType().TYPE(assumptions, this), new RefType("Boolean",this, 0).TYPE(assumptions, this)); // while(expr){}; expr <. boolean
UndConstraint exprMustBeBool = ConstraintsSet.createSingleConstraint(expr.getType().TYPE(assumptions, this), new RefType("Boolean",this, 0).TYPE(assumptions, this)); // while(expr){}; expr <. boolean
ret.add(exprMustBeBool);
ret.add(this.loop_block.TYPEStmt(assumptions));
this.setType(loop_block.getType());

View File

@ -9,7 +9,6 @@ import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;

View File

@ -163,4 +163,10 @@ public class ExtendsWildcardType extends WildcardType implements ITypeContainer,
return new de.dhbwstuttgart.bytecode.WildcardType(this.innerType.get_Name(), "+");
}
*/
@Override
public String get_Name() {
return "? extends "+this.innerType.get_Name();
}
}

View File

@ -11,7 +11,6 @@ import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.TypeInsertable;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;

View File

@ -29,8 +29,6 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.TypeinferenceResults;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionGenVar;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
@ -182,41 +180,6 @@ public class RefType extends ObjectType implements IMatchable
return name + "<"+para + " >" ;
}
}
// ino.end
/**
* Wandelt die Parameter des RefTypes in TPHs um, sofern es sich um Generische Variablen handelt.
* @return
*/
// ino.method.GenericTypeVar2TypePlaceholder.26652.definition
public CSubstitutionSet GenericTypeVar2TypePlaceholder ()
// ino.end
// ino.method.GenericTypeVar2TypePlaceholder.26652.body
{
//throw new NotImplementedException();
///*
CSubstitutionSet sub = new CSubstitutionSet();
if(parameter != null)
{
for (int i = 0; i < parameter.size(); i++)
{
if (parameter.elementAt(i) instanceof GenericTypeVar)
{
TypePlaceholder tlv = TypePlaceholder.fresh(null);
sub.addElement(new CSubstitutionGenVar((GenericTypeVar)parameter.elementAt(i), tlv));
parameter.set(i, tlv);
}
if (parameter.elementAt(i) instanceof RefType)
{
CSubstitutionSet parasub = ((RefType)parameter.elementAt(i)).GenericTypeVar2TypePlaceholder();
sub.addAll(parasub); //korrigiert PL 07=07=29
}
}
}
return sub;
//*/
}
// ino.end
/**
* Wandelt die Parameter des RefTypes in TPHs um, sofern es sich um Generische Variablen handelt.

View File

@ -2,6 +2,7 @@ package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.bytecode.ClassGenerator;
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
@ -18,7 +19,7 @@ import de.dhbwstuttgart.typeinference.exceptions.DebugException;
*/
public class SuperWildcardType extends WildcardType implements ITypeContainer, IMatchable{
public SuperWildcardType(ObjectType innerType){
this(innerType.getOffset(), innerType);
}
@ -168,5 +169,9 @@ public class SuperWildcardType extends WildcardType implements ITypeContainer, I
return "-" + this.innerType.getBytecodeSignature(cg, rs);
}
@Override
public String get_Name() {
return "? super "+this.innerType.get_Name();
}
}

View File

@ -17,7 +17,11 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
class Test {
void methode(ArrayList<? super Integer> t){
}
}
//TODO: Die Klasse Type muss abstract werden!
// ino.class.Type.26716.declaration

View File

@ -43,7 +43,7 @@ public class TypePlaceholder extends ObjectType
private static String strNextName = "A";
// ino.end
// ino.attribute.m_TypePlaceholdersRegistry.26788.declaration
private static Hashtable<JavaClassName, TypePlaceholder> m_TypePlaceholdersRegistry = new Hashtable<JavaClassName, TypePlaceholder>();
private static Hashtable<String, TypePlaceholder> m_TypePlaceholdersRegistry = new Hashtable<String, TypePlaceholder>();
// ino.end
private SyntaxTreeNode parent;
@ -102,10 +102,10 @@ public class TypePlaceholder extends ObjectType
// ino.method.fresh.26800.body
{
TypePlaceholder typeVar = new TypePlaceholder(name, parent);
TypePlaceholder oldTPH = m_TypePlaceholdersRegistry.put(typeVar.getName(), typeVar);
TypePlaceholder oldTPH = m_TypePlaceholdersRegistry.put(typeVar.getName().toString(), typeVar);
if(oldTPH != null){
oldTPH.name = new JavaClassName(makeNewName());
m_TypePlaceholdersRegistry.put(oldTPH.getName(), oldTPH);
m_TypePlaceholdersRegistry.put(oldTPH.getName().toString(), oldTPH);
}
return typeVar;
}
@ -121,7 +121,7 @@ public class TypePlaceholder extends ObjectType
*/
public static TypePlaceholder fresh(SyntaxTreeNode parent){
TypePlaceholder ret= new TypePlaceholder(makeNewName(), parent);
m_TypePlaceholdersRegistry.put(ret.getName(), ret);
m_TypePlaceholdersRegistry.put(ret.getName().toString(), ret);
return ret;
}
@ -260,7 +260,7 @@ public class TypePlaceholder extends ObjectType
// ino.method.deleteRegistry.26839.body
{
m_TypePlaceholdersRegistry.clear();
m_TypePlaceholdersRegistry = new Hashtable<JavaClassName, TypePlaceholder>();
m_TypePlaceholdersRegistry = new Hashtable<String, TypePlaceholder>();
}
// ino.end
@ -328,7 +328,7 @@ public class TypePlaceholder extends ObjectType
//auf den CSubstitution nicht registrierte Variablen zu
//Exceptions fuehrt
TypePlaceholder typeVar = new TypePlaceholder(makeNewName(), null);
m_TypePlaceholdersRegistry.put(typeVar.getName(), typeVar);
m_TypePlaceholdersRegistry.put(typeVar.getName().toString(), typeVar);
return typeVar;
//return new TypePlaceholder(makeNewName());
@ -378,7 +378,7 @@ public class TypePlaceholder extends ObjectType
//auf den CSubstitution nicht registrierte Variablen zu
//Exceptions fuehrt
TypePlaceholder typeVar = new TypePlaceholder(name, null);
m_TypePlaceholdersRegistry.put(typeVar.getName(), typeVar);
m_TypePlaceholdersRegistry.put(typeVar.getName().toString(), typeVar);
return typeVar;
//return new TypePlaceholder(name);

View File

@ -2,6 +2,7 @@ package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.bytecode.ClassGenerator;
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.JavaCodeResult;
import de.dhbwstuttgart.typeinference.ResultSet;
@ -111,6 +112,9 @@ public class WildcardType extends Type{
return this.innerType.getBytecodeSignature(cg, rs);
}
@Override
public JavaClassName getName() {
return new JavaClassName(this.get_Name());
}
}

View File

@ -1,10 +1,12 @@
package de.dhbwstuttgart.typeinference;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.logger.*;
import de.dhbwstuttgart.typeinference.unify.Unifier;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConstraint>{
private static final Logger log = Logger.getLogger(ConstraintsSet.class.getName());
@ -35,20 +37,22 @@ public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConst
return constraintsSet.iterator();
}
/*
public void filterWrongConstraints(Unifier unify) {
/*
* Das ConstraintsSet enthält nur OderConstraints, welche UND-Verknüpft sind.
* Hier werden Constraints in den OderConstraints kontrolliert:
*/
// * Das ConstraintsSet enthält nur OderConstraints, welche UND-Verknüpft sind.
// * Hier werden Constraints in den OderConstraints kontrolliert:
for(OderConstraint constraint : this){
constraint.filterWrongConstraints(unify);
}
}
*/
/**
* Nimmt alle UndConstraints und filtert mithilfe dieser die falschen Constraints aus den OderConstraints
* @param unifier
*/
public void unifyUndConstraints(Unifier unifier) {
Vector<UndConstraint> uCons = this.filterUndConstraints();
Vector<Pair> alleUndConstraints = new Vector<>();
@ -57,16 +61,16 @@ public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConst
}
this.filterWrongConstraints(
(pairs)->{
Menge<Pair> undConstraintsUndPairs = new Menge<>();
Set<Pair> undConstraintsUndPairs = new Menge<>();
undConstraintsUndPairs.addAll(pairs);
undConstraintsUndPairs.addAll(alleUndConstraints);
log.debug("Versuche Pairs auszusondern:\n"+pairs, Section.TYPEINFERENCE);
log.debug("Unifiziere:\n"+undConstraintsUndPairs, Section.TYPEINFERENCE);
Menge<Menge<Pair>> unifyResult = unifier.apply(undConstraintsUndPairs);
Set<Set<Pair>> unifyResult = unifier.apply(undConstraintsUndPairs);
return unifyResult;
});
}
*/
/**
* Aus dem ConstraintsSet [ u1, u2, ... (OderConstraint), ... uN ] werden alle
* UndConstraints, welche sich nicht innerhalb eines OderConstraints befinden, herausgefiltert
@ -89,6 +93,20 @@ public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConst
@Override
public Menge<? extends KomplexeMenge<Pair>> getSet() {
return this.getOderConstraints();
}
public Menge<OderConstraint> getOderConstraints() {
return this.constraintsSet;
}
public static UndConstraint createSingleConstraint(Type t1, Type t2){
UndConstraint ret = new UndConstraint();
ret.addConstraint(t1, t2);
return ret;
}
public static UndConstraint createSingleConstraint(Pair pair) {
return createSingleConstraint(pair.TA1, pair.TA2);
}
}
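Note (not part of the commit): a minimal usage sketch of the factory introduced above. Class and method names are illustrative; it relies only on what the hunks show, namely the no-arg ConstraintsSet constructor, createSingleConstraint(Type, Type) returning an UndConstraint, and an add(...) overload that accepts it, replacing the removed "new SingleConstraint(sub, sup)" pattern.

import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.UndConstraint;

class CreateSingleConstraintSketch {
    // Builds the single subtype constraint "sub <. sup" the way the refactored
    // call sites above do, via the static factory instead of new SingleConstraint(...).
    static ConstraintsSet subtypeConstraint(Type sub, Type sup) {
        ConstraintsSet ret = new ConstraintsSet();
        UndConstraint single = ConstraintsSet.createSingleConstraint(sub, sup);
        ret.add(single);
        return ret;
    }
}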

View File

@ -1,5 +1,7 @@
package de.dhbwstuttgart.typeinference;
import java.util.Set;
import com.rits.cloning.Cloner;
public class EinzelElement<A> implements KomplexeMenge<A>{
@ -16,9 +18,9 @@ public class EinzelElement<A> implements KomplexeMenge<A>{
}
@Override
public Menge<Menge<A>> cartesianProduct() {
public Set<Set<A>> cartesianProduct() {
Cloner cloner = new Cloner();
Menge<Menge<A>> ret = new Menge<>();
Set<Set<A>> ret = new Menge<>();
Menge<A> i = new Menge<A>();
i.add(cloner.deepClone(item));
ret.add(i);
@ -30,4 +32,8 @@ public class EinzelElement<A> implements KomplexeMenge<A>{
return item.toString();
}
public A getItem(){
return item;
}
}
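Note (not part of the commit): plain-Java restatement of EinzelElement.cartesianProduct() shown above, to spell out what it returns: a single item yields exactly one alternative, the singleton set holding that item. The deep clone taken via Cloner in the real method is omitted; class and method names are illustrative.

import java.util.HashSet;
import java.util.Set;

class EinzelElementSketch {
    // One item -> one alternative containing exactly that item (no deep clone here).
    static <A> Set<Set<A>> singletonProduct(A item) {
        Set<A> alternative = new HashSet<>();
        alternative.add(item);
        Set<Set<A>> ret = new HashSet<>();
        ret.add(alternative);
        return ret;
    }
}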

View File

@ -1,6 +1,8 @@
package de.dhbwstuttgart.typeinference;
import java.util.Set;
public interface KomplexeMenge<A>{
Menge<? extends KomplexeMenge<A>> getSet();
Menge<Menge<A>> cartesianProduct();
Set<? extends KomplexeMenge<A>> getSet();
Set<Set<A>> cartesianProduct();
}

View File

@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.logger.Logger;
@ -7,10 +8,10 @@ import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.Unifier;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
public class OderConstraint extends OderMenge<Pair>{
private Menge<UndConstraint> oderConstraintPairs;
private Set<UndConstraint> oderConstraintPairs;
private final static Logger logger = Logger.getLogger(OderConstraint.class.getName());
@ -70,7 +71,7 @@ public class OderConstraint extends OderMenge<Pair>{
return ret+"]";
}
public Vector<UndConstraint> getUndConstraints() {
public Set<UndConstraint> getUndConstraints() {
return this.oderConstraintPairs;
/*
Vector<UndConstraint> ret = new Vector<UndConstraint>();
@ -89,11 +90,11 @@ public class OderConstraint extends OderMenge<Pair>{
* Filtert die Constraints in diesem ODER-Verknüpften Constraint aus,
* welche keinen Sinn ergeben, also beim unifizieren scheitern.
* @param unifier - Wird für die Unifizierung benutzt
*/
void filterWrongConstraints(Unifier unifier) {
Menge<UndConstraint> filteredConstraints = new Menge<>();
Set<UndConstraint> filteredConstraints = new Menge<>();
for(UndConstraint cons : this.getUndConstraints()){
Menge<Menge<Pair>> unifierResult = unifier.apply(cons.getConstraintPairs());
Set<Set<Pair>> unifierResult = unifier.apply(cons.getConstraintPairs());
if(!unifierResult.isEmpty()){
filteredConstraints.add(cons);
}else{
@ -102,7 +103,6 @@ public class OderConstraint extends OderMenge<Pair>{
}
this.oderConstraintPairs = filteredConstraints;
}
UndConstraint filterUndConstraints() {
if(this.oderConstraintPairs.size()==1){
return this.oderConstraintPairs.firstElement();
@ -110,8 +110,9 @@ public class OderConstraint extends OderMenge<Pair>{
return null;
}
*/
@Override
public Menge<? extends KomplexeMenge<Pair>> getSet() {
public Set<? extends KomplexeMenge<Pair>> getSet() {
return this.oderConstraintPairs;
}

View File

@ -2,6 +2,7 @@ package de.dhbwstuttgart.typeinference;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.logger.Logger;
@ -9,15 +10,15 @@ import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.Unifier;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
public abstract class OderMenge<A> implements KomplexeMenge<A>{
public abstract Menge<? extends KomplexeMenge<A>> getSet();
public abstract Set<? extends KomplexeMenge<A>> getSet();
@Override
public Menge<Menge<A>> cartesianProduct() {
Menge<Menge<A>> ret = new Menge<>();
public Set<Set<A>> cartesianProduct() {
Set<Set<A>> ret = new Menge<>();
for(KomplexeMenge<A> km : this.getSet()){
ret.addAll(km.cartesianProduct());
}

View File

@ -11,10 +11,8 @@ import java.io.Serializable;
import java.util.Hashtable;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
// ino.end
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.type.FreshWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
@ -43,9 +41,8 @@ public class Pair implements Serializable, DeepCloneable
// ino.end
// ino.attribute.bEqual.26549.declaration
private PairOperator eOperator = PairOperator.Smaller;
private PairOperator eOperator = PairOperator.SMALLER;
public enum PairOperator { Smaller, SmallerExtends, Equal };
// ino.end
// ino.attribute.bSubst.26552.decldescription type=line
// false <--> vorinitialisierter Wert
@ -79,7 +76,7 @@ public class Pair implements Serializable, DeepCloneable
this.TA1 = TA1;
this.TA2 = TA2;
bSubst = false;
eOperator = PairOperator.Smaller;
eOperator = PairOperator.SMALLER;
}
// ino.end
@ -342,7 +339,7 @@ public class Pair implements Serializable, DeepCloneable
*/
public boolean OperatorEqual()
{
return eOperator == PairOperator.Equal;
return eOperator == PairOperator.EQUALSDOT;
}
/**
@ -351,7 +348,7 @@ public class Pair implements Serializable, DeepCloneable
*/
public boolean OperatorSmaller()
{
return eOperator == PairOperator.Smaller;
return eOperator == PairOperator.SMALLER;
}
/**
@ -360,7 +357,7 @@ public class Pair implements Serializable, DeepCloneable
*/
public boolean OperatorSmallerExtends()
{
return eOperator == PairOperator.SmallerExtends;
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
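Note (not part of the commit): the renaming this hunk applies, collected in one illustrative mapping. SMALLER, EQUALSDOT and SMALLERDOTWC are the PairOperator constants used in the replacement lines; reading SMALLERDOTWC as the successor of SmallerExtends is taken from the replaced comparison and is an assumption, as are the class and method names.

import de.dhbwstuttgart.typeinference.unify.model.PairOperator;

class PairOperatorMapping {
    // Old local enum value -> new PairOperator constant, as used in the hunk above.
    static PairOperator translate(String legacy) {
        switch (legacy) {
            case "Smaller":        return PairOperator.SMALLER;
            case "SmallerExtends": return PairOperator.SMALLERDOTWC;
            case "Equal":          return PairOperator.EQUALSDOT;
            default: throw new IllegalArgumentException("unknown operator: " + legacy);
        }
    }
}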

View File

@ -1,71 +0,0 @@
package de.dhbwstuttgart.typeinference;
import java.util.Vector;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
/**
* Beschreibung von Herrn Plümicke:
* "The set of constraints consists of constraints of the form θ R θ' , where θ and
* θ' are Java types and R (R { < , <? , = }) is a subtyping condition."
*
* @author AI10023 - Andreas Stadelmeier
*
* Die Klasse stellt ein OderConstraint-Set dar, welches nur aus einem Constraint besteht.
*
*/
public class SingleConstraint extends UndConstraint{
private Pair constraintPair; //entspricht θ condition θ'
//private R condition; //entspricht der condition (R)
public SingleConstraint(Type p1, Type p2){
//super(p1,p2);
if(p1 == null || p2 == null)throw new NullPointerException();
Pair constraintPair = new Pair(p1,p2);//super.getConstraintPairs().firstElement();
this.addConstraint(constraintPair);
}
@Override
public Menge<? extends KomplexeMenge<Pair>> getSet() {
Menge<EinzelElement<Pair>> ret = new Menge<>();
ret.add(new EinzelElement<>(constraintPair));
return ret;
}
public SingleConstraint(Pair toAdd) {
this.addConstraint(toAdd);
}
public Pair getPair(){
return constraintPair;
}
@Override //Methode überschreiben, damit immer nur ein Vector mit nur einem Element zurückgeliefert wird.
public Menge<Pair> getConstraintPairs(){
Menge<Pair> ret = new Menge<Pair>();
ret.add(constraintPair);
return ret;
}
public void addConstraint(Pair toAdd){
if(constraintPair != null)throw new DebugException("Ein Constraint darf nur aus einem ConstraintPair bestehen. Das hinzufügen von "+ toAdd + " ist nicht möglich.");
Type p1 = toAdd.TA1;
Type p2 = toAdd.TA2;
if(p1==null || p2 == null)throw new NullPointerException();
constraintPair = new Pair(p1,p2);
}
@Override
public String toString(){
return ""+constraintPair.TA1.toString()+" < "+constraintPair.TA2.toString();
}
}

View File

@ -15,7 +15,6 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertSet;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
// ino.class.CTypeReconstructionResult.27238.description type=javadoc
/**

View File

@ -1,11 +1,12 @@
package de.dhbwstuttgart.typeinference;
import java.util.Collection;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unifier;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
/**
* Stellt ein Constraint dar, welches aus mehreren Constraint-Paaren besteht. Diese gelten alle stets gleichzeitig / sind per "Und" miteinander verknüpft.
@ -21,13 +22,13 @@ public class UndConstraint extends UndMenge<Pair> {
return set;
}
public Menge<Pair> getConstraintPairs() {
Menge<Menge<Pair>> ret = this.cartesianProduct();
public Set<Pair> getConstraintPairs() {
Set<Set<Pair>> ret = this.cartesianProduct();
if(ret.size() != 1){
//UndConstraints enthalten nur SingleConstraints, wodurch das Karthesische Produkt nur aus einem Element bestehen kann.
throw new DebugException("Fehler in ConstraintPairs-Bildung");
}
return ret.firstElement();
return ret.iterator().next();
}
public void addConstraint(Type type, Type rT) {
@ -41,6 +42,9 @@ public class UndConstraint extends UndMenge<Pair> {
return ret;
}
public Set<EinzelElement<Pair>> getPairs(){
return set;
}
/*
public UndConstraint(ConstraintType p1, ConstraintType p2) {

View File

@ -3,32 +3,32 @@ package de.dhbwstuttgart.typeinference;
import de.dhbwstuttgart.typeinference.unify.Unify;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
//import com.rits.cloning.Cloner;
public abstract class UndMenge<A extends DeepCloneable> implements KomplexeMenge<A>{
public abstract class UndMenge<A> implements KomplexeMenge<A>{
public abstract Menge<? extends KomplexeMenge<A>> getSet();
@Override
public Menge<Menge<A>> cartesianProduct() {
Menge<Menge<A>> ret = null;
//Cloner cloner = new Cloner();
public Set<Set<A>> cartesianProduct() {
Set<Set<A>> ret = null;
for(KomplexeMenge<A> km : this.getSet()){
if(ret == null){
ret = km.cartesianProduct();
}else{
Menge<Menge<A>> cartesianProduct = new Menge<>();
for(Menge<A> r : ret)for(Menge<A> m : km.cartesianProduct()){ //für jedes Element aus dem Karthesischen Produkt:
Menge<A> undElement = new Menge<A>();
undElement.addAll(Unify.deepClone(r));
Set<Set<A>> cartesianProduct = new Menge<>();
for(Set<A> r : ret)for(Set<A> m : km.cartesianProduct()){ //für jedes Element aus dem Karthesischen Produkt:
Set<A> undElement = new Menge<A>();
undElement.addAll(r);
undElement.addAll(m);
cartesianProduct.add(undElement);
}
ret = cartesianProduct;
}
}
if(ret == null)return new Menge<Menge<A>>();
if(ret == null)return new Menge<>();
return ret;
}
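Note (not part of the commit): the combination step of UndMenge.cartesianProduct() above, restated as a self-contained plain-Java sketch with java.util collections instead of Menge/KomplexeMenge. Each alternative collected so far is unioned with each alternative of the next sub-set, e.g. [{a},{b}] and [{c},{d}] combine to [{a,c},{a,d},{b,c},{b,d}]. Class and method names are illustrative.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class CartesianSketch {
    // subSets holds, per KomplexeMenge, its set of alternatives (each a set of A).
    static <A> Set<Set<A>> cartesianProduct(List<Set<Set<A>>> subSets) {
        Set<Set<A>> ret = null;
        for (Set<Set<A>> alternatives : subSets) {
            if (ret == null) {
                ret = new HashSet<>(alternatives);
            } else {
                Set<Set<A>> product = new HashSet<>();
                for (Set<A> r : ret) {
                    for (Set<A> m : alternatives) {
                        Set<A> merged = new HashSet<>(r); // one choice per sub-set, unioned
                        merged.addAll(m);
                        product.add(merged);
                    }
                }
                ret = product;
            }
        }
        return ret == null ? new HashSet<>() : ret;
    }
}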

View File

@ -0,0 +1,96 @@
package de.dhbwstuttgart.typeinference;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.logger.*;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifyConstraintsSet extends UndMenge<UnifyPair> implements Iterable<UnifyOderConstraint>{
private static final Logger log = Logger.getLogger(UnifyConstraintsSet.class.getName());
private Menge<UnifyOderConstraint> constraintsSet;
public UnifyConstraintsSet(){
constraintsSet = new Menge<UnifyOderConstraint>();
}
public void add(UnifyConstraintsSet CSet){
for(UnifyOderConstraint element : CSet)
add(element);
}
public void add(UnifyOderConstraint constraint){
constraintsSet.add(constraint);
}
@Override
public String toString(){
String ret ="";
for(UnifyOderConstraint constraint : this){
ret += constraint.toString()+"\n";
}
return ret;
}
public Iterator<UnifyOderConstraint> iterator() {
return constraintsSet.iterator();
}
public void filterWrongConstraints(Unifikationsalgorithmus unify) {
/*
* Das ConstraintsSet enthält nur OderConstraints, welche UND-Verknüpft sind.
* Hier werden Constraints in den OderConstraints kontrolliert:
*/
for(UnifyOderConstraint constraint : this){
constraint.filterWrongConstraints(unify);
}
}
/**
* Nimmt alle UndConstraints und filtert mithilfe dieser die falschen Constraints aus den OderConstraints
* @param unifier
*/
public void unifyUndConstraints(Unifikationsalgorithmus unifier) {
Vector<UnifyUndConstraint> uCons = this.filterUndConstraints();
Vector<UnifyPair> alleUndConstraints = new Vector<>();
for(UnifyUndConstraint undConstraint : uCons){
alleUndConstraints.addAll(undConstraint.getConstraintPairs());
}
this.filterWrongConstraints(
(pairs)->{
Set<UnifyPair> undConstraintsUndPairs = new Menge<>();
undConstraintsUndPairs.addAll(pairs);
undConstraintsUndPairs.addAll(alleUndConstraints);
log.debug("Versuche Pairs auszusondern:\n"+pairs, Section.TYPEINFERENCE);
log.debug("Unifiziere:\n"+undConstraintsUndPairs, Section.TYPEINFERENCE);
Set<Set<UnifyPair>> unifyResult = unifier.apply(undConstraintsUndPairs);
return unifyResult;
});
}
/**
* Aus dem ConstraintsSet [ u1, u2, ... (OderConstraint), ... uN ] werden alle
* UndConstraints, welche sich nicht innerhalb eines OderConstraints befinden, herausgefiltert
* @return [u1, ... , uN]
*/
private Vector<UnifyUndConstraint> filterUndConstraints() {
Vector<UnifyUndConstraint> ret = new Vector<>();
for(UnifyOderConstraint con : constraintsSet){
UnifyUndConstraint filtered = con.filterUndConstraints();
if(filtered != null)ret.add(filtered);
}
return ret;
}
public void add(UnifyUndConstraint singleConstraint) {
UnifyOderConstraint toAdd = new UnifyOderConstraint();
toAdd.addConstraint(singleConstraint);
constraintsSet.add(toAdd);
}
@Override
public Menge<? extends KomplexeMenge<UnifyPair>> getSet() {
return this.constraintsSet;
}
}
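Note (not part of the commit): a hedged sketch of driving the new pruning entry point with a stub unifier. It assumes Unifikationsalgorithmus is the functional interface the lambda above is converted to, taking the pairs of one alternative and returning a Set<Set<UnifyPair>>; the stub, the class name and the method name are hypothetical. The stub reports every alternative as solvable, so nothing is filtered.

import java.util.HashSet;
import java.util.Set;
import de.dhbwstuttgart.typeinference.UnifyConstraintsSet;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

class PruneSketch {
    static void prune(UnifyConstraintsSet constraints) {
        Unifikationsalgorithmus keepAll = pairs -> {
            Set<UnifyPair> copy = new HashSet<>();
            copy.addAll(pairs);                  // take over the pairs of this alternative
            Set<Set<UnifyPair>> result = new HashSet<>();
            result.add(copy);                    // non-empty result => alternative is kept
            return result;
        };
        constraints.filterWrongConstraints(keepAll);
    }
}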

View File

@ -0,0 +1,112 @@
package de.dhbwstuttgart.typeinference;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifyOderConstraint extends OderMenge<UnifyPair>{
private Set<UnifyUndConstraint> oderConstraintPairs;
private final static Logger logger = Logger.getLogger(UnifyOderConstraint.class.getName());
/**
* Erstellt ein neues Oder Constraint und fügt bereits ein Constraint hinzu.
* @param p1
* @param p2
public OderConstraint(Pair p1, Pair p2){
if(p1 == null || p2 == null)throw new NullPointerException();
Pair constraintPair = new Pair(p1,p2);
oderConstraintPairs = new Menge<UndConstraint>();
this.addConstraint(constraintPair);
}
*/
public UnifyOderConstraint(){
oderConstraintPairs = new Menge<UnifyUndConstraint>();
}
/**
* Liefert alle in diesem OderConstraint enthaltene Constraints. Dabei gehen die Verknüpfungen (Oder/Und) verloren.
* @return
*/
public Menge<UnifyPair> getConstraintPairs(){
Menge<UnifyPair> ret = new Menge<UnifyPair>();
for(UnifyUndConstraint oC : this.oderConstraintPairs){
ret.addAll(oC.getConstraintPairs());
}
return ret;
}
/**
* Falls die Type des toAdd-Pairs nicht vom Typ RefType bzw. TypePlaceholder sind, so werden sie in einen RefType umgewandelt.
* @param toAdd
*/
public void addConstraint(UnifyPair toAdd){
oderConstraintPairs.add(new UnifySingleConstraint(toAdd));
}
@Override
public String toString(){
String ret = "[";
for(UnifyUndConstraint p : this.getUndConstraints()){
ret += p.toString()+ "| ";
}
return ret+"]";
}
public Set<UnifyUndConstraint> getUndConstraints() {
return this.oderConstraintPairs;
/*
Vector<UndConstraint> ret = new Vector<UndConstraint>();
for(Pair p : this.getConstraintPairs()){
ret.add(new UndConstraint(p.TA1,p.TA2));
}
return ret;
*/
}
public void addConstraint(UnifyUndConstraint constraint) {
oderConstraintPairs.add(constraint);
}
/**
* Filtert die Constraints in diesem ODER-Verknüpften Constraint aus,
* welche keinen Sinn ergeben, also beim unifizieren scheitern.
* @param unifier - Wird für die Unifizierung benutzt
*/
void filterWrongConstraints(Unifikationsalgorithmus unifier) {
Set<UnifyUndConstraint> filteredConstraints = new Menge<>();
for(UnifyUndConstraint cons : this.getUndConstraints()){
Set<Set<UnifyPair>> unifierResult = unifier.apply(cons.getConstraintPairs());
if(!unifierResult.isEmpty()){
filteredConstraints.add(cons);
}else{
logger.debug("Ausgesondertes Constraint: "+cons, Section.TYPEINFERENCE);
}
}
this.oderConstraintPairs = filteredConstraints;
}
UnifyUndConstraint filterUndConstraints() {
if(this.oderConstraintPairs.size()==1){
return this.oderConstraintPairs.iterator().next();
}
return null;
}
@Override
public Set<? extends KomplexeMenge<UnifyPair>> getSet() {
return this.oderConstraintPairs;
}
}

View File

@ -0,0 +1,50 @@
package de.dhbwstuttgart.typeinference;
import java.util.Vector;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifySingleConstraint extends UnifyUndConstraint{
private UnifyPair constraintPair; //entspricht θ condition θ'
@Override
public Menge<? extends KomplexeMenge<UnifyPair>> getSet() {
Menge<EinzelElement<UnifyPair>> ret = new Menge<>();
ret.add(new EinzelElement<>(constraintPair));
return ret;
}
public UnifySingleConstraint(UnifyPair toAdd) {
this.addConstraint(toAdd);
}
public UnifyPair getPair(){
return constraintPair;
}
@Override //Methode überschreiben, damit immer nur ein Vector mit nur einem Element zurückgeliefert wird.
public Menge<UnifyPair> getConstraintPairs(){
Menge<UnifyPair> ret = new Menge<>();
ret.add(constraintPair);
return ret;
}
public void addConstraint(UnifyPair toAdd){
if(constraintPair != null)throw new DebugException("Ein Constraint darf nur aus einem ConstraintPair bestehen. Das hinzufügen von "+ toAdd + " ist nicht möglich.");
constraintPair = toAdd;
}
@Override
public String toString(){
return constraintPair.toString();
}
}

View File

@ -0,0 +1,45 @@
package de.dhbwstuttgart.typeinference;
import java.util.Collection;
import java.util.Set;
import java.util.Vector;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
/**
* Stellt ein Constraint dar, welches aus mehreren Constraint-Paaren besteht. Diese gelten alle stets gleichzeitig / sind per "Und" miteinander verknüpft.
* @author janulrich
*
*/
public class UnifyUndConstraint extends UndMenge<UnifyPair> {
Menge<EinzelElement<UnifyPair>> set = new Menge<>();
@Override
public Menge<? extends KomplexeMenge<UnifyPair>> getSet() {
return set;
}
public Set<UnifyPair> getConstraintPairs() {
Set<Set<UnifyPair>> ret = this.cartesianProduct();
if(ret.size() != 1){
//UndConstraints enthalten nur SingleConstraints, wodurch das Karthesische Produkt nur aus einem Element bestehen kann.
throw new DebugException("Fehler in ConstraintPairs-Bildung");
}
return ret.iterator().next();
}
@Override
public String toString() {
String ret = this.getConstraintPairs().toString();
return ret;
}
public void add(UnifyPair pair){
set.add(new EinzelElement<>(pair));
}
}

View File

@ -1,72 +0,0 @@
// ino.module.CSet.8698.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.CSet.8698.import
import java.util.Iterator;
// ino.end
// ino.class.CSet.27435.description type=javadoc
/**
*
* @author Jörg Bäuerle
* @version $date
*/
// ino.end
// ino.class.CSet.27435.declaration
public abstract class CSet<E> implements Iterable<E>
// ino.end
// ino.class.CSet.27435.body
{
// ino.method.addElement.27438.declaration
public abstract void addElement(E element);
// ino.end
// ino.method.removeElement.27441.declaration
public abstract void removeElement(E element);
// ino.end
// ino.method.unite.27444.declaration
public abstract void unite(CSet<E> anotherSet);
// ino.end
// ino.method.subtract.27447.declaration
public abstract void subtract(CSet<E> anotherSet);
// ino.end
// ino.method.shallowCopy.27450.declaration
public abstract CSet<E> shallowCopy();
// ino.end
// ino.method.deepCopy.27453.declaration
public abstract CSet<E> deepCopy();
// ino.end
// ino.method.contains.27456.declaration
public abstract boolean contains(E element);
// ino.end
// ino.method.getCardinality.27459.declaration
public abstract int getCardinality();
// ino.end
// ino.method.getIterator.27462.declaration
public abstract Iterator<E> getIterator();
// ino.end
// ino.method.equals.27465.declaration
public abstract boolean equals(Object obj);
// ino.end
// ino.method.toString.27468.definition
public String toString()
// ino.end
// ino.method.toString.27468.body
{
StringBuffer sb = new StringBuffer();
sb.append("Set {\n");
Iterator<E> it = this.getIterator();
while(it.hasNext()){
sb.append(it.next().toString());
sb.append(",\n");
}
if(this.getCardinality()>0){
sb.delete(sb.length()-2, sb.length()-1);
}
sb.append("}");
return sb.toString();
}
// ino.end
}
// ino.end

View File

@ -1,253 +0,0 @@
// ino.module.CSubstitution.8685.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.CSubstitution.8685.import
import java.util.Iterator;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.logger.Logger;
// ino.end
import de.dhbwstuttgart.myexception.CTypeReconstructionException;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.Pair;
// ino.class.CSubstitution.27003.description type=javadoc
/**
* Implementierung einer Typsubstitution. Bildet eine zu ersetzende
* <code>TypePlaceholder</code> auf einen Substitutions-Typ ab. Instanzen dieser
* Klasse werden in der Regel aus
* <code>Pair</code>-Objekten erzeugt.
* @author Jörg Bäuerle
* @version $Date: 2006/07/10 11:27:04 $
*/
// ino.end
// ino.class.CSubstitution.27003.declaration
public class CSubstitution
// ino.end
// ino.class.CSubstitution.27003.body
{
// ino.attribute.m_TypeVar.27006.declaration
private TypePlaceholder m_TypeVar = null;
// ino.end
// ino.attribute.m_Type.27009.declaration
protected Type m_Type = null;
// ino.end
// ino.attribute.inferencelog.27012.declaration
protected static Logger inferencelog = Logger.getLogger("inference");
// ino.end
// ino.method.CSubstitution.27015.definition
public CSubstitution()
// ino.end
// ino.method.CSubstitution.27015.body
{
this(null, null);
}
// ino.end
// ino.method.CSubstitution.27018.definition
public CSubstitution(TypePlaceholder typeVar, Type type)
// ino.end
// ino.method.CSubstitution.27018.body
{
m_TypeVar = typeVar;
m_Type = type;
}
// ino.end
// ino.method.CSubstitution.27021.definition
public CSubstitution(Pair unifier)
throws CTypeReconstructionException
// ino.end
// ino.method.CSubstitution.27021.body
{
if(!(unifier.TA1 instanceof TypePlaceholder)){
throw new CTypeReconstructionException("Unifier enthält keinen Typeplaceholder",unifier.TA1);
}
m_TypeVar = (TypePlaceholder)unifier.TA1;
m_Type = unifier.TA2;
}
// ino.end
// ino.method.getType.27024.defdescription type=javadoc
/**
* Author: Jörg Bäuerle<br/>
* @return Returns the Type.
*/
// ino.end
// ino.method.getType.27024.definition
public Type getType()
// ino.end
// ino.method.getType.27024.body
{
return m_Type;
}
// ino.end
// ino.method.setType.27027.defdescription type=javadoc
/**
* Author: Jörg Bäuerle<br/>
* @param type The Type to set.
*/
// ino.end
// ino.method.setType.27027.definition
public void setType(Type type)
// ino.end
// ino.method.setType.27027.body
{
m_Type = type;
}
// ino.end
// ino.method.getTypeVar.27030.defdescription type=javadoc
/**
* Author: Jörg Bäuerle<br/>
* @return Returns the TypeVar.
*/
// ino.end
// ino.method.getTypeVar.27030.definition
public Type getTypeVar()
// ino.end
// ino.method.getTypeVar.27030.body
{
return this.m_TypeVar;
}
// ino.end
// ino.method.setTypeVar.27033.defdescription type=javadoc
/**
* Author: Jörg Bäuerle<br/>
* @param typeVar The TypeVar to set.
*/
// ino.end
// ino.method.setTypeVar.27033.definition
public void setTypeVar(TypePlaceholder typeVar)
// ino.end
// ino.method.setTypeVar.27033.body
{
m_TypeVar = typeVar;
}
// ino.end
// ino.method.equals.27036.definition
public boolean equals(Object obj)
// ino.end
// ino.method.equals.27036.body
{
if(obj instanceof CSubstitution){
CSubstitution sub = (CSubstitution)obj;
boolean ret = true;
ret &= (m_TypeVar.equals(sub.m_TypeVar));
ret &= (m_Type.equals(sub.m_Type));
return ret;
}
else{
return false;
}
}
// ino.end
// ino.method.toString.27039.definition
public String toString()
// ino.end
// ino.method.toString.27039.body
{
//return m_TypeVar.getName() +" --> "+m_Type.getName();
return m_TypeVar.toString() +" --> "+m_Type.toString();
}
// ino.end
// ino.method.clone.27042.definition
public CSubstitution clone()
// ino.end
// ino.method.clone.27042.body
{
CSubstitution copy = new CSubstitution(m_TypeVar.clone(), m_Type.clone());
return copy;
}
// ino.end
// ino.method.applyUnifier.27048.defdescription type=javadoc
/**
* Wendet den Unifier auf die rechte Seite dieser Substitution an.
* <br/>Author: Jörg Bäuerle
* @param unifier
*/
// ino.end
// ino.method.applyUnifier.27048.definition
public void applyUnifier(CSubstitutionSet unifier)
// ino.end
// ino.method.applyUnifier.27048.body
{
Iterator pairIt = unifier.getIterator();
while(pairIt.hasNext()){
CSubstitution subst = (CSubstitution)pairIt.next();
//korrigiert PL 05-07-31 das erste duerfte doch richtig sein.
//subst.setType(this.applySubstitution(subst.getType(), subst));
this.setType(this.applySubstitution(this.getType(), subst));
}
}
// ino.end
// ino.method.applySubstitution.27051.defdescription type=javadoc
/**
* Wendet die übergebene Substitution rekursiv auf den übergebenen Typ an.
* <br/>Author: Jörg Bäuerle
* @param type Der zu untersuchende Typ
* @param unifierSub Die anzuwendende Substitution
* @return Den ermittelnden Typ
*/
// ino.end
// ino.method.applySubstitution.27051.definition
private Type applySubstitution(Type type, CSubstitution unifierSub)
// ino.end
// ino.method.applySubstitution.27051.body
{
if(type instanceof TypePlaceholder){
if(type.equals(unifierSub.getTypeVar())){
return unifierSub.getType();
}
}
else if(type instanceof GenericTypeVar){
if(type.equals(unifierSub.getTypeVar())){
return unifierSub.getType();
}
}
else if(type instanceof RefType){
Menge<Type> paras = ((RefType)type).get_ParaList();
if(paras != null){
for(int i=0; i<paras.size(); i++){
paras.setElementAt(this.applySubstitution((Type)paras.elementAt(i), unifierSub), i);
}
}
}
return type;
}
// ino.end
// ino.method.applyThisSubstitution.27054.definition
public Type applyThisSubstitution(Type type)
// ino.end
// ino.method.applyThisSubstitution.27054.body
{
return applySubstitution(type, this);
}
// ino.end
}
// ino.end

View File

@ -1,70 +0,0 @@
// ino.module.CSubstitutionGenVar.8686.package
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
// ino.end
import de.dhbwstuttgart.syntaxtree.type.Type;
// ino.class.CSubstitutionGenVar.27057.description type=javadoc
/**
* Implementierung einer Typsubstitution der GenVar. Bildet eine zu ersetzende
* <code>TypePlaceholder</code> auf einen Substitutions-Typ ab. Instanzen dieser
* Klasse werden in der Regel aus
* <code>Pair</code>-Objekten erzeugt.
* @author Martin Plümicke
* @version $Date: 2006/06/13 10:37:32 $
*/
// ino.end
// ino.class.CSubstitutionGenVar.27057.declaration
public class CSubstitutionGenVar extends CSubstitution
// ino.end
// ino.class.CSubstitutionGenVar.27057.body
{
// ino.attribute.m_TypeVar.27061.declaration
private GenericTypeVar m_TypeVar = null;
// ino.end
// ino.method.CSubstitutionGenVar.27064.definition
public CSubstitutionGenVar()
// ino.end
// ino.method.CSubstitutionGenVar.27064.body
{
this(null, null);
}
// ino.end
// ino.method.CSubstitutionGenVar.27067.definition
public CSubstitutionGenVar(GenericTypeVar typeVar, Type type)
// ino.end
// ino.method.CSubstitutionGenVar.27067.body
{
m_TypeVar = typeVar;
m_Type = type;
}
// ino.end
// ino.method.getTypeVar.27070.defdescription type=javadoc
/**
* Author: Jörg Bäuerle<br/>
* @return Returns the TypeVar.
*/
// ino.end
// ino.method.getTypeVar.27070.definition
public Type getTypeVar()
// ino.end
// ino.method.getTypeVar.27070.body
{
return this.m_TypeVar;
}
// ino.end
// ino.method.toString.27073.definition
public String toString()
// ino.end
// ino.method.toString.27073.body
{
return this.m_TypeVar.getName() +" --> "+this.m_Type.getName();
}
// ino.end
}
// ino.end

View File

@ -1,111 +0,0 @@
// ino.module.CSubstitutionSet.8699.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.CSubstitutionSet.8699.import
import java.util.Iterator;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.myexception.CTypeReconstructionException;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Pair;
// ino.class.CSubstitutionSet.27471.description type=javadoc
/**
* @author Jörg Bäuerle
* @version $Date: 2013/03/27 18:29:34 $
*/
// ino.end
// ino.class.CSubstitutionSet.27471.declaration
public class CSubstitutionSet extends CVectorSet<CSubstitution>
// ino.end
// ino.class.CSubstitutionSet.27471.body
{
// ino.method.CSubstitutionSet.27475.definition
public CSubstitutionSet()
// ino.end
// ino.method.CSubstitutionSet.27475.body
{
super();
}
// ino.end
// ino.method.CSubstitutionSet.27478.definition
public CSubstitutionSet(Menge<Pair> unifiers)
throws CTypeReconstructionException
// ino.end
// ino.method.CSubstitutionSet.27478.body
{
super();
for(int i=0; i<unifiers.size(); i++){
this.addElement(new CSubstitution(unifiers.elementAt(i)));
}
}
// ino.end
// ino.method.shallowCopy.27481.definition
public CSubstitutionSet shallowCopy()
// ino.end
// ino.method.shallowCopy.27481.body
{
CSubstitutionSet copy = new CSubstitutionSet();
copy.setMenge((Menge)this.getMenge().clone());
return copy;
}
// ino.end
// ino.method.deepCopy.27484.definition
public CSubstitutionSet deepCopy()
// ino.end
// ino.method.deepCopy.27484.body
{
CSubstitutionSet copy = new CSubstitutionSet();
Iterator<CSubstitution> substIter = this.getIterator();
while(substIter.hasNext()){
copy.addElement(substIter.next().clone());
}
return copy;
}
// ino.end
// ino.method.applyUnifier.27487.defdescription type=javadoc
/**
* Wendet den Unifier auf die rechten Seiten alle Substitutionen an.
* <br/>Author: Jörg Bäuerle
* @param unifier
*/
// ino.end
// ino.method.applyUnifier.27487.definition
public void applyUnifier(CSubstitutionSet unifier)
// ino.end
// ino.method.applyUnifier.27487.body
{
Iterator<CSubstitution> substIt = this.getIterator();
while(substIt.hasNext()){
substIt.next().applyUnifier(unifier);
}
}
// ino.end
// ino.method.applyThisSubstitutionSet.27490.definition
public Type applyThisSubstitutionSet(Type type)
// ino.end
// ino.method.applyThisSubstitutionSet.27490.body
{
Iterator<CSubstitution> substIt = this.getIterator();
Type ty = type;
while(substIt.hasNext()) {
ty = substIt.next().applyThisSubstitution(ty);
}
return ty;
}
// ino.end
public Iterator<CSubstitution> iterator() {
return this.getIterator();
}
}
// ino.end

View File

@ -1,165 +0,0 @@
// ino.module.CMengeSet.8702.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.CMengeSet.8702.import
import java.util.Iterator;
import de.dhbwstuttgart.typeinference.Menge;
// ino.end
// ino.class.CMengeSet.27519.description type=javadoc
/**
* @author Jörg Bäuerle
* @version $Date: 2013/02/07 05:08:51 $
*/
// ino.end
// ino.class.CMengeSet.27519.declaration
public abstract class CVectorSet<E> extends CSet<E>
// ino.end
// ino.class.CMengeSet.27519.body
{
// ino.attribute.m_Elements.27523.declaration
private Menge<E> m_Elements = null;
// ino.end
// ino.method.CMengeSet.27526.definition
public CVectorSet()
// ino.end
// ino.method.CMengeSet.27526.body
{
m_Elements = new Menge<E>();
}
// ino.end
// ino.method.addElement.27529.definition
public void addElement(E element)
// ino.end
// ino.method.addElement.27529.body
{
m_Elements.addElement(element);
}
// ino.end
// ino.method.removeElement.27532.definition
public void removeElement(E element)
// ino.end
// ino.method.removeElement.27532.body
{
m_Elements.addElement(element);
}
// ino.end
public void addAll( CVectorSet<E> set )
{
for( int i=0;i<set.getCardinality(); i++ ){
m_Elements.addElement(set.m_Elements.elementAt(i));
}
}
// ino.method.getIterator.27535.definition
public Iterator<E> getIterator()
// ino.end
// ino.method.getIterator.27535.body
{
return m_Elements.iterator();
}
// ino.end
// ino.method.getMenge.27538.definition
public Menge<E> getMenge()
// ino.end
// ino.method.getMenge.27538.body
{
return m_Elements;
}
// ino.end
// ino.method.setMenge.27541.definition
public void setMenge(Menge<E> elements)
// ino.end
// ino.method.setMenge.27541.body
{
m_Elements = elements;
}
// ino.end
/**
* Adds another CVectorSet.
* This is a union: elements that are already present are not added again.
* @param anotherSet The CVectorSet to add (other CSet implementations are ignored)
*/
// ino.method.unite.27544.definition
public void unite(CSet<E> anotherSet)
// ino.end
// ino.method.unite.27544.body
{
if(!(anotherSet instanceof CVectorSet)){
return;
}
CVectorSet<E> MengeSet = (CVectorSet<E>)anotherSet;
// Add the elements of the other set:
Iterator<E> it = MengeSet.getIterator();
while(it.hasNext()){
E elem = it.next();
if(!m_Elements.contains(elem)){
m_Elements.addElement(elem);
}
}
//m_Elements.addAll(MengeSet.m_Elements);
}
// ino.end
// ino.method.subtract.27547.definition
public void subtract(CSet<E> anotherSet)
// ino.end
// ino.method.subtract.27547.body
{
if(!(anotherSet instanceof CVectorSet)){
return;
}
CVectorSet<E> MengeSet = (CVectorSet<E>)anotherSet;
// Remove the elements of the other set:
m_Elements.removeAll(MengeSet.m_Elements);
}
// ino.end
// ino.method.contains.27550.definition
public boolean contains(E element)
// ino.end
// ino.method.contains.27550.body
{
return m_Elements.contains(element);
}
// ino.end
// ino.method.equals.27553.definition
public boolean equals(Object obj)
// ino.end
// ino.method.equals.27553.body
{
if(obj instanceof CVectorSet){
CVectorSet tripSet= (CVectorSet)obj;
boolean ret = true;
ret &= (m_Elements.containsAll(tripSet.m_Elements));
ret &= (tripSet.m_Elements.containsAll(m_Elements));
return ret;
}
else{
return false;
}
}
// ino.end
// ino.method.getCardinality.27556.definition
public int getCardinality()
// ino.end
// ino.method.getCardinality.27556.body
{
return m_Elements.size();
}
// ino.end
}
// ino.end
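unite() is a duplicate-free union and subtract() a set difference. A minimal sketch of the same semantics on a plain java.util.List, independent of Menge/CSet (element equality via equals() is assumed):

import java.util.List;

class VectorSetOpsSketch {
    // Union without duplicates: only elements not yet present are added.
    static <E> void unite(List<E> target, List<E> other) {
        for (E e : other)
            if (!target.contains(e))
                target.add(e);
    }

    // Set difference: every element of 'other' is removed from 'target'.
    static <E> void subtract(List<E> target, List<E> other) {
        target.removeAll(other);
    }
}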

View File

@ -1,82 +0,0 @@
// ino.module.FC_TTO.8719.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.FC_TTO.8719.import
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
// ino.class.FC_TTO.28013.description type=javadoc
/**
* Helper class for the unification algorithm
* @author Martin Plümicke
* @version $Date: 2013/05/12 14:00:05 $
*/
// ino.end
// ino.class.FC_TTO.28013.declaration
public class FC_TTO
// ino.end
// ino.class.FC_TTO.28013.body
{
// ino.attribute.FC.28016.declaration
Menge<Pair> FC;
// ino.end
// ino.attribute.TTO.28019.declaration
Menge TTO;
// ino.end
Menge<Class> CLASSVEC;
// ino.method.FC_TTO.28022.definition
public FC_TTO(Menge<Pair> fc, Menge tto, Menge<Class> classv)
// ino.end
// ino.method.FC_TTO.28022.body
{
this.FC = fc;
this.TTO = tto;
this.CLASSVEC = classv;
}
// ino.end
// ino.method.getFC.28025.definition
public Menge<Pair> getFC()
// ino.end
// ino.method.getFC.28025.body
{
return FC;
}
// ino.end
// ino.method.getTTO.28028.definition
public Menge getTTO()
// ino.end
// ino.method.getTTO.28028.body
{
return TTO;
}
// ino.end
public Menge<Class> getClasses()
{
return CLASSVEC;
}
@Override
public String toString(){
return "FC: "+getFC()+"\nTTO: "+getTTO()+"\nCLASSVEC: "+getClasses();
}
public void generateFullyNamedTypes(TypeAssumptions ass) {
for(Pair p : this.FC){
p.TA1 = p.TA1.TYPE(ass, p.TA1.getParent());//ass.getTypeFor(p.TA1, p.TA1.getParent()).getType();
p.TA2 = p.TA2.TYPE(ass, p.TA2.getParent());//ass.getTypeFor(p.TA2, p.TA2.getParent()).getType();
}
}
}
// ino.end
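generateFullyNamedTypes rewrites both sides of every FC pair with their fully qualified counterparts obtained from the assumptions. A generic sketch of that rewrite loop (the Pair record and resolver function below are illustrative stand-ins, not the project's types; Java 16+ records assumed):

import java.util.List;
import java.util.function.UnaryOperator;

class ResolveFcSketch {
    record Pair<T>(T lhs, T rhs) {}

    // Applies a resolver (e.g. "simple name -> fully qualified name") to both sides of every pair.
    static <T> List<Pair<T>> resolveAll(List<Pair<T>> fc, UnaryOperator<T> resolve) {
        return fc.stream()
                 .map(p -> new Pair<>(resolve.apply(p.lhs()), resolve.apply(p.rhs())))
                 .toList();
    }
}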

View File

@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;
import java.util.List;
import java.util.Set;
@ -7,10 +7,16 @@ import com.google.common.collect.Sets;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
/**
* Implements set operations using google guava.
* @author DH10STF
*
*/
public class GuavaSetOperations implements ISetOperations {
@Override
public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
// Wraps the call to google guava
return Sets.cartesianProduct(sets);
}
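A quick usage example of the wrapped Guava call (assuming Guava on the classpath and a recent JDK for List.of; the element values are arbitrary):

import java.util.List;
import java.util.Set;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

class CartesianProductDemo {
    public static void main(String[] args) {
        // One element is picked from each input set, in the order the sets were given.
        Set<List<Integer>> product = Sets.cartesianProduct(
                List.of(ImmutableSet.of(1, 2), ImmutableSet.of(3, 4)));
        System.out.println(product); // e.g. [[1, 3], [1, 4], [2, 3], [2, 4]]
    }
}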

View File

@ -1,53 +0,0 @@
// ino.module.MUB.8720.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end
// ino.module.MUB.8720.import
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Pair;
// ino.class.MUB.28031.declaration
public class MUB
// ino.end
// ino.class.MUB.28031.body
{
// ino.attribute.Mub.28034.declaration
Menge<? extends Type> Mub;
// ino.end
// ino.attribute.sigma.28037.declaration
Menge<Pair> sigma;
// ino.end
// ino.method.MUB.28040.definition
MUB(Menge<? extends Type> M, Menge<Pair> s)
// ino.end
// ino.method.MUB.28040.body
{
Mub = M;
sigma = s;
}
// ino.end
// ino.method.getUnifier.28043.definition
public Menge<Pair> getUnifier()
// ino.end
// ino.method.getUnifier.28043.body
{
return sigma;
}
// ino.end
// ino.method.getMub.28046.definition
public Menge<? extends Type> getMub()
// ino.end
// ino.method.getMub.28046.body
{
return Mub;
}
// ino.end
}
// ino.end

View File

@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;
import java.util.HashMap;
import java.util.HashSet;
@ -6,6 +6,9 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
public class Mapping {
private HashMap<de.dhbwstuttgart.typeinference.unify.model.UnifyType, de.dhbwstuttgart.syntaxtree.type.Type> backwardMap = new HashMap<>();
@ -21,15 +24,15 @@ public class Mapping {
return forwardMap.get(type);
}
public de.dhbwstuttgart.typeinference.unify.model.MPair map(de.dhbwstuttgart.typeinference.Pair pair) {
return new de.dhbwstuttgart.typeinference.unify.model.MPair(forwardMap.get(pair.TA1), forwardMap.get(pair.TA2), mapOp(pair.GetOperator()));
public de.dhbwstuttgart.typeinference.unify.model.UnifyPair map(de.dhbwstuttgart.typeinference.Pair pair) {
return new de.dhbwstuttgart.typeinference.unify.model.UnifyPair(forwardMap.get(pair.TA1), forwardMap.get(pair.TA2), mapOp(pair.GetOperator()));
}
public Set<de.dhbwstuttgart.typeinference.unify.model.UnifyType> mapTypeSet(Set<de.dhbwstuttgart.syntaxtree.type.Type> types) {
return types.stream().map(this::map).collect(Collectors.toCollection(HashSet::new));
}
public Set<de.dhbwstuttgart.typeinference.unify.model.MPair> mapPairSet(Set<de.dhbwstuttgart.typeinference.Pair> pairs) {
public Set<de.dhbwstuttgart.typeinference.unify.model.UnifyPair> mapPairSet(Set<de.dhbwstuttgart.typeinference.Pair> pairs) {
return pairs.stream().map(this::map).collect(Collectors.toCollection(HashSet::new));
}
@ -37,13 +40,13 @@ public class Mapping {
return irreversible.contains(type) ? Optional.of(backwardMap.get(type)) : Optional.empty();
}
public Optional<de.dhbwstuttgart.typeinference.Pair> unmap(de.dhbwstuttgart.typeinference.unify.model.MPair mpair) {
public Optional<Pair> unmap(de.dhbwstuttgart.typeinference.unify.model.UnifyPair mpair) {
de.dhbwstuttgart.typeinference.unify.model.UnifyType lhs = mpair.getLhsType();
de.dhbwstuttgart.typeinference.unify.model.UnifyType rhs = mpair.getRhsType();
if(irreversible.contains(lhs) || irreversible.contains(rhs))
return Optional.empty();
return Optional.of(new de.dhbwstuttgart.typeinference.Pair(backwardMap.get(lhs), backwardMap.get(rhs), unmapOp(mpair.getPairOp())));
return Optional.of(new Pair(backwardMap.get(lhs), backwardMap.get(rhs), unmapOp(mpair.getPairOp())));
}
public Optional<Set<de.dhbwstuttgart.syntaxtree.type.Type>> unmapTypeSet(Set<de.dhbwstuttgart.typeinference.unify.model.UnifyType> types) {
@ -51,40 +54,19 @@ public class Mapping {
return result.size() == types.size() ? Optional.of(result) : Optional.empty();
}
public Optional<Set<de.dhbwstuttgart.typeinference.Pair>> unmapPairSet(Set<de.dhbwstuttgart.typeinference.unify.model.MPair> pairs) {
public Optional<Set<de.dhbwstuttgart.typeinference.Pair>> unmapPairSet(Set<de.dhbwstuttgart.typeinference.unify.model.UnifyPair> pairs) {
Set<de.dhbwstuttgart.typeinference.Pair> result = pairs.stream().map(this::unmap).filter(x -> x.isPresent()).map(x -> x.get()).collect(Collectors.toCollection(HashSet::new));
return result.size() == pairs.size() ? Optional.of(result) : Optional.empty();
}
private de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator mapOp(de.dhbwstuttgart.typeinference.Pair.PairOperator op) {
/*
* TODO
* Why does PairOp only have three values? How are SMALLERDOTWC etc. handled in the other Pair?
*/
switch(op) {
case Equal:
return de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator.EQUALS;
case Smaller:
return de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator.SMALLER;
case SmallerExtends:
return de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator.SMALLERDOT;
default:
return de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator.EQUALS;
}
private PairOperator mapOp(PairOperator op) {
//TODO: this method can be removed:
return op;
}
private de.dhbwstuttgart.typeinference.Pair.PairOperator unmapOp(de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator op) {
switch(op) {
case EQUALS:
return de.dhbwstuttgart.typeinference.Pair.PairOperator.Equal;
case SMALLER:
return de.dhbwstuttgart.typeinference.Pair.PairOperator.Smaller;
case SMALLERDOT:
return de.dhbwstuttgart.typeinference.Pair.PairOperator.SmallerExtends;
default:
return de.dhbwstuttgart.typeinference.Pair.PairOperator.Equal;
}
private PairOperator unmapOp(PairOperator op) {
//TODO: this method can be removed:
return op;
}
}
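The interesting part of unmap is the irreversibility check: a pair is only translated back when neither side was produced by a lossy forward mapping. A toy sketch of that pattern on plain strings (not the project's type classes; names are illustrative):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

class BackMappingSketch {
    private final Map<String, String> backwardMap = new HashMap<>();
    private final Set<String> irreversible = new HashSet<>();

    // Returns the original pair, or empty if information was lost on the way in.
    Optional<String[]> unmap(String lhs, String rhs) {
        if (irreversible.contains(lhs) || irreversible.contains(rhs))
            return Optional.empty();
        return Optional.of(new String[] { backwardMap.get(lhs), backwardMap.get(rhs) });
    }
}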

View File

@ -0,0 +1,105 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
/**
* Implementation of the Martelli-Montanari unification algorithm.
* @author Florian Steurer
*/
public class MartelliMontanariUnify implements IUnify {
@Override
public Optional<Unifier> unify(Set<UnifyType> terms) {
// Sets with fewer than 2 terms are trivially unified
if(terms.size() < 2)
return Optional.of(Unifier.Identity());
// For the set of terms {t1,...,tn},
// build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
Iterator<UnifyType> iter = terms.iterator();
UnifyType prev = iter.next();
while(iter.hasNext()) {
UnifyType next = iter.next();
termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
prev = next;
}
// Start with the identity unifier. Substitutions will be added later.
Unifier mgu = Unifier.Identity();
// Apply rules while possible
int idx = 0;
while(idx < termsList.size()) {
UnifyPair pair = termsList.get(idx);
UnifyType rhsType = pair.getRhsType();
UnifyType lhsType = pair.getLhsType();
TypeParams rhsTypeParams = rhsType.getTypeParams();
TypeParams lhsTypeParams = lhsType.getTypeParams();
// DELETE - Rule
if(pair.getRhsType().equals(pair.getLhsType())) {
termsList.remove(idx);
continue;
}
// REDUCE - Rule
if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)
&& (rhsTypeParams.size() != 0 || lhsTypeParams.size() != 0)) {
Set<UnifyPair> result = new HashSet<>();
// f<...> = g<...> with f != g are not unifiable
if(!rhsType.getName().equals(lhsType.getName()))
return Optional.empty(); // conflict
// f<t1,...,tn> = f<s1,...,sm> with n != m are not unifiable
if(rhsTypeParams.size() != lhsTypeParams.size())
return Optional.empty(); // conflict
// Unpack the arguments
for(int i = 0; i < rhsTypeParams.size(); i++)
result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));
termsList.remove(idx);
termsList.addAll(result);
continue;
}
// SWAP - Rule
if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
termsList.remove(idx);
termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
continue;
}
// OCCURS-CHECK
if(pair.getLhsType() instanceof PlaceholderType
&& pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
return Optional.empty();
// SUBST - Rule
if(lhsType instanceof PlaceholderType) {
mgu.Add((PlaceholderType) lhsType, rhsType);
termsList = termsList.stream().map(mgu::apply).collect(Collectors.toCollection(ArrayList::new));
idx = idx+1 == termsList.size() ? 0 : idx+1;
continue;
}
idx++;
}
return Optional.of(mgu);
}
}
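To illustrate the loop above on concrete data, here is a self-contained toy version of the same Martelli-Montanari rules on a minimal term type (variables and constructor applications). It does not use the project's UnifyType/UnifyPair classes; all names are illustrative, and Java 16+ records are assumed.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

class ToyMartelliMontanari {
    // A term is a variable (args == null) or a constructor application f(t1,...,tn).
    record Term(String name, List<Term> args) {
        static Term var(String n)             { return new Term(n, null); }
        static Term app(String f, Term... ts) { return new Term(f, List.of(ts)); }
        boolean isVar()                       { return args == null; }
    }

    // Applies a substitution (variable name -> term) to a term.
    static Term apply(Map<String, Term> s, Term t) {
        if (t.isVar()) return s.getOrDefault(t.name(), t);
        List<Term> as = new ArrayList<>();
        for (Term a : t.args()) as.add(apply(s, a));
        return new Term(t.name(), as);
    }

    static boolean occurs(String v, Term t) {
        if (t.isVar()) return t.name().equals(v);
        return t.args().stream().anyMatch(a -> occurs(v, a));
    }

    // Returns the most general unifier of the equations, or empty on clash/occurs failure.
    static Optional<Map<String, Term>> unify(Deque<Term[]> equations) {
        Map<String, Term> mgu = new HashMap<>();
        while (!equations.isEmpty()) {
            Term[] eq = equations.poll();
            Term l = apply(mgu, eq[0]);
            Term r = apply(mgu, eq[1]);
            if (l.equals(r)) continue;                          // DELETE
            if (!l.isVar() && !r.isVar()) {                     // REDUCE
                if (!l.name().equals(r.name()) || l.args().size() != r.args().size())
                    return Optional.empty();                    // conflict
                for (int i = 0; i < l.args().size(); i++)
                    equations.add(new Term[] { l.args().get(i), r.args().get(i) });
                continue;
            }
            if (!l.isVar()) { Term tmp = l; l = r; r = tmp; }   // SWAP
            if (occurs(l.name(), r)) return Optional.empty();   // OCCURS-CHECK
            final Term rhs = r;                                 // SUBST
            final String v = l.name();
            mgu.replaceAll((k, t) -> apply(Map.of(v, rhs), t)); // keep mgu in solved form
            mgu.put(v, rhs);
        }
        return Optional.of(mgu);
    }

    public static void main(String[] args) {
        // unify Pair<a, List<b>> with Pair<Integer, c>  ==>  {a=Integer, c=List<b>}
        Deque<Term[]> eqs = new ArrayDeque<>();
        eqs.add(new Term[] {
            Term.app("Pair", Term.var("a"), Term.app("List", Term.var("b"))),
            Term.app("Pair", Term.app("Integer"), Term.var("c"))
        });
        System.out.println(unify(eqs));
    }
}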

View File

@ -1,44 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.Menge;
import java.util.stream.Stream;
import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;
public class ParallelUnify {
public ParallelUnify(ConstraintsSet constraints){
//constraints.getConstraints();
}
private CartesianProduct parallelCartProd(){
return null;
}
private UnifyResult parallelUnify(Menge<Pair> pairs, FC_TTO fc){
UnifyResult ret = new UnifyResult();
return ret;
}
public UnifyResult unify(){
UnifyResult ret = new UnifyResult();
return ret;
}
}
class ParallelConstraintSet extends ConstraintsSet{
Stream parallelGetConstraints(){
return null;
}
}
class UnifyResult{
}
class CartesianProduct{
}

View File

@ -0,0 +1,778 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import java.util.stream.Collectors;
import junit.framework.Assert;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
/**
* Implementation of the type inference rules.
* @author Florian Steurer
*
*/
public class RuleSet implements IRuleSet{
protected IFiniteClosure finiteClosure;
/**
* Creates a new instance that uses the specified FC for greater, grArg, etc.
* @param fc The FC that is used for greater, grArg, etc.
*/
public RuleSet(IFiniteClosure fc) {
finiteClosure = fc;
}
@Override
public Optional<UnifyPair> reduceUp(UnifyPair pair) {
// Check if reduce up is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof SuperType))
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return Optional.empty();
// Rule is applicable, unpack the SuperType
return Optional.of(new UnifyPair(lhsType, ((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> reduceLow(UnifyPair pair) {
// Check if rule is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ExtendsType))
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return Optional.empty();
// Rule is applicable, unpack the ExtendsType
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), rhsType, PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> reduceUpLow(UnifyPair pair) {
// Check if rule is applicable
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ExtendsType))
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof SuperType))
return Optional.empty();
// Rule is applicable, unpack both sides
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(),((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT));
}
@Override
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType x = pair.getLhsType();
UnifyType sTypeX;
if(x instanceof ReferenceType)
sTypeX = x;
else if(x instanceof ExtendsType)
sTypeX = ((ExtendsType) x).getExtendedType();
else
return Optional.empty();
UnifyType extY = pair.getRhsType();
if(!(extY instanceof ExtendsType))
return Optional.empty();
if(x.getTypeParams().empty() || extY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = finiteClosure.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
if(x instanceof ExtendsType)
xFromFc = new ExtendsType(xFromFc);
UnifyType extYFromFc = finiteClosure.grArg(xFromFc).stream().filter(t -> t.getName().equals(extY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(extYFromFc == null || extYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
TypeParams extYParams = extY.getTypeParams();
TypeParams xParams = x.getTypeParams();
int[] pi = pi(xParams, extYParams);
if(pi.length == 0)
return Optional.empty();
Set<UnifyPair> result = new HashSet<>();
for(int rhsIdx = 0; rhsIdx < extYParams.size(); rhsIdx++)
result.add(new UnifyPair(xParams.get(pi[rhsIdx]), extYParams.get(rhsIdx), PairOperator.SMALLERDOTWC));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType x = pair.getLhsType();
UnifyType sTypeX;
if(x instanceof ReferenceType)
sTypeX = x;
else if(x instanceof SuperType)
sTypeX = ((SuperType) x).getSuperedType();
else
return Optional.empty();
UnifyType supY = pair.getRhsType();
if(!(supY instanceof SuperType))
return Optional.empty();
if(x.getTypeParams().empty() || supY.getTypeParams().size() != x.getTypeParams().size())
return Optional.empty();
UnifyType xFromFc = finiteClosure.getLeftHandedType(sTypeX.getName()).orElse(null);
if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
if(x instanceof SuperType)
xFromFc = new SuperType(xFromFc);
UnifyType supYFromFc = finiteClosure.grArg(xFromFc).stream().filter(t -> t.getName().equals(supY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
if(supYFromFc == null || supYFromFc.getTypeParams() != xFromFc.getTypeParams())
return Optional.empty();
TypeParams supYParams = supY.getTypeParams();
TypeParams xParams = x.getTypeParams();
Set<UnifyPair> result = new HashSet<>();
int[] pi = pi(xParams, supYParams);
if(pi.length == 0)
return Optional.empty();
for(int rhsIdx = 0; rhsIdx < supYParams.size(); rhsIdx++)
result.add(new UnifyPair(supYParams.get(rhsIdx), xParams.get(pi[rhsIdx]), PairOperator.SMALLERDOTWC));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(lhsType instanceof PlaceholderType || lhsType.getTypeParams().empty())
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!rhsType.getName().equals(lhsType.getName()))
return Optional.empty();
if(rhsType instanceof PlaceholderType || lhsType instanceof PlaceholderType || rhsType.getTypeParams().empty())
return Optional.empty();
if(rhsType.getTypeParams().size() != lhsType.getTypeParams().size())
return Optional.empty();
// No permutation needed here, unlike in the paper
Set<UnifyPair> result = new HashSet<>();
TypeParams lhsTypeParams = lhsType.getTypeParams();
TypeParams rhsTypeParams = rhsType.getTypeParams();
for(int i = 0; i < lhsTypeParams.size(); i++)
result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType c = pair.getLhsType();
if(!(c instanceof ReferenceType))
return Optional.empty();
UnifyType d = pair.getRhsType();
if(!(d instanceof ReferenceType))
return Optional.empty();
ReferenceType lhsSType = (ReferenceType) c;
ReferenceType rhsSType = (ReferenceType) d;
if(lhsSType.getTypeParams().empty() || lhsSType.getTypeParams().size() != rhsSType.getTypeParams().size())
return Optional.empty();
UnifyType cFromFc = finiteClosure.getLeftHandedType(c.getName()).orElse(null);
if(cFromFc == null || !cFromFc.getTypeParams().arePlaceholders())
return Optional.empty();
UnifyType dFromFc = finiteClosure.getAncestors(cFromFc).stream().filter(x -> x.getName().equals(d.getName())).findAny().orElse(null);
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0)
return Optional.empty();
TypeParams rhsTypeParams = d.getTypeParams();
TypeParams lhsTypeParams = c.getTypeParams();
Set<UnifyPair> result = new HashSet<>();
for(int rhsIdx = 0; rhsIdx < rhsTypeParams.size(); rhsIdx++)
result.add(new UnifyPair(lhsTypeParams.get(pi[rhsIdx]), rhsTypeParams.get(rhsIdx), PairOperator.SMALLERDOTWC));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> reduce2(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
ReferenceType lhsSType;
if(lhsType instanceof ReferenceType)
lhsSType = (ReferenceType) lhsType;
else if(lhsType instanceof WildcardType) {
UnifyType lhsSTypeRaw = ((WildcardType) lhsType).getWildcardedType();
if(lhsSTypeRaw instanceof ReferenceType)
lhsSType = (ReferenceType) lhsSTypeRaw;
else
return Optional.empty();
}
else
return Optional.empty();
if(lhsSType.getTypeParams().empty())
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
ReferenceType rhsSType;
if(rhsType instanceof ReferenceType)
rhsSType = (ReferenceType) rhsType;
else if(rhsType instanceof WildcardType) {
UnifyType rhsSTypeRaw = ((WildcardType) rhsType).getWildcardedType();
if(rhsSTypeRaw instanceof ReferenceType)
rhsSType = (ReferenceType) rhsSTypeRaw;
else
return Optional.empty();
}
else
return Optional.empty();
if(!rhsSType.getName().equals(lhsSType.getName()))
return Optional.empty();
Assert.assertEquals(lhsSType.getTypeParams().size(), rhsSType.getTypeParams().size());
//if(rhsSType.getTypeParams().size() != lhsSType.getTypeParams().size())
// return Optional.empty();
Set<UnifyPair> result = new HashSet<>();
TypeParams rhsTypeParams = rhsSType.getTypeParams();
TypeParams lhsTypeParams = lhsSType.getTypeParams();
for(int i = 0; i < rhsTypeParams.size(); i++)
result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT));
return Optional.of(result);
}
@Override
public boolean erase1(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return false;
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return false;
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return false;
return finiteClosure.greater(lhsType).contains(rhsType);
}
@Override
public boolean erase2(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return false;
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
return finiteClosure.grArg(lhsType).contains(rhsType);
}
@Override
public boolean erase3(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return false;
return pair.getLhsType().equals(pair.getRhsType());
}
@Override
public Optional<UnifyPair> swap(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.EQUALSDOT)
return Optional.empty();
if(pair.getLhsType() instanceof PlaceholderType)
return Optional.empty();
if(!(pair.getRhsType() instanceof PlaceholderType))
return Optional.empty();
return Optional.of(new UnifyPair(pair.getRhsType(), pair.getLhsType(), PairOperator.EQUALSDOT));
}
@Override
public Optional<UnifyPair> adapt(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType typeD = pair.getLhsType();
if(!(typeD instanceof ReferenceType))
return Optional.empty();
UnifyType typeDs = pair.getRhsType();
if(!(typeDs instanceof ReferenceType))
return Optional.empty();
/*if(typeD.getTypeParams().size() == 0 || typeDs.getTypeParams().size() == 0)
return Optional.empty();*/
if(typeD.getName().equals(typeDs.getName()))
return Optional.empty();
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(typeD.getName());
if(!opt.isPresent())
return Optional.empty();
// The generic version of type D (D<a1, a2, a3, ...>)
UnifyType typeDgen = opt.get();
// Actually greater+ because the types are ensured to have different names
Set<UnifyType> greater = finiteClosure.getAncestors(typeDgen);
opt = greater.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
UnifyType newLhs = opt.get();
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
Unifier unif = Unifier.Identity();
for(int i = 0; i < typeDParams.size(); i++)
unif.Add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> adaptExt(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType typeD = pair.getLhsType();
if(!(typeD instanceof ReferenceType) && !(typeD instanceof ExtendsType))
return Optional.empty();
UnifyType typeExtDs = pair.getRhsType();
if(!(typeExtDs instanceof ExtendsType))
return Optional.empty();
if(typeD.getTypeParams().size() == 0 || typeExtDs.getTypeParams().size() == 0)
return Optional.empty();
UnifyType typeDgen;
if(typeD instanceof ReferenceType)
typeDgen = finiteClosure.getLeftHandedType(typeD.getName()).orElse(null);
else {
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(((ExtendsType) typeD).getExtendedType().getName());
typeDgen = opt.isPresent() ? new ExtendsType(opt.get()) : null;
}
if(typeDgen == null)
return Optional.empty();
Set<UnifyType> grArg = finiteClosure.grArg(typeDgen);
Optional<UnifyType> opt = grArg.stream().filter(x -> x.getName().equals(typeExtDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
UnifyType newLhs = ((ExtendsType) opt.get()).getExtendedType();
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
Unifier unif = new Unifier((PlaceholderType) typeDgenParams.get(0), typeDParams.get(0));
for(int i = 1; i < typeDParams.size(); i++)
unif.Add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
return Optional.of(new UnifyPair(unif.apply(newLhs), typeExtDs, PairOperator.SMALLERDOTWC));
}
@Override
public Optional<UnifyPair> adaptSup(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType typeDs = pair.getLhsType();
if(!(typeDs instanceof ReferenceType) && !(typeDs instanceof SuperType))
return Optional.empty();
UnifyType typeSupD = pair.getRhsType();
if(!(typeSupD instanceof SuperType))
return Optional.empty();
if(typeDs.getTypeParams().size() == 0 || typeSupD.getTypeParams().size() == 0)
return Optional.empty();
Optional<UnifyType> opt = finiteClosure.getLeftHandedType(((SuperType) typeSupD).getSuperedType().getName());
if(!opt.isPresent())
return Optional.empty();
UnifyType typeDgen = opt.get();
UnifyType typeSupDgen = new SuperType(typeDgen);
// Use of smArg instead of grArg because
// a in grArg(b) => b in smArg(a)
Set<UnifyType> smArg = finiteClosure.smArg(typeSupDgen);
opt = smArg.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
if(!opt.isPresent())
return Optional.empty();
// New RHS
UnifyType newRhs = null;
if(typeDs instanceof ReferenceType)
newRhs = new ExtendsType(typeDs);
else
newRhs = new ExtendsType(((SuperType) typeDs).getSuperedType());
// New LHS
UnifyType newLhs = opt.get();
TypeParams typeDParams = typeSupD.getTypeParams();
TypeParams typeSupDsgenParams = typeSupDgen.getTypeParams();
Unifier unif = new Unifier((PlaceholderType) typeSupDsgenParams.get(0), typeDParams.get(0));
for(int i = 1; i < typeDParams.size(); i++)
unif.Add((PlaceholderType) typeSupDsgenParams.get(i), typeDParams.get(i));
return Optional.of(new UnifyPair(unif.apply(newLhs), newRhs, PairOperator.SMALLERDOTWC));
}
/**
* Finds the permutation pi of the type arguments of two types based on the finite closure
* @param cArgs The type arguments of the type C whose arguments are permuted
* @param dArgs The type arguments of the other type D
* @return An array containing the values of pi for every type argument of C, or an empty array if the search failed.
*/
private int[] pi(TypeParams cArgs, TypeParams dArgs) {
Assert.assertEquals(cArgs.size(), dArgs.size());
int[] permutation = new int[dArgs.size()];
boolean succ = true;
for (int dArgIdx = 0; dArgIdx < dArgs.size() && succ; dArgIdx++) {
UnifyType dArg = dArgs.get(dArgIdx);
succ = false;
for (int pi = 0; pi < cArgs.size(); pi++)
if (cArgs.get(pi).getName().equals(dArg.getName())) {
permutation[dArgIdx] = pi;
succ = true;
break;
}
}
return succ ? permutation : new int[0];
}
@Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
Stack<UnifyType> occuringTypes = new Stack<>();
for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType());
}
while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1))
typeMap.put(t1, 0);
typeMap.put(t1, typeMap.get(t1)+1);
if(t1 instanceof ExtendsType)
occuringTypes.push(((ExtendsType) t1).getExtendedType());
if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType());
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
while(!result1.isEmpty()) {
UnifyPair pair = result1.poll();
PlaceholderType lhsType = null;
UnifyType rhsType;
if(pair.getPairOp() == PairOperator.EQUALSDOT
&& pair.getLhsType() instanceof PlaceholderType)
lhsType = (PlaceholderType) pair.getLhsType();
if(lhsType != null
&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
&& typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
&& !rhsType.getTypeParams().occurs(lhsType)) {
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(uni::apply).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(uni::apply).collect(Collectors.toCollection(LinkedList::new));
applied = true;
}
result.add(pair);
}
return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
}
@Override
public Optional<UnifyPair> reduceWildcardLow(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof ExtendsType))
return Optional.empty();
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof ExtendsType))
return Optional.empty();
return Optional.of(new UnifyPair(lhsType, ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> reduceWildcardUp(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof SuperType) || !(rhsType instanceof SuperType))
return Optional.empty();
return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), ((SuperType) lhsType).getSuperedType(), PairOperator.SMALLERDOT));
}
@Override
public Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof SuperType))
return Optional.empty();
return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), lhsType, PairOperator.SMALLERDOTWC));
}
@Override
public Optional<UnifyPair> reduceWildcardLowUp(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof SuperType))
return Optional.empty();
return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), ((SuperType) rhsType).getSuperedType(), PairOperator.EQUALSDOT));
}
@Override
public Optional<UnifyPair> reduceWildcardUpLow(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof SuperType) || !(rhsType instanceof ExtendsType))
return Optional.empty();
return Optional.of(new UnifyPair(((SuperType) lhsType).getSuperedType(), ((ExtendsType) rhsType).getExtendedType(), PairOperator.EQUALSDOT));
}
@Override
public Optional<UnifyPair> reduceWildcardLeft(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
return Optional.empty();
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType))
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
if(lhsType instanceof WildcardType)
return Optional.of(new UnifyPair(((WildcardType) lhsType).getWildcardedType(), rhsType, PairOperator.EQUALSDOT));
return Optional.empty();
}
@Override
public Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof FunNType) || !(rhsType instanceof FunNType))
return Optional.empty();
FunNType funNLhsType = (FunNType) lhsType;
FunNType funNRhsType = (FunNType) rhsType;
if(funNLhsType.getN() != funNRhsType.getN())
return Optional.empty();
Set<UnifyPair> result = new HashSet<UnifyPair>();
result.add(new UnifyPair(funNLhsType.getTypeParams().get(0), funNRhsType.getTypeParams().get(0), PairOperator.SMALLERDOT));
for(int i = 1; i < funNLhsType.getTypeParams().size(); i++)
result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof FunNType) || !(rhsType instanceof PlaceholderType))
return Optional.empty();
FunNType funNLhsType = (FunNType) lhsType;
Set<UnifyPair> result = new HashSet<UnifyPair>();
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length; i++)
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
result.add(new UnifyPair(funNLhsType.getTypeParams().get(0), freshPlaceholders[0], PairOperator.SMALLERDOT));
for(int i = 1; i < funNLhsType.getTypeParams().size(); i++)
result.add(new UnifyPair(freshPlaceholders[i], funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT));
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT));
return Optional.of(result);
}
@Override
public Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair) {
if(pair.getPairOp() != PairOperator.SMALLERDOT)
return Optional.empty();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType) || !(rhsType instanceof FunNType))
return Optional.empty();
FunNType funNRhsType = (FunNType) rhsType;
Set<UnifyPair> result = new HashSet<UnifyPair>();
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length; i++)
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
result.add(new UnifyPair(freshPlaceholders[0], funNRhsType.getTypeParams().get(0), PairOperator.SMALLERDOT));
for(int i = 1; i < funNRhsType.getTypeParams().size(); i++)
result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), freshPlaceholders[i], PairOperator.SMALLERDOT));
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT));
return Optional.of(result);
}
}
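The pi helper above matches type arguments by name to find the permutation between C's and D's parameter lists. The same idea on plain strings (illustrative only, not the project's TypeParams class):

import java.util.Arrays;

class PiSketch {
    // Returns pi with dArgs[d] == cArgs[pi[d]] for every d, or an empty array if some
    // argument of D does not occur among C's arguments (the search failed).
    static int[] pi(String[] cArgs, String[] dArgs) {
        int[] permutation = new int[dArgs.length];
        for (int d = 0; d < dArgs.length; d++) {
            int found = -1;
            for (int c = 0; c < cArgs.length; c++)
                if (cArgs[c].equals(dArgs[d])) { found = c; break; }
            if (found < 0) return new int[0];
            permutation[d] = found;
        }
        return permutation;
    }

    public static void main(String[] args) {
        // e.g. C = Map<K, V> and D = OtherMap<V, K>: D's first argument is C's second one.
        System.out.println(Arrays.toString(pi(new String[] {"K", "V"}, new String[] {"V", "K"})));
        // prints [1, 0]
    }
}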

View File

@ -1,11 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;
public interface Unifier {
public Menge<Menge<Pair>> apply (Menge<Pair> E);
}

View File

@ -0,0 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public interface Unifikationsalgorithmus {
public Set<Set<UnifyPair>> apply (Set<UnifyPair> E);
}

4215
src/de/dhbwstuttgart/typeinference/unify/Unify.java Executable file → Normal file

File diff suppressed because it is too large

View File

@ -4,11 +4,16 @@ import java.util.Optional;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SimpleType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
/**
*
* @author Florian Steurer
*/
public interface IFiniteClosure {
/**
@ -37,18 +42,23 @@ public interface IFiniteClosure {
*/
public Set<UnifyType> smArg(UnifyType type);
public Set<UnifyType> grArg(SimpleType type);
public Set<UnifyType> smArg(SimpleType type);
public Set<UnifyType> grArg(ReferenceType type);
public Set<UnifyType> smArg(ReferenceType type);
public Set<UnifyType> grArg(ExtendsType type);
public Set<UnifyType> smArg(ExtendsType type);
public Set<UnifyType> grArg(SuperType type);
public Set<UnifyType> smArg(SuperType type);
public Set<UnifyType> grArg(PlaceholderType type);
public Set<UnifyType> smArg(PlaceholderType type);
public Set<UnifyType> grArg(FunNType type);
public Set<UnifyType> smArg(FunNType type);
public Optional<UnifyType> getGenericType(String typeName);
public Set<UnifyType> getAllTypes(String typeName);
public Optional<UnifyType> getLeftHandedType(String typeName);
public Set<UnifyType> getAncestors(UnifyType t);
public Set<UnifyType> getChildren(UnifyType t);
public Set<UnifyType> getAllTypesByName(String typeName);
}

View File

@ -3,28 +3,69 @@ package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.Optional;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
/**
* Contains the inference rules that are applied to the set Eq.
* @author Florian Steurer
*/
public interface IRuleSet {
public Optional<MPair> reduceUp(MPair pair);
public Optional<MPair> reduceLow(MPair pair);
public Optional<MPair> reduceUpLow(MPair pair);
public Optional<Set<MPair>> reduceExt(MPair pair);
public Optional<Set<MPair>> reduceSup(MPair pair);
public Optional<Set<MPair>> reduceEq(MPair pair);
public Optional<Set<MPair>> reduce1(MPair pair);
public Optional<Set<MPair>> reduce2(MPair pair);
public Optional<UnifyPair> reduceUp(UnifyPair pair);
public Optional<UnifyPair> reduceLow(UnifyPair pair);
public Optional<UnifyPair> reduceUpLow(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair);
public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair);
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair);
public Optional<Set<UnifyPair>> reduce2(UnifyPair pair);
public boolean erase1(MPair pair);
public boolean erase2(MPair pair);
public boolean erase3(MPair pair);
/*
* Missing Reduce-Rules for Wildcards
*/
public Optional<UnifyPair> reduceWildcardLow(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardUp(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardLowUp(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardUpLow(UnifyPair pair);
public Optional<UnifyPair> reduceWildcardLeft(UnifyPair pair);
public Optional<MPair> swap(MPair pair);
/*
* FunN Rules
*/
public Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair);
public Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair);
public Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair);
public Optional<MPair> adapt(MPair pair);
public Optional<MPair> adaptExt(MPair pair);
public Optional<MPair> adaptSup(MPair pair);
/**
* Checks whether the erase1-Rule applies to the pair.
* @return True if the pair is erasable, false otherwise.
*/
public boolean erase1(UnifyPair pair);
public Optional<Set<MPair>> subst(Set<MPair> pair);
/**
* Checks whether the erase2-Rule applies to the pair.
* @return True if the pair is erasable, false otherwise.
*/
public boolean erase2(UnifyPair pair);
/**
* Checks whether the erase3-Rule applies to the pair.
* @return True if the pair is erasable, false otherwise.
*/
public boolean erase3(UnifyPair pair);
public Optional<UnifyPair> swap(UnifyPair pair);
public Optional<UnifyPair> adapt(UnifyPair pair);
public Optional<UnifyPair> adaptExt(UnifyPair pair);
public Optional<UnifyPair> adaptSup(UnifyPair pair);
/**
* Applies the subst-Rule to a set of pairs (usually Eq').
* @param pairs The set of pairs where the subst rule should apply.
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
*/
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs);
}

View File

@ -3,6 +3,14 @@ package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List;
import java.util.Set;
/**
* Contains operations on sets.
* @author Florian Steurer
*/
public interface ISetOperations {
/**
* Calculates the cartesian product of the sets.
* @return The cartesian product
*/
<B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets);
}

View File

@ -14,8 +14,20 @@ import de.dhbwstuttgart.typeinference.unify.model.Unifier;
*/
public interface IUnify {
/**
* Finds the most general unifier sigma of the set {t1,...,tn} so that
* sigma(t1) = sigma(t2) = ... = sigma(tn).
* @param terms The set of terms to be unified
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
*/
public Optional<Unifier> unify(Set<UnifyType> terms);
/**
* Finds the most general unifier sigma of the set {t1,...,tn} so that
* sigma(t1) = sigma(t2) = ... = sigma(tn).
* @param terms The set of terms to be unified
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
*/
default public Optional<Unifier> unify(UnifyType... terms) {
return unify(Arrays.stream(terms).collect(Collectors.toSet()));
}

View File

@ -7,38 +7,28 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
/**
* An extends wildcard type "? extends T".
*/
public final class ExtendsType extends UnifyType {
/**
* The extended type
*/
private UnifyType extendedType;
public final class ExtendsType extends WildcardType {
/**
* Creates a new extends wildcard type.
* @param extendedType The extended type e.g. Integer in "? extends Integer"
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType.getTypeParams());
this.extendedType = extendedType;
super("? extends " + extendedType.getName(), extendedType);
}
/**
* Gets the type extended by this wildcard e.g. "Integer" for "? extends Integer"
* @return The extended type.
*/
public UnifyType getExtendedType() {
return extendedType;
return wildcardedType;
}
@Override
public TypeParams getTypeParams() {
return extendedType.getTypeParams();
return wildcardedType.getTypeParams();
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return new ExtendsType(extendedType.setTypeParams(newTp));
return new ExtendsType(wildcardedType.setTypeParams(newTp));
}
@Override
@ -53,12 +43,16 @@ public final class ExtendsType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
return new ExtendsType(extendedType.apply(unif));
return new ExtendsType(wildcardedType.apply(unif));
}
@Override
public int hashCode() {
return extendedType.hashCode() + 17;
/*
* It is important that the prime that is added is different to the prime added in hashCode() of SuperType.
* Otherwise ? extends T and ? super T have the same hashCode() for every Type T.
*/
return wildcardedType.hashCode() + 229;
}
@Override
@ -67,11 +61,12 @@ public final class ExtendsType extends UnifyType {
return false;
ExtendsType other = (ExtendsType) obj;
return other.getExtendedType().equals(extendedType);
return other.getWildcardedType().equals(wildcardedType);
}
@Override
public String toString() {
return "? extends " + extendedType;
return "? extends " + wildcardedType;
}
}
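The comment on the added prime can be made concrete with a small calculation: if ExtendsType and SuperType both added the same constant c, then for every wildcarded type T, hash(? extends T) = T.hashCode() + c = hash(? super T), so the two different wildcards around the same type would always collide. Adding 229 here and a different prime in SuperType (as the comment requires) avoids that systematic collision.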

View File

@ -1,25 +1,33 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
public class FiniteClosure implements IFiniteClosure {
private HashMap<UnifyType, Node<UnifyType>> inheritanceGraph;
private HashMap<String, HashSet<Node<UnifyType>>> strInheritanceGraph;
private Set<UnifyPair> pairs;
//private Set<UnifyType> basicTypes;
//TODO: pass these in via the constructor so that types without an extends relation are handled as well (so the FC also knows these types)
//(Alternative: always add an extends-Object relation)
public FiniteClosure(Set<MPair> pairs) {
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
public FiniteClosure(Set<UnifyPair> pairs) {
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
// Build the transitive closure of the inheritance tree
for(MPair pair : pairs) {
for(UnifyPair pair : pairs) {
if(pair.getPairOp() != PairOperator.SMALLER)
continue;
@ -50,47 +58,216 @@ public class FiniteClosure implements IFiniteClosure {
strInheritanceGraph.get(key.getName()).add(inheritanceGraph.get(key));
}
}
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
*/
@Override
public Set<UnifyType> smaller(UnifyType type) {
if(!inheritanceGraph.containsKey(type))
return new HashSet<>();
if(type instanceof FunNType)
return computeSmallerFunN((FunNType) type);
Set<UnifyType> result = inheritanceGraph.get(type).getContentOfDescendants();
result.add(type);
return computeSmaller(type);
}
private Set<UnifyType> computeSmaller(UnifyType type) {
if(inheritanceGraph.containsKey(type)) {
Set<UnifyType> result = new HashSet<>();
result.add(type);
result.addAll(inheritanceGraph.get(type).getContentOfDescendants());
return result;
}
IUnify unify = new MartelliMontanariUnify();
Set<UnifyType> result1 = new HashSet<>();
// if T = T' then T <=* T'
result1.add(type);
{ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : type.getTypeParams())
paramCandidates.add(smArg(param));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result1.add(type.setTypeParams(newParams));}
Set<UnifyType> result2 = new HashSet<>();
if (strInheritanceGraph.containsKey(type.getName())) {
HashSet<UnifyType> candidates = new HashSet<>();
strInheritanceGraph.get(type.getName()).forEach(x -> candidates.add(x.getContent()));
for(UnifyType typePrime : result1) {
for (UnifyType theta2 : candidates) {
Optional<Unifier> sigma2Opt = unify.unify(typePrime, theta2);
if (!sigma2Opt.isPresent())
continue;
Unifier sigma2 = sigma2Opt.get();
sigma2.swapPlaceholderSubstitutions(typePrime.getTypeParams().toArray());
if(type.equals(theta2))
continue;
Set<UnifyType> theta1s = smaller(theta2);
for (UnifyType theta1 : theta1s) {
// Because only the most general type is calculated, sigma1 = sigma2
UnifyType sigma1Theta1 = sigma2.apply(theta1);
result2.add(sigma1Theta1);
}
}
}
}
else
result2 = result1;
Set<UnifyType> result3 = new HashSet<>();
for(UnifyType t : result2) {
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : t.getTypeParams())
paramCandidates.add(smArg(param));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult) {
UnifyType tPrime = t.setTypeParams(newParams);
if(tPrime.equals(t))
result3.add(t);
else
result3.addAll(smaller(tPrime));
}
}
return result3;
}
private Set<UnifyType> computeSmallerFunN(FunNType type) {
Set<UnifyType> result = new HashSet<>();
// if T = T' then T <=* T'
result.add(type);
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
paramCandidates.add(smaller(type.getTypeParams().get(0)));
for (int i = 1; i < type.getTypeParams().size(); i++)
paramCandidates.add(greater(type.getTypeParams().get(i)));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result.add(type.setTypeParams(newParams));
return result;
}
/**
* Returns all types of the finite closure that are supertypes of the argument.
* @return The set of supertypes of the argument.
*/
@Override
public Set<UnifyType> greater(UnifyType type) {
if(!inheritanceGraph.containsKey(type))
return new HashSet<>();
if(type instanceof FunNType)
return computeGreaterFunN((FunNType) type);
return computeGreater(type);
}
protected Set<UnifyType> computeGreater(UnifyType type) {
IUnify unify = new MartelliMontanariUnify();
Set<UnifyType> result1 = new HashSet<>();
if(inheritanceGraph.containsKey(type))
result1.addAll(inheritanceGraph.get(type).getContentOfPredecessors());
// if T = T' then T <=* T'
result1.add(type);
{ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : type.getTypeParams())
paramCandidates.add(grArg(param));
Set<TypeParams> permResult = new HashSet<>();
permuteParams(paramCandidates, 0, permResult, new UnifyType[paramCandidates.size()]);
for (TypeParams newParams : permResult)
result1.add(type.setTypeParams(newParams));}
Set<UnifyType> result2 = new HashSet<>();
if (strInheritanceGraph.containsKey(type.getName()) && !inheritanceGraph.containsKey(type)) {
HashSet<UnifyType> candidates = new HashSet<>();
strInheritanceGraph.get(type.getName()).forEach(x -> candidates.add(x.getContent()));
for(UnifyType typePrime : result1) {
for (UnifyType theta2 : candidates) {
Optional<Unifier> sigma2Opt = unify.unify(typePrime, theta2);
if (!sigma2Opt.isPresent())
continue;
if(type.equals(theta2))
continue;
Unifier sigma2 = sigma2Opt.get();
sigma2.swapPlaceholderSubstitutions(typePrime.getTypeParams().toArray());
Set<UnifyType> theta1s = greater(theta2);
for (UnifyType theta1 : theta1s) {
// Because only the most general type is calculated, sigma1 = sigma2
UnifyType sigma1Theta1 = sigma2.apply(theta1);
result2.add(sigma1Theta1);
}
}
}
}
result2.addAll(result1);
Set<UnifyType> result3 = new HashSet<>();
for(UnifyType t : result2) {
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
for (UnifyType param : t.getTypeParams())
paramCandidates.add(grArg(param));
Set<TypeParams> permResult = new HashSet<>();
permuteParams(paramCandidates, 0, permResult, new UnifyType[paramCandidates.size()]);
for (TypeParams newParams : permResult) {
UnifyType tPrime = t.setTypeParams(newParams);
if(tPrime.equals(t))
result3.add(t);
else
result3.addAll(greater(tPrime));
}
}
return result3;
}
protected Set<UnifyType> computeGreaterFunN(FunNType type) {
Set<UnifyType> result = new HashSet<>();
// if T = T' then T <=* T'
result.add(type);
ArrayList<Set<UnifyType>> paramCandidates = new ArrayList<>();
paramCandidates.add(greater(type.getTypeParams().get(0)));
for (int i = 1; i < type.getTypeParams().size(); i++)
paramCandidates.add(smaller(type.getTypeParams().get(i)));
Set<TypeParams> permResult = permuteParams(paramCandidates);
for (TypeParams newParams : permResult)
result.add(type.setTypeParams(newParams));
Set<UnifyType> result = inheritanceGraph.get(type).getContentOfPredecessors();
result.add(type);
return result;
}
@Override
public Set<UnifyType> grArg(UnifyType type) {
return type.grArg(this);
}
@Override
public Set<UnifyType> grArg(SimpleType type) {
if(!inheritanceGraph.containsKey(type))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
@Override
public Set<UnifyType> grArg(ReferenceType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type).forEach(x -> result.add(new SuperType(x)));
@ -99,12 +276,15 @@ public class FiniteClosure implements IFiniteClosure {
return result;
}
@Override
public Set<UnifyType> grArg(FunNType type) {
throw new NotImplementedException();
}
@Override
public Set<UnifyType> grArg(ExtendsType type) {
if(!inheritanceGraph.containsKey(type.getExtendedType()))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
@ -113,23 +293,25 @@ public class FiniteClosure implements IFiniteClosure {
return result;
}
@Override
public Set<UnifyType> grArg(SuperType type) {
if(!inheritanceGraph.containsKey(type.getSuperedType()))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
smaller(t).forEach(x -> result.add(new SuperType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(PlaceholderType type) {
return new HashSet<>();
HashSet<UnifyType> result = new HashSet<>();
result.add(type);
//result.add(new SuperType(type));
//result.add(new ExtendsType(type));
return result;
}
@Override
@ -137,24 +319,23 @@ public class FiniteClosure implements IFiniteClosure {
return type.smArg(this);
}
@Override
public Set<UnifyType> smArg(SimpleType type) {
if(!inheritanceGraph.containsKey(type))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
@Override
public Set<UnifyType> smArg(ReferenceType type) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> smArg(FunNType type) {
throw new NotImplementedException();
}
@Override
public Set<UnifyType> smArg(ExtendsType type) {
if(!inheritanceGraph.containsKey(type.getExtendedType()))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
@ -168,12 +349,10 @@ public class FiniteClosure implements IFiniteClosure {
}
@Override
public Set<UnifyType> smArg(SuperType type) {
if(!inheritanceGraph.containsKey(type.getSuperedType()))
return new HashSet<UnifyType>();
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
@ -186,31 +365,87 @@ public class FiniteClosure implements IFiniteClosure {
return result;
}
@Override
public Set<UnifyType> smArg(PlaceholderType type) {
return new HashSet<>();
HashSet<UnifyType> result = new HashSet<>();
result.add(type);
return result;
}
public boolean isGenericType(UnifyType t) {
if(t.getTypeParams().size() == 0)
return true;
if(!strInheritanceGraph.containsKey(t.getName()))
return false;
for(UnifyPair pair : pairs)
if(pair.getLhsType().equals(t))
return true;
return false;
}
@Override
public Set<UnifyType> getAllTypesByName(String typeName) {
if(!strInheritanceGraph.containsKey(typeName))
return new HashSet<>();
return strInheritanceGraph.get(typeName).stream().map(x -> x.getContent()).collect(Collectors.toCollection(HashSet::new));
}
@Override
public Optional<UnifyType> getGenericType(String typeName) {
public Optional<UnifyType> getLeftHandedType(String typeName) {
if(!strInheritanceGraph.containsKey(typeName))
return Optional.empty();
HashSet<Node<UnifyType>> candidates = strInheritanceGraph.get(typeName);
for(Node<UnifyType> node : candidates) {
UnifyType candidate = node.getContent();
if(candidate.getTypeParams().arePlaceholders())
return Optional.of(candidate);
}
for(UnifyPair pair : pairs)
if(pair.getLhsType().getName().equals(typeName))
return Optional.of(pair.getLhsType());
return Optional.empty();
}
@Override
public Set<UnifyType> getAllTypes(String typeName) {
if(!strInheritanceGraph.containsKey(typeName))
public Set<UnifyType> getAncestors(UnifyType t) {
if(!inheritanceGraph.containsKey(t))
return new HashSet<>();
return strInheritanceGraph.get(typeName).stream().map(x -> x.getContent()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyType> result = inheritanceGraph.get(t).getContentOfPredecessors();
result.add(t);
return result;
}
@Override
public Set<UnifyType> getChildren(UnifyType t) {
if(!inheritanceGraph.containsKey(t))
return new HashSet<>();
Set<UnifyType> result = inheritanceGraph.get(t).getContentOfDescendants();
result.add(t);
return result;
}
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
protected void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
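permuteParams enumerates the cartesian product of the per-position candidate sets and wraps each combination in a TypeParams. A small sketch, to be read as if executed inside FiniteClosure (the method is protected), with made-up element types:
ArrayList<Set<UnifyType>> candidates = new ArrayList<>();
candidates.add(new HashSet<>(Arrays.asList(new ReferenceType("Integer"), new ReferenceType("Number"))));
candidates.add(new HashSet<>(Arrays.asList(new ReferenceType("String"))));
Set<TypeParams> perms = permuteParams(candidates);
// perms holds one TypeParams per combination of the two sets:
// (Integer, String) and (Number, String), in no particular order.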
@Override
public String toString(){
return this.inheritanceGraph.toString();
}
}

View File

@ -0,0 +1,68 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
public class FunNType extends UnifyType {
protected FunNType(TypeParams p) {
super("FuN", p);
}
public static FunNType getFunNType(TypeParams tp) {
if(!validateTypeParams(tp))
throw new IllegalArgumentException("Invalid TypeParams for a FunNType: " + tp);
return new FunNType(tp);
}
private static boolean validateTypeParams(TypeParams tp) {
if(tp.size() == 0)
return false;
for(UnifyType t : tp)
if(t instanceof WildcardType)
return false;
return true;
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return getFunNType(newTp);
}
public int getN() {
return typeParams.size()-1;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc) {
return fc.smArg(this);
}
@Override
Set<UnifyType> grArg(IFiniteClosure fc) {
return fc.grArg(this);
}
@Override
UnifyType apply(Unifier unif) {
// Apply the unifier to the type parameters, analogously to ReferenceType.apply.
return getFunNType(typeParams.apply(unif));
}
@Override
public int hashCode() {
return 31 + typeParams.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof FunNType))
return false;
FunNType other = (FunNType) obj;
return other.getTypeParams().equals(typeParams);
}
}
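A brief usage sketch for FunNType (the example types are made up; which parameter position holds the return type is not fixed by this class):
UnifyType ret = new ReferenceType("Integer");
UnifyType arg = new ReferenceType("String");
FunNType fun1 = FunNType.getFunNType(new TypeParams(ret, arg));
// getN() counts the parameters minus one, here 1.
assert fun1.getN() == 1;
// Wildcards and empty parameter lists are rejected by validateTypeParams:
// FunNType.getFunNType(new TypeParams()) throws IllegalArgumentException.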

View File

@ -1,97 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.model;
public class MPair {
public enum PairOperator {
SMALLER,
SMALLERDOT,
SMALLERDOTWC,
EQUALS,
EQUALSDOT;
@Override
public String toString() {
switch (this) {
case SMALLER:
return "<";
case SMALLERDOT:
return "<.";
case SMALLERDOTWC:
return "<.?";
case EQUALS:
return "=";
default:
return "=.";
}
};
}
private UnifyType lhs;
private UnifyType rhs;
private PairOperator pairOp;
/*public MPair(Type t1, Type t2) {
lhs = t1;
rhs = t2;
pairOp = PairOperator.SMALLER;
}*/
public MPair(UnifyType t1, UnifyType t2, PairOperator op) {
lhs = t1;
rhs = t2;
pairOp = op;
}
public UnifyType getLhsType() {
return lhs;
}
public UnifyType getRhsType() {
return rhs;
}
public PairOperator getPairOp() {
return pairOp;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof MPair))
return false;
MPair other = (MPair) obj;
return other.getPairOp() == pairOp
&& other.getLhsType().equals(lhs)
&& other.getRhsType().equals(rhs);
}
/**
* Substitutes the occurrences of Type t on the left or right side of the pair with the Type subst.
* @param t Type to be replaced.
* @param subst The type replacing t.
* @return A pair where occurrences of t are replaced by subst.
*/
public MPair substitute(UnifyType t, UnifyType subst) {
UnifyType newlhs = lhs;
if(lhs.equals(t)) newlhs = subst;
UnifyType newrhs = rhs;
if(rhs.equals(t)) newrhs = subst;
if(newlhs == lhs && newrhs == rhs) return this;
return new MPair(newlhs, newrhs, pairOp);
}
@Override
public int hashCode() {
return 17 + 31 * lhs.hashCode() + 31 * rhs.hashCode() + 31 * pairOp.hashCode();
}
@Override
public String toString() {
return "(" + lhs + " " + pairOp + " " + rhs + ")";
}
}

View File

@ -4,7 +4,7 @@ import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
public class Node<T> {
class Node<T> {
private T content;
private HashSet<Node<T>> predecessors = new HashSet<>();

View File

@ -0,0 +1,22 @@
package de.dhbwstuttgart.typeinference.unify.model;
public enum PairOperator {
SMALLER,
SMALLERDOT,
SMALLERDOTWC,
EQUALSDOT;
@Override
public String toString() {
switch (this) {
case SMALLER:
return "<";
case SMALLERDOT:
return "<.";
case SMALLERDOTWC:
return "<.?";
default:
return "=.";
}
};
}

View File

@ -1,13 +1,50 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
public final class PlaceholderType extends UnifyType{
protected static final HashSet<String> EXISTING_PLACEHOLDERS = new HashSet<String>();
protected static String nextName = "gen_";
protected static Random rnd = new Random(43558747548978L);
private final boolean IsGenerated;
public PlaceholderType(String name) {
super(name);
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name);
IsGenerated = false;
}
protected PlaceholderType(String name, boolean isGenerated) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name);
IsGenerated = isGenerated;
}
public static PlaceholderType freshPlaceholder() {
String name = nextName + randomChar();
// Append random chars until the name is not in use yet.
while(EXISTING_PLACEHOLDERS.contains(name))
name += randomChar();
return new PlaceholderType(name, true);
}
/**
* Returns a random lowercase char between 'a' and 'v' ('a' + nextInt(22)).
*/
private static char randomChar() {
return (char) (rnd.nextInt(22) + 97);
}
public boolean isGenerated() {
return IsGenerated;
}
@Override
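A short sketch of the placeholder factory (the names are illustrative): freshly generated placeholders avoid all registered names and are flagged as generated, while user-supplied ones are not.
PlaceholderType named = new PlaceholderType("a");           // registered, isGenerated() == false
PlaceholderType fresh = PlaceholderType.freshPlaceholder(); // unique name, isGenerated() == true
assert fresh.isGenerated() && !named.isGenerated();
assert !fresh.getName().equals(named.getName()); // fresh names avoid EXISTING_PLACEHOLDERS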

View File

@ -4,12 +4,12 @@ import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
public final class SimpleType extends UnifyType {
public SimpleType(String name, UnifyType... typeParams) {
public final class ReferenceType extends UnifyType {
public ReferenceType(String name, UnifyType... typeParams) {
super(name, new TypeParams(typeParams));
}
private SimpleType(String name, TypeParams params) {
public ReferenceType(String name, TypeParams params) {
super(name, params);
}
@ -22,28 +22,28 @@ public final class SimpleType extends UnifyType {
Set<UnifyType> grArg(IFiniteClosure fc) {
return fc.grArg(this);
}
@Override
UnifyType apply(Unifier unif) {
return new SimpleType(typeName, typeParams.apply(unif));
return new ReferenceType(typeName, typeParams.apply(unif));
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return new SimpleType(new String(typeName), newTp);
return new ReferenceType(new String(typeName), newTp);
}
@Override
public int hashCode() {
return typeName.hashCode();
return 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof SimpleType))
if(!(obj instanceof ReferenceType))
return false;
SimpleType other = (SimpleType) obj;
ReferenceType other = (ReferenceType) obj;
if(!other.getName().equals(typeName))
return false;

View File

@ -4,32 +4,28 @@ import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
public final class SuperType extends UnifyType {
private UnifyType superedType;
public final class SuperType extends WildcardType {
public SuperType(UnifyType superedType) {
super("? super " + superedType.getName(), superedType.getTypeParams());
this.superedType = superedType;
super("? super " + superedType.getName(), superedType);
}
public UnifyType getSuperedType() {
return superedType;
return wildcardedType;
}
@Override
public String toString() {
return "? super " + superedType;
return "? super " + wildcardedType;
}
@Override
public TypeParams getTypeParams() {
return superedType.getTypeParams();
return wildcardedType.getTypeParams();
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return new SuperType(superedType.setTypeParams(newTp));
return new SuperType(wildcardedType.setTypeParams(newTp));
}
@Override
@ -44,12 +40,16 @@ public final class SuperType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
return new SuperType(superedType.apply(unif));
return new SuperType(wildcardedType.apply(unif));
}
@Override
public int hashCode() {
return superedType.hashCode() + 17;
/*
* It is important that the prime added here differs from the prime added in hashCode() of ExtendsType.
* Otherwise ? extends T and ? super T would have the same hashCode() for every type T.
*/
return wildcardedType.hashCode() + 3917;
}
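The property the comment above demands can be sketched as follows (assuming ExtendsType adds a constant other than 3917, which is exactly what the comment requires):
UnifyType number = new ReferenceType("Number");
// ? extends Number and ? super Number must not collide:
assert new ExtendsType(number).hashCode() != new SuperType(number).hashCode();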
@Override
@ -58,6 +58,6 @@ public final class SuperType extends UnifyType {
return false;
SuperType other = (SuperType) obj;
return other.getSuperedType().equals(superedType);
return other.getSuperedType().equals(wildcardedType);
}
}

View File

@ -2,11 +2,20 @@ package de.dhbwstuttgart.typeinference.unify.model;
import java.util.Arrays;
import java.util.Iterator;
import de.dhbwstuttgart.typeinference.Menge;
public final class TypeParams implements Iterable<UnifyType>{
private final UnifyType[] typeParams;
public TypeParams(UnifyType... types) {
public TypeParams(Menge<UnifyType> types){
typeParams = new UnifyType[types.size()];
for(int i=0;i<types.size();i++){
typeParams[i] = types.get(i);
}
}
public TypeParams(UnifyType... types) {
typeParams = types;
}
@ -68,6 +77,9 @@ public final class TypeParams implements Iterable<UnifyType>{
return new TypeParams(newparams);
}
public UnifyType[] toArray() {
return Arrays.copyOf(typeParams, typeParams.length);
}
@Override
public Iterator<UnifyType> iterator() {

View File

@ -5,23 +5,27 @@ import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Function;
public class Unifier implements Function<UnifyType, UnifyType> {
public class Unifier implements Function<UnifyType, UnifyType> /*, Set<MPair>*/ { // TODO implement Set
private HashMap<PlaceholderType, UnifyType> substitutions = new HashMap<>();
public static Unifier IDENTITY = new Unifier();
public Unifier(PlaceholderType source, UnifyType target) {
substitutions.put(source, target);
}
/**
* Identity function as an "unifier".
*/
public Unifier() {
protected Unifier() {
}
public static Unifier Identity() {
return new Unifier();
}
public void Add(PlaceholderType source, UnifyType target) {
Unifier tempU = new Unifier(source, target);
for(PlaceholderType pt : substitutions.keySet())
substitutions.put(pt, substitutions.get(pt).apply(tempU));
@ -33,21 +37,33 @@ public class Unifier implements Function<UnifyType, UnifyType> {
return t.apply(this);
}
public MPair apply(MPair p) {
return new MPair(this.apply(p.getLhsType()), this.apply(p.getRhsType()), p.getPairOp());
public UnifyPair apply(UnifyPair p) {
return new UnifyPair(this.apply(p.getLhsType()), this.apply(p.getRhsType()), p.getPairOp());
}
public boolean hasSubstitute(PlaceholderType t) {
return substitutions.containsKey(t);
}
public UnifyType getSubstitute(UnifyType t) {
public UnifyType getSubstitute(PlaceholderType t) {
return substitutions.get(t);
}
public Set<Entry<PlaceholderType, UnifyType>> getSubstitutions() {
return substitutions.entrySet();
}
public void swapPlaceholderSubstitutions(UnifyType... targetParams) {
for(UnifyType tph : targetParams) {
if(!(tph instanceof PlaceholderType))
continue;
if(substitutions.containsKey(tph) && substitutions.get(tph) instanceof PlaceholderType) {
PlaceholderType newLhs = (PlaceholderType) substitutions.get(tph);
substitutions.remove(tph);
substitutions.put(newLhs, tph);
}
}
}
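A hedged usage sketch for Unifier: Add composes the new mapping with the existing substitutions (the loop above rewrites earlier images; inserting the new mapping itself happens in the collapsed part of the hunk), and apply instantiates both plain types and whole pairs.
PlaceholderType a = new PlaceholderType("a");
PlaceholderType b = new PlaceholderType("b");
Unifier sigma = new Unifier(a, new ReferenceType("List", b)); // { a -> List<b> }
sigma.Add(b, new ReferenceType("Integer"));                   // now a -> List<Integer>, b -> Integer
UnifyPair p = new UnifyPair(a, new ReferenceType("Object"), PairOperator.SMALLERDOT);
UnifyPair q = sigma.apply(p);                                 // expected: (List<Integer> <. Object)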
@Override
public String toString() {

Some files were not shown because too many files have changed in this diff