forked from JavaTX/JavaCompilerCore
Umstellung auf MPair beginnen (begin the switch to MPair)
parent f788b74f26
commit 1e9d0517f2
@ -47,7 +47,6 @@ import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.unify.FC_TTO;
import de.dhbwstuttgart.typeinference.unify.Unify;

import org.apache.commons.bcel6.generic.*;
@ -625,7 +624,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
*/
// ino.end
// ino.method.TRProg.23110.definition
public ConstraintsSet typeReconstruction(FC_TTO supportData, TypeAssumptions globalAssumptions)
public ConstraintsSet typeReconstruction(TypeAssumptions globalAssumptions)
// ino.end
// ino.method.TRProg.23110.body
{
@ -637,8 +636,7 @@ public class Class extends GTVDeclarationContext implements AClassOrInterface, I
//////////////////////////////
inferencelog.info("Rufe TRStart()...", Section.TYPEINFERENCE);

typinferenzLog.debug("Erstellte FiniteClosure: "+supportData, Section.TYPEINFERENCE);
//////////////////////////////
//////////////////////////////
// Ab hier ...
// @author A10023 - Andreas Stadelmeier:
//////////////////////////////
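The hunk above drops the FC_TTO parameter from Class.typeReconstruction, so a caller only hands in the global assumptions and the finite closure is produced later from those assumptions. A minimal, self-contained sketch of the call-site change; every class name below is a simplified stand-in for the project classes, not the real API:

```java
// Hypothetical stand-ins, only to illustrate the call-site change.
class TypeAssumptions {}
class ConstraintsSet {}

class ClassDecl {
    // New shape: the finite closure is no longer a parameter; it is built later,
    // from the assumptions, when the constraints are actually unified.
    ConstraintsSet typeReconstruction(TypeAssumptions globalAssumptions) {
        return new ConstraintsSet();
    }
}

public class CallSiteSketch {
    public static void main(String[] args) {
        TypeAssumptions globalAssumptions = new TypeAssumptions();
        ClassDecl klasse = new ClassDecl();
        // Before this commit the caller had to thread the finite closure through:
        //   klasse.typeReconstruction(supportData, globalAssumptions);
        // After the change only the assumptions are passed:
        ConstraintsSet constraints = klasse.typeReconstruction(globalAssumptions);
        System.out.println("collected constraints: " + constraints);
    }
}
```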
@ -9,6 +9,8 @@ import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.Hashtable;
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
|
||||
@ -23,6 +25,7 @@ import de.dhbwstuttgart.myexception.JVMCodeException;
|
||||
import de.dhbwstuttgart.myexception.SCClassException;
|
||||
import de.dhbwstuttgart.myexception.SCException;
|
||||
import de.dhbwstuttgart.parser.JavaClassName;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
|
||||
import de.dhbwstuttgart.syntaxtree.misc.DeclId;
|
||||
import de.dhbwstuttgart.syntaxtree.misc.UsedId;
|
||||
import de.dhbwstuttgart.syntaxtree.modifier.Modifiers;
|
||||
@ -37,6 +40,7 @@ import de.dhbwstuttgart.typeinference.ByteCodeResult;
|
||||
import de.dhbwstuttgart.typeinference.ConstraintsSet;
|
||||
import de.dhbwstuttgart.typeinference.FunNInterface;
|
||||
import de.dhbwstuttgart.typeinference.FunNMethod;
|
||||
import de.dhbwstuttgart.typeinference.KomplexeMenge;
|
||||
import de.dhbwstuttgart.typeinference.Pair;
|
||||
import de.dhbwstuttgart.typeinference.ResultSet;
|
||||
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
|
||||
@ -48,18 +52,14 @@ import de.dhbwstuttgart.typeinference.assumptions.ParameterAssumption;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
|
||||
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
|
||||
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
|
||||
import de.dhbwstuttgart.typeinference.unify.FC_TTO;
|
||||
import de.dhbwstuttgart.typeinference.unify.Unifier;
|
||||
import de.dhbwstuttgart.typeinference.unify.Unify;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.MPair;
|
||||
|
||||
|
||||
|
||||
|
||||
// ino.class.SourceFile.21355.declaration
|
||||
public class SourceFile
|
||||
extends SyntaxTreeNode
|
||||
// ino.end
|
||||
// ino.class.SourceFile.21355.body
|
||||
{
|
||||
// ino.attribute.LOAD_BASIC_ASSUMPTIONS_FROM_JRE.21358.decldescription type=javadoc
|
||||
/**
|
||||
@ -170,17 +170,13 @@ public class SourceFile
|
||||
this.KlassenVektor = classDefinitions;
|
||||
}
|
||||
|
||||
// ino.attribute.imports.21382.decldescription type=javadoc
|
||||
/**
|
||||
* HOTI 4.5.06
|
||||
* Beinhaltet alle Imports des aktuell geparsten Files
|
||||
* in Form einer UsedId
|
||||
*/
|
||||
// ino.end
|
||||
// ino.attribute.imports.21382.declaration
|
||||
private ImportDeclarations imports=new ImportDeclarations();
|
||||
// ino.end
|
||||
// ino.attribute.baseTypeTranslationTable.21385.decldescription type=javadoc

/**
* Table zum Übersetzen der nicht implementierten Base-Types:
* Überall im Compiler wird statt bspw. int Integer verwendet
@ -189,24 +185,13 @@ public class SourceFile
* der JRE gelieferten Base-Typen (int,char, etc) und die Objekt-
* Typen umwandeln können
*/
// ino.end
// ino.attribute.baseTypeTranslationTable.21385.declaration
private Hashtable<String,String> baseTypeTranslationTable;
// ino.end
|
||||
|
||||
|
||||
|
||||
|
||||
// ino.method.addElement.21394.defdescription type=javadoc
|
||||
/**
|
||||
* Fuegt ein neues Element (Interface oder Klasse) hinzu.
|
||||
* @param c
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.addElement.21394.definition
|
||||
public void addElement(AClassOrInterface e)
|
||||
// ino.end
|
||||
// ino.method.addElement.21394.body
|
||||
{
|
||||
if (e instanceof Class) {
|
||||
KlassenVektor.addElement((Class) e);
|
||||
@ -214,447 +199,8 @@ public class SourceFile
|
||||
InterfaceVektor.addElement((Interface) e);
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
|
||||
|
||||
// ino.method.codegen.21397.defdescription type=javadoc
|
||||
/**
|
||||
* Startet die Bytecodegenerierung fuer alle in der Datei
|
||||
* enthaltenen Klassen und Interfaces.
|
||||
*
|
||||
|
||||
// ino.end
|
||||
// ino.method.codegen.21397.definition
|
||||
public Menge<ClassFile> codegen(ResultSet result)
|
||||
throws JVMCodeException
|
||||
// ino.end
|
||||
// ino.method.codegen.21397.body
|
||||
{
|
||||
Menge<ClassFile> ret = new Menge<ClassFile>();
|
||||
codegenlog.info("Anzahl der Interfaces: "
|
||||
+ Integer.toString(InterfaceVektor.size()));
|
||||
for(int i = 0; i < InterfaceVektor.size(); i++) {
|
||||
InterfaceVektor.elementAt(i).codegen(result);
|
||||
}
|
||||
|
||||
codegenlog.info("Anzahl der Klassen: "
|
||||
+ Integer.toString(KlassenVektor.size()));
|
||||
for(int i = 0; i < KlassenVektor.size(); i++) {
|
||||
ret.add(KlassenVektor.elementAt(i).codegen(result));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
// ino.end
|
||||
*/
|
||||
// ino.method.createPairFromClassAndSuperclass.21400.defdescription type=javadoc
|
||||
/**
|
||||
* Erstellt ein Typ-Paar, welches im 1. Durchlauf in die Menge der Finite Closure
|
||||
* aufgenommen wird Input: Klassenname, Name der Superklasse, ParameterDerKlasse,
|
||||
* Parameter der Superklasse
|
||||
* @return
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.createPairFromClassAndSuperclass.21400.definition
|
||||
private Pair createPairFromClassAndSuperclass(Class baseClass, Type superclass, Menge classParaOrg, Menge superclassParaOrg, TypeAssumptions ass)
|
||||
// ino.end
|
||||
// ino.method.createPairFromClassAndSuperclass.21400.body
|
||||
{
|
||||
// Paar erstellen
|
||||
if(classParaOrg!=null && classParaOrg.size()==0){
|
||||
classParaOrg=null;
|
||||
}
|
||||
if(superclassParaOrg!=null && superclassParaOrg.size()==0){
|
||||
superclassParaOrg=null;
|
||||
}
|
||||
/*
|
||||
Pair P = new Pair(
|
||||
new RefType( className.toString(), classParaOrg,-1),
|
||||
new RefType( superclassName.toString(), superclassParaOrg,-1)
|
||||
);
|
||||
*/
|
||||
Pair P = new Pair(baseClass.getType().TYPE(ass, baseClass), superclass.TYPE(ass, baseClass));
|
||||
//PL 04-12-29 freshe Variablen ANFANG
|
||||
RefType r1 = (RefType)P.getTA1Copy();
|
||||
RefType r2 = (RefType)P.getTA2Copy();
|
||||
r1 = (RefType) r1.TYPE(ass, baseClass);
|
||||
r2 = (RefType) r2.TYPE(ass, baseClass);
|
||||
// #JB# 05.04.2005
|
||||
// ###########################################################
|
||||
Hashtable<JavaClassName,Type> substHash = new Hashtable<JavaClassName,Type>(); //fuer jedes Paar komplett neue Variablen
|
||||
Unify.varSubst(r1, substHash);
|
||||
Unify.varSubst(r2, substHash);
|
||||
// ###########################################################
|
||||
P = new Pair(r1, r2);
|
||||
//PL 04-12-29 freshe Variablen ENDE
|
||||
|
||||
//HIER AUSKOMMENTIERT, SOLLTE MAN AM ENDE WIEDER DAZU NEHMEN PL 04-12-28
|
||||
// gleiches Paar aufnehmen
|
||||
//vFC.add( new Pair( P.getTA1Copy(), P.getTA1Copy() ) );
|
||||
|
||||
return(P);
|
||||
|
||||
}
|
||||
// ino.end
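createPairFromClassAndSuperclass runs both sides of each pair through Unify.varSubst with a fresh substitution table, so that no two FC pairs share type variables. A minimal sketch of that per-pair renaming idea, with plain strings standing in for the project's type variables (assumed behaviour for illustration, not the actual varSubst code):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// "Fresh variables per pair": every type variable occurring in a pair is consistently
// replaced by a brand-new name, using one substitution table per pair.
public class FreshVarSketch {
    private static int counter = 0;

    static List<String> rename(List<String> vars, Map<String, String> substHash) {
        List<String> out = new ArrayList<>();
        for (String v : vars) {
            // the same old variable maps to the same fresh one within a single pair
            String fresh = substHash.computeIfAbsent(v, k -> "T" + (counter++));
            out.add(fresh);
        }
        return out;
    }

    public static void main(String[] args) {
        Map<String, String> substHash = new HashMap<>(); // one table per pair, as in varSubst
        System.out.println(rename(List.of("A", "B", "A"), substHash)); // [T0, T1, T0]
        // a second pair gets its own table and therefore disjoint variables
        System.out.println(rename(List.of("A"), new HashMap<>()));     // [T2]
    }
}
```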
|
||||
// ino.method.makeFC.21403.defdescription type=javadoc
|
||||
/**
|
||||
* Erstellt die Finite Closure
|
||||
* @return FC_TTO-Object, welches die Finite Closure repräsentiert
|
||||
*/
|
||||
public FC_TTO makeFC( TypeAssumptions ass )
|
||||
{
|
||||
|
||||
// Menge FC bilden
|
||||
|
||||
Menge<Pair> vFC = new Menge<Pair>(); // Menge FC
|
||||
TypeAssumptions globalAssumptions = this.makeBasicAssumptionsFromJRE(imports, false);
|
||||
globalAssumptions.add(this.getPublicFieldAssumptions());
|
||||
// 1. Menge <= in FC aufnehmen --> Iteration ueber alle Klassen
|
||||
|
||||
Menge<Type> ignoreTypes = new Menge<>(); //Enthält die Typen, welche nicht in der FC als Supertypen enthalten sein sollen.
|
||||
ignoreTypes.add(new RefType("Long",null,-1).TYPE(globalAssumptions, parent));
|
||||
ignoreTypes.add(new RefType("Float",null,-1).TYPE(globalAssumptions, parent));
|
||||
ignoreTypes.add(new RefType("Double",null,-1).TYPE(globalAssumptions, parent));
|
||||
ignoreTypes.add(new RefType("String",null,-1).TYPE(globalAssumptions, parent));
|
||||
ignoreTypes.add(new RefType("Integer",null,-1).TYPE(globalAssumptions, parent));
|
||||
ignoreTypes.add(new RefType("Object",null,-1).TYPE(globalAssumptions, parent));
|
||||
|
||||
Menge<Class> basicAssumptionsClassMenge = new Menge<>(); //die Klassen aus den BasicAssumptions und den Importierten Klassen
|
||||
for(ClassAssumption cAss : ass.getClassAssumptions()){
|
||||
Type t1 = cAss.getAssumedClass().getType();
|
||||
Type t2 = cAss.getAssumedClass().getSuperClass();
|
||||
if(t2 != null){
|
||||
Pair p = new Pair(t1, t2);
|
||||
//System.out.println("FCPair: "+p);
|
||||
if(! t1.equals(t2)){//Um FC_TTO darf kein T <. T stehen.
|
||||
Type superTypeFromAssumptions = ass.getTypeFor(t2, t2); //In den Assumptions den SuperTyp nachschlagen
|
||||
if(superTypeFromAssumptions != null && ! ignoreTypes.contains(superTypeFromAssumptions)){//Die Superklasse eines Typs nur anfügen, wenn er auch in den Assumptions vorkommt.
|
||||
vFC.add(p);
|
||||
}
|
||||
basicAssumptionsClassMenge.add(cAss.getAssumedClass());//Klasse ohne die Superklasse anfügen
|
||||
}else{
|
||||
//System.out.println("Wurde nicht aufgenommen");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for( int i = 0; i < KlassenVektor.size(); i++ )
|
||||
{
|
||||
Class tempKlasse = KlassenVektor.elementAt(i);
|
||||
inferencelog.debug("Verarbeite "+tempKlasse.getName(), Section.TYPEINFERENCE);
|
||||
//TODO: SuperKlasse erstellen, dies sollte am besten beim Konstruktoraufruf von Class geschehen. Diese kann dann mit getSuperClass abgefragt werden.
|
||||
if( tempKlasse.superclassid != null ) { // Klasse hat Superklasse
|
||||
Pair P=createPairFromClassAndSuperclass(tempKlasse,tempKlasse.getSuperClass(),tempKlasse.get_ParaList(),tempKlasse.superclassid.get_ParaList(), globalAssumptions);
|
||||
vFC.add( P );
|
||||
}
|
||||
if(tempKlasse.getSuperInterfaces()!=null){
|
||||
Iterator<Type> interfaceIterator=tempKlasse.getSuperInterfaces().iterator();
|
||||
while(interfaceIterator.hasNext()){
|
||||
RefType intf=(RefType) interfaceIterator.next();
|
||||
Pair P=createPairFromClassAndSuperclass(tempKlasse,intf,tempKlasse.get_ParaList(),intf.get_ParaList(),globalAssumptions);
|
||||
vFC.add( P );
|
||||
|
||||
}
|
||||
}
|
||||
} // Schleifenende durch Klassenvektor
|
||||
for(int i=0; i<InterfaceVektor.size();i++){
|
||||
Interface intf= InterfaceVektor.get(i);
|
||||
if(intf.getSuperInterfaces()!=null){
|
||||
Iterator<Type> interfaceIterator=intf.getSuperInterfaces().iterator();
|
||||
while(interfaceIterator.hasNext()){
|
||||
RefType superintf=(RefType) interfaceIterator.next();
|
||||
Pair P=createPairFromClassAndSuperclass(intf,superintf,intf.getParaList(), superintf.get_ParaList(),globalAssumptions);
|
||||
vFC.add( P );
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
Menge tto = (Menge)vFC.clone();
|
||||
|
||||
Unify.printMenge( "FC", vFC, 6 );
|
||||
/* z.B.
|
||||
*******************************
|
||||
Menge FC = {
|
||||
(Vektor< A >, Vektor< A >),
|
||||
(Vektor< A >, AbstractList< A >),
|
||||
(Matrix< A >, Matrix< A >),
|
||||
(Matrix< A >, Vektor< Vektor< A > >),
|
||||
(ExMatrix< A >, ExMatrix< A >),
|
||||
(ExMatrix< A >, Matrix< A >) }
|
||||
*******************************
|
||||
|
||||
ODER
|
||||
|
||||
*******************************
|
||||
Menge FC = {
|
||||
(BB< A >, BB< A >),
|
||||
(BB< A >, CC< A >),
|
||||
(AA< A, B >, AA< A, B >),
|
||||
(AA< A, B >, BB< DD< B, A > >) }
|
||||
*******************************
|
||||
|
||||
*/
|
||||
|
||||
// 2. Regel 2 der Huellendefinition "eingeschraenkt" anwenden
|
||||
// d.h. sinnvolle Substitutionen suchen (nicht alle)
|
||||
|
||||
boolean bPaarHinzu = true;
|
||||
while( bPaarHinzu )
|
||||
{
|
||||
bPaarHinzu = false; //PL 04-12-29 nur wenn hinzugefuegt auf true setzen
|
||||
// konkret: rechte Seite von FC nach Typkonstruktoren in der Parameterliste durchsuchen
|
||||
for( int n = 0; n < vFC.size(); n++ )
|
||||
{
|
||||
// Elemente in FC können nur Pair's sein --> Cast ohne Abfrage
|
||||
Pair PTypKonst = vFC.elementAt(n);
|
||||
|
||||
// Parameter des rechten Typausdrucks des betrachteten Paars extrahieren
|
||||
Menge<Type> vPara = ((RefType)(PTypKonst.TA2)).get_ParaList();
|
||||
Integer Subst = null; // Substitution
|
||||
int nSubstStelle = 0;
|
||||
inferencelog.debug("nSubstStelleStart" + nSubstStelle + " " + n, Section.FINITECLOSURE);
|
||||
|
||||
// Parameter durchlaufen und nach Typkonstruktor suchen
|
||||
// #JB# 17.05.2005
|
||||
// ###########################################################
|
||||
if(vPara!=null){
|
||||
// ###########################################################
|
||||
for( ; nSubstStelle < vPara.size(); nSubstStelle++ )
|
||||
{
|
||||
inferencelog.debug("nSubstStelle" + nSubstStelle, Section.FINITECLOSURE);
|
||||
if( vPara.elementAt(nSubstStelle) instanceof RefType && ((RefType)vPara.elementAt(nSubstStelle)).get_ParaList() != null )
|
||||
{
|
||||
// Typkonstruktor gefunden -> wird nun als Substitution verwendet
|
||||
Subst = 1;//new RefType( (RefType)vPara.elementAt(nSubstStelle) ,-1);
|
||||
inferencelog.debug( "Ausgangstyp:" + ((RefType)PTypKonst.TA2).getName() , Section.FINITECLOSURE);
|
||||
inferencelog.debug( "RefType = " + ((RefType)vPara.elementAt(nSubstStelle)).getName() , Section.FINITECLOSURE);
|
||||
break; // Einschraenkung - nur fuer ein RefType wird eine Substitution gesucht
|
||||
}
|
||||
}
|
||||
// ###########################################################
|
||||
}
|
||||
// ###########################################################
|
||||
if( Subst != null )
|
||||
{
|
||||
// Rechter Typ hat einen Typkonstruktor --> sinnvolles neues Paar bilden
|
||||
// d.h. Rechter Typ auf linker Paarseite suchen
|
||||
// System.out.println("Subststelle = " + nSubstStelle );
|
||||
|
||||
for( int t = 0; t < vFC.size(); t++ )
|
||||
{
|
||||
Pair PSuchen = vFC.elementAt(t);
|
||||
if( ((RefType)(PTypKonst.TA2)).getTypeName().equals( ((RefType)PSuchen.TA1).getTypeName() ) )
|
||||
{
|
||||
inferencelog.debug(" gefundener Typ links: " + ((RefType)(PSuchen.TA1)).getName(), Section.FINITECLOSURE );
|
||||
inferencelog.debug(" gefundener Typ rechts: " + ((RefType)(PSuchen.TA2)).getName() , Section.FINITECLOSURE);
|
||||
// Paar gefunden, das als linken Typ den gleichen Typen enthält, der als Parameter einen Typkonstruktor hat
|
||||
// Substitution
|
||||
//Pair P = new Pair( PSuchen.getTA1Copy( ), PSuchen.getTA2Copy( ) );
|
||||
//linker Typterm bleibt gleich
|
||||
//rechter Typterm wird aussen auf den Supertyp gesetzt.
|
||||
//restliches FC erfolgt ueber die Transitivitaet
|
||||
//siehe im unteren Teil
|
||||
Pair P = new Pair( PTypKonst.getTA1Copy( ), PSuchen.getTA2Copy( ) );
|
||||
// System.out.println(" Subst " + Subst.getName() );
|
||||
// System.out.println(" Vor: P = " + P.toString() + P.TA1 );
|
||||
// System.out.println(" Vor: PSuchen = " + PSuchen.toString() + PSuchen.TA1 );
|
||||
|
||||
// Parameter, der substituiert wird, sollte TV sein ???
|
||||
//TypePlaceholder TV = null;
|
||||
// if( ((RefType)P.TA1).isTV( nSubstStelle ) )
|
||||
// try
|
||||
// {
|
||||
// TV = new TypePlaceholder( ((RefType)P.TA1).getParaN( nSubstStelle ) );
|
||||
// }
|
||||
// catch( Exception E )
|
||||
// {
|
||||
// continue;
|
||||
// }
|
||||
// else
|
||||
// continue;
|
||||
|
||||
//es werden alle Parameter in einem Typterm, der
//Argumente hat, ersetzt PL 04-12-28
|
||||
Hashtable<JavaClassName,Type> hts = new Hashtable<JavaClassName,Type>();
|
||||
//for(int u = nSubstStelle; u < vPara.size(); u++) {
|
||||
for(int u = 0; u < vPara.size(); u++) {
|
||||
try {
|
||||
// #JB# 05.04.2005
|
||||
// ###########################################################
|
||||
//TV = new TypePlaceholder( ((RefType)PSuchen.TA1).getParaN(u) );
|
||||
//System.out.println("TV_Name: " + u + TV.Type2String());
|
||||
// ###########################################################
|
||||
inferencelog.debug("Typterm_Name: " + vPara.elementAt(u), Section.FINITECLOSURE);
|
||||
inferencelog.debug("Typterm_Name: " + ((Type)vPara.elementAt(u)).Type2String(), Section.FINITECLOSURE);
|
||||
hts.put(new JavaClassName(((RefType)PSuchen.TA1).getParaN(u)), vPara.elementAt(u));
|
||||
}
|
||||
catch( Exception E ) {
|
||||
inferencelog.error(E.getMessage(), Section.FINITECLOSURE);
|
||||
//FIXME Throw Exception or Error instead of exiting!
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
// Subst( P,
|
||||
// 2,
|
||||
// TV,
|
||||
// new RefType( (RefType)vPara.elementAt(u) ),
|
||||
// false ); // rechte Seite substituieren
|
||||
//Es genuegt die rechte Seite zu substituieren, da
|
||||
//die linke Seite ein Typterm ausschließlich mit
|
||||
//Typvariablen ist
|
||||
}
|
||||
//Unify.SubstHashtableGeneric(((RefType)P.TA1), hts); //funktioniert nicht
|
||||
Unify.SubstHashtableGeneric(((RefType)P.TA2), hts); //funktioniert nicht
|
||||
// System.out.println(" TV!!!= " + TV.getName() );
|
||||
//Subst( P, 1, TV, Subst, false ); // linke Seite substituieren
|
||||
//Subst( P, 2, TV, Subst, false ); // rechte Seite substituieren
|
||||
// System.out.println(" nach Subst: P = " + P.toString() );
|
||||
// System.out.println(" Nach: PSuchen = " + PSuchen.toString() );
|
||||
// System.out.println(" Nach: " + P.toString() );
|
||||
|
||||
// Paar einfuegen, falls noch nicht vorhanden
|
||||
// System.out.println("Paar alt:" + PSuchen.toString() );
|
||||
// System.out.println("Paar neu:" + P.toString() );
|
||||
if( !P.isInMenge( vFC ) )
|
||||
{
|
||||
vFC.add( P );
|
||||
Unify.printMenge( "FC", vFC, 6 );
|
||||
bPaarHinzu = true;
|
||||
}
|
||||
//PL 04-12-29
|
||||
// else //unnoetig, da am Anfang bereits false gesetzt
|
||||
// {
|
||||
// bPaarHinzu = false;
|
||||
// }
|
||||
|
||||
}
|
||||
}
|
||||
} // end if: Substitution gefunden???
|
||||
} // end for: Typkonstruktor suchen
|
||||
|
||||
|
||||
// Transitivitaet berechnen
|
||||
for( int u = 0; u < vFC.size(); u++ )
|
||||
{
|
||||
Pair PTemp = vFC.elementAt(u);
|
||||
|
||||
// falls rechtes Paar = RefType
|
||||
if( PTemp.TA2 instanceof RefType )
|
||||
{
|
||||
RefType R = (RefType)PTemp.TA2;
|
||||
|
||||
// rechte Seite auf linker Seite suchen
|
||||
for( int e = 0; e < vFC.size(); e++ )
|
||||
{
|
||||
Pair PSuch = vFC.elementAt(e);
|
||||
// als linke Paarseite theoretisch nur RefType's moeglich --> Cast
|
||||
RefType RSuch = (RefType)PSuch.TA1;
|
||||
|
||||
//if( R.getName().equals(RSuch.getName()) )
|
||||
if (R.is_Equiv(RSuch, new Hashtable<JavaClassName,Type>())) //eingefuegt PL 05-01-07
|
||||
{
|
||||
// Paar einfuegen, falls noch nicht vorhanden
|
||||
RefType L1 = (RefType)PTemp.getTA1Copy();
|
||||
RefType L2 = (RefType)PTemp.getTA2Copy();
|
||||
RefType R1 = (RefType)PSuch.getTA1Copy();
|
||||
RefType R2 = (RefType)PSuch.getTA2Copy();
|
||||
|
||||
//zunaechst Variablen disjunkt machen ANFANG
|
||||
// #JB# 05.04.2005
|
||||
// ###########################################################
|
||||
Hashtable<JavaClassName,Type> substHash1 = new Hashtable<JavaClassName,Type>();
|
||||
Unify.varSubst(L1, substHash1);
|
||||
Unify.varSubst(L2, substHash1);
|
||||
Hashtable<JavaClassName,Type> substHash2 = new Hashtable<JavaClassName,Type>();
|
||||
Unify.varSubst(R1, substHash2);
|
||||
Unify.varSubst(R2, substHash2);
|
||||
// ###########################################################
|
||||
//zunaechst Variablen disjunkt machen ENDE
|
||||
|
||||
//Variablen so umbenennen, dass transitiver Abschluss richtige
|
||||
//Namen hat ANFANG
|
||||
|
||||
// #JB# 05.04.2005
|
||||
// ###########################################################
|
||||
Hashtable<JavaClassName,Type> h = new Hashtable<JavaClassName,Type>();
|
||||
L2.Equiv2Equal(R1, h);
|
||||
Hashtable<JavaClassName,Type> substHash3 = h;
|
||||
Unify.varSubst(L1, substHash3);
|
||||
Unify.varSubst(R2, substHash3);
|
||||
// ###########################################################
|
||||
//Variablen so umbenennen, dass transitiver Abschluss richtige
|
||||
//Namen hat ENDE
|
||||
|
||||
//Pair P = new Pair( (RefType)PTemp.TA1, (RefType)PSuch.TA2 );
|
||||
Pair P = new Pair(L1, R2);
|
||||
if( !P.isInMenge( vFC ) )
|
||||
{
|
||||
vFC.add( P );
|
||||
bPaarHinzu = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
bPaarHinzu = false;
|
||||
}
|
||||
}
|
||||
} // end for: linke Seite suchen
|
||||
} // end if: Element ist RefType
|
||||
} // end for: Transitivitäten berechnen
|
||||
//PL HIER REFLEXIVE HUELLE EINFUEGEN
|
||||
// 05-01-07
|
||||
|
||||
} // Ende WHILE
|
||||
|
||||
/* z.B.
|
||||
*******************************
|
||||
Menge nach trans: FC = {
|
||||
(Vektor< A >, Vektor< A >),
|
||||
(Vektor< A >, AbstractList< A >),
|
||||
(Matrix< A >, Matrix< A >),
|
||||
(Matrix< A >, Vektor< Vektor< A > >),
|
||||
(ExMatrix< A >, ExMatrix< A >),
|
||||
(ExMatrix< A >, Matrix< A >),
|
||||
(Vektor< Vektor< A > >, Vektor< Vektor< A > >),
|
||||
(Vektor< Vektor< A > >, AbstractList< Vektor< A > >),
|
||||
(Matrix< A >, AbstractList< Vektor< A > >),
|
||||
(ExMatrix< A >, Vektor< Vektor< A > >),
|
||||
(ExMatrix< A >, AbstractList< Vektor< A > >) }
|
||||
|
||||
ODER
|
||||
|
||||
*******************************
|
||||
Menge nach trans: FC = {
|
||||
(BB< A >, BB< A >),
|
||||
(BB< A >, CC< A >),
|
||||
(AA< A, B >, AA< A, B >),
|
||||
(AA< A, B >, BB< DD< B, A > >),
|
||||
(BB< DD< B, A > >, BB< DD< B, A > >),
|
||||
(BB< DD< B, A > >, CC< DD< B, A > >),
|
||||
(AA< A, B >, CC< DD< B, A > >) }
|
||||
*******************************
|
||||
|
||||
******************************* */
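The example above is the result of closing FC under the transitivity rule; the surrounding while loop keeps adding pairs until bPaarHinzu stays false. A self-contained sketch of just that transitivity fixpoint over plain string pairs (the parameter substitution and variable renaming that the real loop also performs are deliberately left out):

```java
import java.util.AbstractMap.SimpleEntry;
import java.util.LinkedHashSet;
import java.util.Map.Entry;
import java.util.Set;

// Fixpoint loop for the transitivity rule only: whenever (a, b) and (b, c) are in FC,
// (a, c) is added, until no new pair appears. Types are plain strings here.
public class TransitiveClosureSketch {
    public static void main(String[] args) {
        Set<Entry<String, String>> fc = new LinkedHashSet<>();
        fc.add(new SimpleEntry<>("ExMatrix<A>", "Matrix<A>"));
        fc.add(new SimpleEntry<>("Matrix<A>", "Vektor<Vektor<A>>"));
        fc.add(new SimpleEntry<>("Vektor<A>", "AbstractList<A>"));

        boolean pairAdded = true;                      // corresponds to bPaarHinzu
        while (pairAdded) {
            pairAdded = false;
            Set<Entry<String, String>> fresh = new LinkedHashSet<>();
            for (Entry<String, String> left : fc)
                for (Entry<String, String> right : fc)
                    if (left.getValue().equals(right.getKey()))
                        fresh.add(new SimpleEntry<>(left.getKey(), right.getValue()));
            if (fc.addAll(fresh)) {                    // only loop again if something was new
                pairAdded = true;
            }
        }
        fc.forEach(p -> System.out.println(p.getKey() + " <. " + p.getValue()));
        // (ExMatrix<A>, Vektor<Vektor<A>>) shows up, as in the "nach trans" example above
    }
}
```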
|
||||
|
||||
|
||||
// printMenge( "nach trans: FC", vFC, 6 );
|
||||
|
||||
Menge<Class> KlassenVektorunImportierteKlassen = new Menge<>();
|
||||
KlassenVektorunImportierteKlassen.addAll(basicAssumptionsClassMenge);
|
||||
KlassenVektorunImportierteKlassen.addAll(KlassenVektor);
|
||||
|
||||
FC_TTO fctto = new FC_TTO(vFC, tto, KlassenVektorunImportierteKlassen);
|
||||
return fctto;
|
||||
}
|
||||
|
||||
public TypeAssumptions getPublicFieldAssumptions(){
|
||||
TypeAssumptions publicAssumptions = new TypeAssumptions(null);
|
||||
//Alle PublicAssumptions der in dieser SourceFile enthaltenen Klassen sammeln:
|
||||
for(Class klasse : KlassenVektor){
|
||||
publicAssumptions.add(klasse.getPublicFieldAssumptions());
|
||||
}
|
||||
return publicAssumptions;
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////
|
||||
// TypeReconstructionAlgorithmus
|
||||
/////////////////////////////////////////////////////////////////////////
|
||||
@ -691,24 +237,32 @@ public class SourceFile
typinferenzLog.debug("Von JRE erstellte Assumptions: "+importAssumptions, Section.TYPEINFERENCE);

//FiniteClosure generieren:
FC_TTO finiteClosure = this.makeFC(globalAssumptions);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(globalAssumptions);

typinferenzLog.debug("FiniteClosure: \n"+finiteClosure, Section.TYPEINFERENCE);

ConstraintsSet oderConstraints = new ConstraintsSet();
//Alle Constraints der in dieser SourceFile enthaltenen Klassen sammeln:
for(Class klasse : KlassenVektor){
oderConstraints.add(klasse.typeReconstruction(finiteClosure, globalAssumptions));
oderConstraints.add(klasse.typeReconstruction(globalAssumptions));
}

/*////////////////
* Paare in MPairs umwandeln
* (Wird zunächst mal weggelassen. Constraints werden erst beim Unifizieren umgewandelt.)
*/////////////////
//UnifyTypeFactory.convert(oderConstraints);


////////////////
//Karthesisches Produkt bilden:
////////////////

//Unmögliche ConstraintsSets aussortieren durch Unifizierung
Unifier unifier = (pairs)->{
Function<Menge<Pair>,Menge<Menge<Pair>>> unifier = (pairs)->{
Menge<Menge<Pair>> retValue = new Menge<>();
retValue = Unify.unify(pairs, finiteClosure);
Set<MPair> convertPairs = UnifyTypeFactory.convert(pairs);
Set<Set<MPair>> unifiedPairs = new Unify().unify(convertPairs, finiteClosure);
return retValue;};
//oderConstraints.filterWrongConstraints(unifier);
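The lambda above is transitional: the pairs are already converted to MPair and run through the new Unify, while the value handed back still uses the old Menge<Pair> representation. A hedged sketch of the "convert, unify on the new model, convert back" adapter shape this appears to be heading towards; every name below (OldPair, NewPair, the identity conversion, the dummy unify) is an assumption for illustration only:

```java
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

// OldPair plays the role of the existing Pair, NewPair the role of MPair.
public class UnifierAdapterSketch {
    record OldPair(String lhs, String rhs) {}
    record NewPair(String lhs, String rhs) {}

    static NewPair convert(OldPair p) { return new NewPair(p.lhs(), p.rhs()); }
    static OldPair convertBack(NewPair p) { return new OldPair(p.lhs(), p.rhs()); }

    // a "unifier" over the new representation; here it just echoes one trivial solution
    static Set<Set<NewPair>> unify(Set<NewPair> pairs) { return Set.of(pairs); }

    public static void main(String[] args) {
        Function<Set<OldPair>, Set<Set<OldPair>>> unifier = pairs -> {
            // convert to the new pair model, unify there, then map the solutions back
            Set<NewPair> converted = pairs.stream().map(UnifierAdapterSketch::convert)
                                          .collect(Collectors.toSet());
            return unify(converted).stream()
                    .map(sol -> sol.stream().map(UnifierAdapterSketch::convertBack)
                                    .collect(Collectors.toSet()))
                    .collect(Collectors.toSet());
        };
        System.out.println(unifier.apply(Set.of(new OldPair("a", "Integer"))));
    }
}
```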
@ -716,17 +270,17 @@ public class SourceFile

typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
//Die Constraints in Pair's umwandeln (Karthesisches Produkt bilden):
Menge<Menge<Pair>> xConstraints = oderConstraints.cartesianProduct();
Set<Set<Pair>> xConstraints = oderConstraints.cartesianProduct();

typinferenzLog.debug("Karthesisches Produkt der Constraints: "+xConstraints, Section.TYPEINFERENCE);

finiteClosure.generateFullyNamedTypes(globalAssumptions);
//finiteClosure.generateFullyNamedTypes(globalAssumptions);

//////////////////////////////
// Unifizierung der Constraints:
//////////////////////////////
boolean unifyFail = true;
for(Menge<Pair> constraints : xConstraints){
for(Set<Pair> constraints : xConstraints){
//Alle durch das Karthesische Produkt entstandenen Möglichkeiten durchgehen:
Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
@ -741,7 +295,7 @@ public class SourceFile
|
||||
}
|
||||
|
||||
//Erst die Unifizierung erstellen:
|
||||
Menge<Pair> constraintsClone = (Menge<Pair>)constraints.clone();
|
||||
Menge<MPair> constraintsClone = (Menge<MPair>)constraints.clone();
|
||||
|
||||
//IDEE: Man bildet Zusammenhangskomponenten von Paaren, die gemeinsame Variablen haben
|
||||
// und unifizert nur die Zusammenhangskomponenten in Schritten 1 - 5
|
||||
@ -762,17 +316,17 @@ public class SourceFile
|
||||
|
||||
//Schritt 3: Umwandlung der Indizes in die zugehoerigen Elemente
|
||||
// In streamconstraintsclone sind die Mengen von Paar enthalten die unifiziert werden muessen
|
||||
Stream<Menge<Pair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
|
||||
Stream<Menge<MPair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
|
||||
.map(i -> constraintsClone.elementAt(i))
|
||||
.<Menge<Pair>>collect(Menge::new, Menge::add, Menge::addAll));
|
||||
.<Menge<MPair>>collect(Menge::new, Menge::add, Menge::addAll));
|
||||
//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
|
||||
//System.out.println();
|
||||
//Schritt 4: Unifikation
|
||||
Menge<Menge<Menge<Pair>>> vecunifyResult =
|
||||
Set<Set<Set<MPair>>> vecunifyResult =
|
||||
//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
|
||||
//DEBUG-Variante
|
||||
streamconstraintsclone.map(x ->
|
||||
{ Menge<Menge<Pair>> z = Unify.unify(x, finiteClosure);
|
||||
{ Set<Set<MPair>> z = new Unify().unify(x, finiteClosure);
|
||||
return z;
|
||||
}
|
||||
).collect(Menge::new, Menge::add, Menge::addAll);
|
||||
@ -785,19 +339,19 @@ public class SourceFile
|
||||
//Schritt 5: Bildung des cartesischen Produkts
|
||||
//sollte wieder entfernt werden: Weiterarbeit mit:
|
||||
//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
|
||||
Menge<Menge<Pair>> cardprodret_start = new Menge<>();
|
||||
Set<Set<Pair>> cardprodret_start = new Menge<>();
|
||||
cardprodret_start.add(new Menge<Pair>());
|
||||
|
||||
//cart. Produkt mit Linkverschiebung
|
||||
Menge<Menge<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
|
||||
Menge<Menge<Pair>> cardprodret= new Menge<>();
|
||||
Set<Set<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
|
||||
Set<Set<Pair>> cardprodret= new Menge<>();
|
||||
if (y.size() > 0) {
|
||||
//System.out.println(y);
|
||||
//Menge<Menge<Pair>> cardprodretold = x;
|
||||
//cardprodret = new Menge<>();
|
||||
for(int j = 0; j < x.size(); j++) {
|
||||
for (int k = 0; k < y.size(); k++){
|
||||
Menge<Pair> help = new Menge<>();
|
||||
Set<Pair> help = new Menge<>();
|
||||
help.addAll(y.elementAt(k));
|
||||
help.addAll(x.elementAt(j));
|
||||
cardprodret.add(help);
|
||||
|
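Schritt 1 bis 5 above split the constraints of one alternative into connected components of pairs that share type variables, unify each component on its own, and then recombine the partial results via the cartesian product. A standalone sketch of the component-building step; pairs are modelled as strings and lower-case names are treated as type variables, so this is an illustration, not the project's index-based implementation:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Groups constraint pairs so that pairs sharing a type variable land in the same component.
public class ConnectedComponentsSketch {
    record Pair(String lhs, String rhs) {}

    static boolean isVariable(String name) { return Character.isLowerCase(name.charAt(0)); }

    static List<Set<Pair>> components(List<Pair> pairs) {
        List<Set<Pair>> comps = new ArrayList<>();   // pairs per component
        List<Set<String>> vars = new ArrayList<>();  // variables per component
        for (Pair p : pairs) {
            Set<String> pVars = new LinkedHashSet<>();
            for (String s : List.of(p.lhs(), p.rhs())) if (isVariable(s)) pVars.add(s);
            Set<Pair> mergedPairs = new LinkedHashSet<>(List.of(p));
            Set<String> mergedVars = new LinkedHashSet<>(pVars);
            // merge every existing component that shares a variable with this pair
            for (int i = comps.size() - 1; i >= 0; i--) {
                if (!Collections.disjoint(vars.get(i), pVars)) {
                    mergedPairs.addAll(comps.remove(i));
                    mergedVars.addAll(vars.remove(i));
                }
            }
            comps.add(mergedPairs);
            vars.add(mergedVars);
        }
        return comps;
    }

    public static void main(String[] args) {
        List<Pair> input = List.of(new Pair("a", "Integer"),
                                   new Pair("b", "a"),
                                   new Pair("c", "String"));
        // two components: one with the pairs over a/b, one with the pair over c
        components(input).forEach(System.out::println);
    }
}
```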
@ -11,7 +11,11 @@ import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.Type;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.syntaxtree.type.WildcardType;
|
||||
import de.dhbwstuttgart.typeinference.KomplexeMenge;
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
import de.dhbwstuttgart.typeinference.OderMenge;
|
||||
import de.dhbwstuttgart.typeinference.Pair;
|
||||
import de.dhbwstuttgart.typeinference.UndMenge;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.ClassAssumption;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
|
||||
@ -24,7 +28,7 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||
|
||||
public class UnifyTypeFactory {

public static FiniteClosure generateFC(TypeAssumptions fromAss){
HashSet<MPair> pairs = new HashSet<>();
for(ClassAssumption cAss : fromAss.getClassAssumptions()){
@ -44,7 +48,7 @@ public class UnifyTypeFactory {

public static UnifyType convert(Type t){
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
if(t instanceof GenericTypeVar){
if(t instanceof GenericTypeVar){ //WTF ?
return UnifyTypeFactory.convert((GenericTypeVar)t);
}
System.out.println("Der Typ "+t+" kann nicht umgewandelt werden");
@ -80,4 +84,12 @@ public class UnifyTypeFactory {
public static UnifyType convert(GenericTypeVar t){
return new SimpleType(t.get_Name());
}

public static UndMenge<MPair> convert(UndMenge<Pair> constraints) {
return null;
}

public static OderMenge<MPair> convert(OderMenge<Pair> constraints) {
return null;
}
}
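The new convert(UndMenge) and convert(OderMenge) overloads are still stubs returning null, which matches the comment in SourceFile that constraints are only converted at unification time for now. A hedged sketch of how such a structural conversion could eventually look, expressed over a simplified And/Or model rather than the real KomplexeMenge classes; all names are assumptions:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Simplified stand-in for the KomplexeMenge structure: an AndSet of OrSets of leaf constraints.
// mapLeaves walks the structure and converts every leaf while keeping the And/Or shape.
public class ConvertSketch {
    record OrSet<A>(List<A> alternatives) {}
    record AndSet<A>(List<OrSet<A>> parts) {}

    static <A, B> AndSet<B> mapLeaves(AndSet<A> in, Function<A, B> convert) {
        List<OrSet<B>> parts = new ArrayList<>();
        for (OrSet<A> or : in.parts()) {
            List<B> alternatives = new ArrayList<>();
            for (A leaf : or.alternatives()) {
                alternatives.add(convert.apply(leaf)); // convert each leaf (Pair -> MPair analogue)
            }
            parts.add(new OrSet<B>(alternatives));
        }
        return new AndSet<B>(parts);
    }

    public static void main(String[] args) {
        AndSet<String> constraints = new AndSet<>(List.of(
                new OrSet<>(List.of("a <. Integer", "a <. Double")),
                new OrSet<>(List.of("b <. a"))));
        AndSet<String> converted = mapLeaves(constraints, leaf -> "MPair(" + leaf + ")");
        System.out.println(converted);
    }
}
```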
@ -22,10 +22,6 @@ public class Unify_FC_TTO_Builder {
|
||||
fc.add(new Pair(t1, t2));
|
||||
}
|
||||
|
||||
public FC_TTO Get_FC_TTO() {
|
||||
return new FC_TTO(fc, (Menge<?>) fc.clone(), classes);
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
fc = new Menge<Pair>();
|
||||
classes = new Menge<Class>();
|
||||
|
@ -29,8 +29,6 @@ import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
|
||||
import de.dhbwstuttgart.typeinference.TypeinferenceResults;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
|
||||
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
|
||||
import de.dhbwstuttgart.typeinference.unify.CSubstitutionGenVar;
|
||||
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
|
||||
|
||||
|
||||
|
||||
@ -182,41 +180,6 @@ public class RefType extends ObjectType implements IMatchable
|
||||
return name + "<"+para + " >" ;
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
/**
|
||||
* Wandelt die Parameter des RefTypes in TPHs um, sofern es sich um Generische Variablen handelt.
|
||||
* @return
|
||||
*/
|
||||
// ino.method.GenericTypeVar2TypePlaceholder.26652.definition
|
||||
public CSubstitutionSet GenericTypeVar2TypePlaceholder ()
|
||||
// ino.end
|
||||
// ino.method.GenericTypeVar2TypePlaceholder.26652.body
|
||||
{
|
||||
//throw new NotImplementedException();
|
||||
///*
|
||||
CSubstitutionSet sub = new CSubstitutionSet();
|
||||
if(parameter != null)
|
||||
{
|
||||
for (int i = 0; i < parameter.size(); i++)
|
||||
{
|
||||
if (parameter.elementAt(i) instanceof GenericTypeVar)
|
||||
{
|
||||
TypePlaceholder tlv = TypePlaceholder.fresh(null);
|
||||
sub.addElement(new CSubstitutionGenVar((GenericTypeVar)parameter.elementAt(i), tlv));
|
||||
parameter.set(i, tlv);
|
||||
}
|
||||
if (parameter.elementAt(i) instanceof RefType)
|
||||
{
|
||||
CSubstitutionSet parasub = ((RefType)parameter.elementAt(i)).GenericTypeVar2TypePlaceholder();
|
||||
sub.addAll(parasub); //korrigiert PL 07=07=29
|
||||
}
|
||||
}
|
||||
}
|
||||
return sub;
|
||||
//*/
|
||||
}
|
||||
// ino.end
|
||||
|
||||
/**
|
||||
* Wandelt die Parameter des RefTypes in TPHs um, sofern es sich um Generische Variablen handelt.
|
||||
|
@ -1,6 +1,7 @@
|
||||
package de.dhbwstuttgart.typeinference;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
import java.util.Vector;
|
||||
import de.dhbwstuttgart.logger.Logger;
|
||||
import de.dhbwstuttgart.logger.*;
|
||||
@ -57,12 +58,12 @@ public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConst
|
||||
}
|
||||
this.filterWrongConstraints(
|
||||
(pairs)->{
|
||||
Menge<Pair> undConstraintsUndPairs = new Menge<>();
|
||||
Set<Pair> undConstraintsUndPairs = new Menge<>();
|
||||
undConstraintsUndPairs.addAll(pairs);
|
||||
undConstraintsUndPairs.addAll(alleUndConstraints);
|
||||
log.debug("Versuche Pairs auszusondern:\n"+pairs, Section.TYPEINFERENCE);
|
||||
log.debug("Unifiziere:\n"+undConstraintsUndPairs, Section.TYPEINFERENCE);
|
||||
Menge<Menge<Pair>> unifyResult = unifier.apply(undConstraintsUndPairs);
|
||||
Set<Set<Pair>> unifyResult = unifier.apply(undConstraintsUndPairs);
|
||||
return unifyResult;
|
||||
});
|
||||
}
|
||||
|
@ -1,6 +1,8 @@
package de.dhbwstuttgart.typeinference;

import java.util.Set;

public interface KomplexeMenge<A>{
Menge<? extends KomplexeMenge<A>> getSet();
Menge<Menge<A>> cartesianProduct();
Set<? extends KomplexeMenge<A>> getSet();
Set<Set<A>> cartesianProduct();
}
@ -1,5 +1,6 @@
|
||||
package de.dhbwstuttgart.typeinference;
|
||||
|
||||
import java.util.Set;
|
||||
import java.util.Vector;
|
||||
|
||||
import de.dhbwstuttgart.logger.Logger;
|
||||
@ -10,7 +11,7 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.Unifier;
|
||||
|
||||
public class OderConstraint extends OderMenge<Pair>{
|
||||
private Menge<UndConstraint> oderConstraintPairs;
|
||||
private Set<UndConstraint> oderConstraintPairs;
|
||||
|
||||
private final static Logger logger = Logger.getLogger(OderConstraint.class.getName());
|
||||
|
||||
@ -70,7 +71,7 @@ public class OderConstraint extends OderMenge<Pair>{
|
||||
return ret+"]";
|
||||
}
|
||||
|
||||
public Vector<UndConstraint> getUndConstraints() {
|
||||
public Set<UndConstraint> getUndConstraints() {
|
||||
return this.oderConstraintPairs;
|
||||
/*
|
||||
Vector<UndConstraint> ret = new Vector<UndConstraint>();
|
||||
@ -91,9 +92,9 @@ public class OderConstraint extends OderMenge<Pair>{
* @param unifier - Wird für die Unifizierung benutzt
*/
void filterWrongConstraints(Unifier unifier) {
Menge<UndConstraint> filteredConstraints = new Menge<>();
Set<UndConstraint> filteredConstraints = new Menge<>();
for(UndConstraint cons : this.getUndConstraints()){
Menge<Menge<Pair>> unifierResult = unifier.apply(cons.getConstraintPairs());
Set<Set<Pair>> unifierResult = unifier.apply(cons.getConstraintPairs());
if(!unifierResult.isEmpty()){
filteredConstraints.add(cons);
}else{
@ -111,7 +112,7 @@ public class OderConstraint extends OderMenge<Pair>{
}

@Override
public Menge<? extends KomplexeMenge<Pair>> getSet() {
public Set<? extends KomplexeMenge<Pair>> getSet() {
return this.oderConstraintPairs;
}
|
||||
|
||||
|
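filterWrongConstraints keeps an alternative only if the supplied unifier finds at least one solution for its pairs; an empty unification result marks the alternative as contradictory and it is dropped. A minimal sketch of that filtering with the unifier modelled as a plain Function; the string "pairs" and the dummy unifier are illustrative assumptions:

```java
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

// Each alternative (a set of constraint pairs) survives only if the unifier returns
// at least one solution for it.
public class FilterConstraintsSketch {
    static List<Set<String>> filterWrongConstraints(
            List<Set<String>> alternatives,
            Function<Set<String>, Set<Set<String>>> unifier) {
        return alternatives.stream()
                .filter(alt -> !unifier.apply(alt).isEmpty()) // empty result = contradiction
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        Function<Set<String>, Set<Set<String>>> unifier = pairs -> {
            if (pairs.stream().anyMatch(p -> p.contains("impossible"))) {
                return Set.of();          // no unifier exists: contradiction
            }
            return Set.of(pairs);         // pretend the pairs themselves are the one solution
        };
        List<Set<String>> alternatives = List.of(
                Set.of("a <. Integer"),
                Set.of("a <. impossible"));
        System.out.println(filterWrongConstraints(alternatives, unifier)); // keeps only the first
    }
}
```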
@ -2,6 +2,7 @@ package de.dhbwstuttgart.typeinference;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
import de.dhbwstuttgart.logger.Logger;
|
||||
@ -13,11 +14,11 @@ import de.dhbwstuttgart.typeinference.unify.Unifier;
|
||||
|
||||
public abstract class OderMenge<A> implements KomplexeMenge<A>{
|
||||
|
||||
public abstract Menge<? extends KomplexeMenge<A>> getSet();
|
||||
public abstract Set<? extends KomplexeMenge<A>> getSet();
|
||||
|
||||
@Override
|
||||
public Menge<Menge<A>> cartesianProduct() {
|
||||
Menge<Menge<A>> ret = new Menge<>();
|
||||
public Set<Set<A>> cartesianProduct() {
|
||||
Set<Set<A>> ret = new Menge<>();
|
||||
for(KomplexeMenge<A> km : this.getSet()){
|
||||
ret.addAll(km.cartesianProduct());
|
||||
}
|
||||
|
@ -3,24 +3,25 @@ package de.dhbwstuttgart.typeinference;
import de.dhbwstuttgart.typeinference.unify.Unify;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

//import com.rits.cloning.Cloner;

public abstract class UndMenge<A extends DeepCloneable> implements KomplexeMenge<A>{
public abstract class UndMenge<A> implements KomplexeMenge<A>{

public abstract Menge<? extends KomplexeMenge<A>> getSet();

@Override
public Menge<Menge<A>> cartesianProduct() {
Menge<Menge<A>> ret = null;
public Set<Set<A>> cartesianProduct() {
Set<Set<A>> ret = null;
//Cloner cloner = new Cloner();
for(KomplexeMenge<A> km : this.getSet()){
if(ret == null){
ret = km.cartesianProduct();
}else{
Menge<Menge<A>> cartesianProduct = new Menge<>();
for(Menge<A> r : ret)for(Menge<A> m : km.cartesianProduct()){ //Für jedes Element aus dem Karthesischen Produkt:
Menge<A> undElement = new Menge<A>();
Set<Set<A>> cartesianProduct = new Menge<>();
for(Set<A> r : ret)for(Set<A> m : km.cartesianProduct()){ //Für jedes Element aus dem Karthesischen Produkt:
Set<A> undElement = new Menge<A>();
undElement.addAll(Unify.deepClone(r));
undElement.addAll(m);
cartesianProduct.add(undElement);
@ -28,7 +29,7 @@ public abstract class UndMenge<A extends DeepCloneable> implements KomplexeMenge
ret = cartesianProduct;
}
}
if(ret == null)return new Menge<Menge<A>>();
if(ret == null)return new Menge<>();
return ret;
}
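UndMenge.cartesianProduct combines the products of its parts pairwise: every alternative accumulated so far is merged with every alternative of the next part ("cart. Produkt mit Linkverschiebung" in SourceFile). A self-contained sketch of that combination over plain java.util sets, as an illustration of the principle rather than the Menge-based implementation:

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Given the solution sets of the parts (each a set of alternatives, each alternative a set of
// elements), every alternative of the running result is merged with every alternative of the
// next part, exactly like the nested for-loops above.
public class CartesianProductSketch {
    static <A> Set<Set<A>> product(List<Set<Set<A>>> parts) {
        Set<Set<A>> ret = null;
        for (Set<Set<A>> part : parts) {
            if (ret == null) { ret = part; continue; }
            Set<Set<A>> combined = new LinkedHashSet<>();
            for (Set<A> r : ret)
                for (Set<A> m : part) {
                    Set<A> undElement = new LinkedHashSet<>(r); // copy of r, as deepClone does
                    undElement.addAll(m);
                    combined.add(undElement);
                }
            ret = combined;
        }
        return ret == null ? new LinkedHashSet<>() : ret;
    }

    public static void main(String[] args) {
        List<Set<Set<String>>> parts = new ArrayList<>();
        parts.add(Set.of(Set.of("a=Int"), Set.of("a=Double"))); // two alternatives for a
        parts.add(Set.of(Set.of("b=String")));                  // one alternative for b
        System.out.println(product(parts)); // 2 x 1 = 2 combined alternatives
    }
}
```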
@ -1,72 +0,0 @@
|
||||
// ino.module.CSet.8698.package
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
// ino.end
|
||||
|
||||
// ino.module.CSet.8698.import
|
||||
import java.util.Iterator;
|
||||
// ino.end
|
||||
|
||||
// ino.class.CSet.27435.description type=javadoc
|
||||
/**
|
||||
*
|
||||
* @author Jörg Bäuerle
|
||||
* @version $date
|
||||
*/
|
||||
// ino.end
|
||||
// ino.class.CSet.27435.declaration
|
||||
public abstract class CSet<E> implements Iterable<E>
|
||||
// ino.end
|
||||
// ino.class.CSet.27435.body
|
||||
{
|
||||
// ino.method.addElement.27438.declaration
|
||||
public abstract void addElement(E element);
|
||||
// ino.end
|
||||
// ino.method.removeElement.27441.declaration
|
||||
public abstract void removeElement(E element);
|
||||
// ino.end
|
||||
// ino.method.unite.27444.declaration
|
||||
public abstract void unite(CSet<E> anotherSet);
|
||||
// ino.end
|
||||
// ino.method.subtract.27447.declaration
|
||||
public abstract void subtract(CSet<E> anotherSet);
|
||||
// ino.end
|
||||
// ino.method.shallowCopy.27450.declaration
|
||||
public abstract CSet<E> shallowCopy();
|
||||
// ino.end
|
||||
// ino.method.deepCopy.27453.declaration
|
||||
public abstract CSet<E> deepCopy();
|
||||
// ino.end
|
||||
// ino.method.contains.27456.declaration
|
||||
public abstract boolean contains(E element);
|
||||
// ino.end
|
||||
// ino.method.getCardinality.27459.declaration
|
||||
public abstract int getCardinality();
|
||||
// ino.end
|
||||
// ino.method.getIterator.27462.declaration
|
||||
public abstract Iterator<E> getIterator();
|
||||
// ino.end
|
||||
// ino.method.equals.27465.declaration
|
||||
public abstract boolean equals(Object obj);
|
||||
// ino.end
|
||||
|
||||
// ino.method.toString.27468.definition
|
||||
public String toString()
|
||||
// ino.end
|
||||
// ino.method.toString.27468.body
|
||||
{
|
||||
StringBuffer sb = new StringBuffer();
|
||||
sb.append("Set {\n");
|
||||
Iterator<E> it = this.getIterator();
|
||||
while(it.hasNext()){
|
||||
sb.append(it.next().toString());
|
||||
sb.append(",\n");
|
||||
}
|
||||
if(this.getCardinality()>0){
|
||||
sb.delete(sb.length()-2, sb.length()-1);
|
||||
}
|
||||
sb.append("}");
|
||||
return sb.toString();
|
||||
}
|
||||
// ino.end
|
||||
}
|
||||
// ino.end
|
@ -1,253 +0,0 @@
|
||||
// ino.module.CSubstitution.8685.package
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
// ino.end
|
||||
|
||||
// ino.module.CSubstitution.8685.import
|
||||
import java.util.Iterator;
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
|
||||
import de.dhbwstuttgart.logger.Logger;
|
||||
// ino.end
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import de.dhbwstuttgart.myexception.CTypeReconstructionException;
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.Type;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.Pair;
|
||||
|
||||
// ino.class.CSubstitution.27003.description type=javadoc
|
||||
/**
|
||||
* Implementierung einer Typsubstitution. Bildet eine zu ersetzende
|
||||
* <code>TypePlaceholder</code> auf einen Substitutions-Typ ab. Instanzen dieser
|
||||
* Klasse werden in der Regel aus
|
||||
* <code>Pair</code>-Objekten erzeugt.
|
||||
* @author Jörg Bäuerle
|
||||
* @version $Date: 2006/07/10 11:27:04 $
|
||||
*/
|
||||
// ino.end
|
||||
// ino.class.CSubstitution.27003.declaration
|
||||
public class CSubstitution
|
||||
// ino.end
|
||||
// ino.class.CSubstitution.27003.body
|
||||
{
|
||||
// ino.attribute.m_TypeVar.27006.declaration
|
||||
private TypePlaceholder m_TypeVar = null;
|
||||
// ino.end
|
||||
// ino.attribute.m_Type.27009.declaration
|
||||
protected Type m_Type = null;
|
||||
// ino.end
|
||||
// ino.attribute.inferencelog.27012.declaration
|
||||
protected static Logger inferencelog = Logger.getLogger("inference");
|
||||
// ino.end
|
||||
// ino.method.CSubstitution.27015.definition
|
||||
public CSubstitution()
|
||||
// ino.end
|
||||
// ino.method.CSubstitution.27015.body
|
||||
{
|
||||
this(null, null);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.CSubstitution.27018.definition
|
||||
public CSubstitution(TypePlaceholder typeVar, Type type)
|
||||
// ino.end
|
||||
// ino.method.CSubstitution.27018.body
|
||||
{
|
||||
m_TypeVar = typeVar;
|
||||
m_Type = type;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.CSubstitution.27021.definition
|
||||
public CSubstitution(Pair unifier)
|
||||
throws CTypeReconstructionException
|
||||
// ino.end
|
||||
// ino.method.CSubstitution.27021.body
|
||||
{
|
||||
if(!(unifier.TA1 instanceof TypePlaceholder)){
|
||||
throw new CTypeReconstructionException("Unifier enthält keinen Typeplaceholder",unifier.TA1);
|
||||
}
|
||||
m_TypeVar = (TypePlaceholder)unifier.TA1;
|
||||
m_Type = unifier.TA2;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
|
||||
// ino.method.getType.27024.defdescription type=javadoc
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @return Returns the Type.
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.getType.27024.definition
|
||||
public Type getType()
|
||||
// ino.end
|
||||
// ino.method.getType.27024.body
|
||||
{
|
||||
return m_Type;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.setType.27027.defdescription type=javadoc
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @param type The Type to set.
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.setType.27027.definition
|
||||
public void setType(Type type)
|
||||
// ino.end
|
||||
// ino.method.setType.27027.body
|
||||
{
|
||||
m_Type = type;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.getTypeVar.27030.defdescription type=javadoc
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @return Returns the TypeVar.
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.getTypeVar.27030.definition
|
||||
public Type getTypeVar()
|
||||
// ino.end
|
||||
// ino.method.getTypeVar.27030.body
|
||||
{
|
||||
return this.m_TypeVar;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.setTypeVar.27033.defdescription type=javadoc
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @param typeVar The TypeVar to set.
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.setTypeVar.27033.definition
|
||||
public void setTypeVar(TypePlaceholder typeVar)
|
||||
// ino.end
|
||||
// ino.method.setTypeVar.27033.body
|
||||
{
|
||||
m_TypeVar = typeVar;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.equals.27036.definition
|
||||
public boolean equals(Object obj)
|
||||
// ino.end
|
||||
// ino.method.equals.27036.body
|
||||
{
|
||||
if(obj instanceof CSubstitution){
|
||||
CSubstitution sub = (CSubstitution)obj;
|
||||
boolean ret = true;
|
||||
ret &= (m_TypeVar.equals(sub.m_TypeVar));
|
||||
ret &= (m_Type.equals(sub.m_Type));
|
||||
return ret;
|
||||
}
|
||||
else{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.toString.27039.definition
|
||||
public String toString()
|
||||
// ino.end
|
||||
// ino.method.toString.27039.body
|
||||
{
|
||||
//return m_TypeVar.getName() +" --> "+m_Type.getName();
|
||||
return m_TypeVar.toString() +" --> "+m_Type.toString();
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.clone.27042.definition
|
||||
public CSubstitution clone()
|
||||
// ino.end
|
||||
// ino.method.clone.27042.body
|
||||
{
|
||||
CSubstitution copy = new CSubstitution(m_TypeVar.clone(), m_Type.clone());
|
||||
return copy;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
|
||||
// ino.method.applyUnifier.27048.defdescription type=javadoc
|
||||
/**
|
||||
* Wendet den Unifier auf die rechte Seite dieser Substitution an.
|
||||
* <br/>Author: Jörg Bäuerle
|
||||
* @param unifier
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.applyUnifier.27048.definition
|
||||
public void applyUnifier(CSubstitutionSet unifier)
|
||||
// ino.end
|
||||
// ino.method.applyUnifier.27048.body
|
||||
{
|
||||
Iterator pairIt = unifier.getIterator();
|
||||
while(pairIt.hasNext()){
|
||||
CSubstitution subst = (CSubstitution)pairIt.next();
|
||||
|
||||
//korrigiert PL 05-07-31 das erste duerfte doch richtig sein.
|
||||
//subst.setType(this.applySubstitution(subst.getType(), subst));
|
||||
this.setType(this.applySubstitution(this.getType(), subst));
|
||||
}
|
||||
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.applySubstitution.27051.defdescription type=javadoc
|
||||
/**
|
||||
* Wendet die übergebene Substitution rekursiv auf den übergebenen Typ an.
* <br/>Author: Jörg Bäuerle
|
||||
* @param type Der zu untersuchende Typ
|
||||
* @param unifierSub Die anzuwendende Substitution
|
||||
* @return Den ermittelnden Typ
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.applySubstitution.27051.definition
|
||||
private Type applySubstitution(Type type, CSubstitution unifierSub)
|
||||
// ino.end
|
||||
// ino.method.applySubstitution.27051.body
|
||||
{
|
||||
if(type instanceof TypePlaceholder){
|
||||
if(type.equals(unifierSub.getTypeVar())){
|
||||
return unifierSub.getType();
|
||||
}
|
||||
}
|
||||
else if(type instanceof GenericTypeVar){
|
||||
if(type.equals(unifierSub.getTypeVar())){
|
||||
return unifierSub.getType();
|
||||
}
|
||||
}
|
||||
else if(type instanceof RefType){
|
||||
Menge<Type> paras = ((RefType)type).get_ParaList();
|
||||
if(paras != null){
|
||||
for(int i=0; i<paras.size(); i++){
|
||||
paras.setElementAt(this.applySubstitution((Type)paras.elementAt(i), unifierSub), i);
|
||||
}
|
||||
}
|
||||
}
|
||||
return type;
|
||||
}
|
||||
// ino.end
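applySubstitution replaces the substituted type variable wherever it occurs and otherwise recurses into the parameter list of a RefType. A standalone sketch of the same recursion over a tiny type model; the model is an assumption for illustration, not the compiler's Type hierarchy:

```java
import java.util.List;
import java.util.stream.Collectors;

// Tiny type model: a type is either a variable or a named type with parameters.
// apply() replaces the variable being substituted and recurses into parameter lists.
public class SubstitutionSketch {
    sealed interface Ty permits Var, Ref {}
    record Var(String name) implements Ty {}
    record Ref(String name, List<Ty> params) implements Ty {}

    static Ty apply(Ty type, Var typeVar, Ty replacement) {
        if (type instanceof Var v) {
            return v.equals(typeVar) ? replacement : v;      // the TypePlaceholder case
        }
        Ref r = (Ref) type;                                   // the RefType case: recurse
        List<Ty> params = r.params().stream()
                .map(p -> apply(p, typeVar, replacement))
                .collect(Collectors.toList());
        return new Ref(r.name(), params);
    }

    public static void main(String[] args) {
        Ty vektorOfA = new Ref("Vektor", List.of(new Var("A")));
        Ty result = apply(vektorOfA, new Var("A"), new Ref("Integer", List.of()));
        System.out.println(result); // Ref[name=Vektor, params=[Ref[name=Integer, params=[]]]]
    }
}
```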
|
||||
|
||||
// ino.method.applyThisSubstitution.27054.definition
|
||||
public Type applyThisSubstitution(Type type)
|
||||
// ino.end
|
||||
// ino.method.applyThisSubstitution.27054.body
|
||||
{
|
||||
return applySubstitution(type, this);
|
||||
}
|
||||
// ino.end
|
||||
}
|
||||
// ino.end
|
@ -1,70 +0,0 @@
|
||||
// ino.module.CSubstitutionGenVar.8686.package
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericTypeVar;
|
||||
// ino.end
|
||||
import de.dhbwstuttgart.syntaxtree.type.Type;
|
||||
|
||||
// ino.class.CSubstitutionGenVar.27057.description type=javadoc
|
||||
/**
|
||||
* Implementierung einer Typsubstitution der GenVar. Bildet eine zu ersetzende
|
||||
* <code>TypePlaceholder</code> auf einen Substitutions-Typ ab. Instanzen dieser
|
||||
* Klasse werden in der Regel aus
|
||||
* <code>Pair</code>-Objekten erzeugt.
|
||||
* @author Martin Plümicke
|
||||
* @version $Date: 2006/06/13 10:37:32 $
|
||||
*/
|
||||
// ino.end
|
||||
// ino.class.CSubstitutionGenVar.27057.declaration
|
||||
public class CSubstitutionGenVar extends CSubstitution
|
||||
// ino.end
|
||||
// ino.class.CSubstitutionGenVar.27057.body
|
||||
{
|
||||
// ino.attribute.m_TypeVar.27061.declaration
|
||||
private GenericTypeVar m_TypeVar = null;
|
||||
// ino.end
|
||||
|
||||
// ino.method.CSubstitutionGenVar.27064.definition
|
||||
public CSubstitutionGenVar()
|
||||
// ino.end
|
||||
// ino.method.CSubstitutionGenVar.27064.body
|
||||
{
|
||||
this(null, null);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.CSubstitutionGenVar.27067.definition
|
||||
public CSubstitutionGenVar(GenericTypeVar typeVar, Type type)
|
||||
// ino.end
|
||||
// ino.method.CSubstitutionGenVar.27067.body
|
||||
{
|
||||
m_TypeVar = typeVar;
|
||||
m_Type = type;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.getTypeVar.27070.defdescription type=javadoc
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @return Returns the TypeVar.
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.getTypeVar.27070.definition
|
||||
public Type getTypeVar()
|
||||
// ino.end
|
||||
// ino.method.getTypeVar.27070.body
|
||||
{
|
||||
return this.m_TypeVar;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.toString.27073.definition
|
||||
public String toString()
|
||||
// ino.end
|
||||
// ino.method.toString.27073.body
|
||||
{
|
||||
return this.m_TypeVar.getName() +" --> "+this.m_Type.getName();
|
||||
}
|
||||
// ino.end
|
||||
}
|
||||
// ino.end
|
@ -1,111 +0,0 @@
|
||||
// ino.module.CSubstitutionSet.8699.package
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
// ino.end
|
||||
|
||||
// ino.module.CSubstitutionSet.8699.import
|
||||
import java.util.Iterator;
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
|
||||
import de.dhbwstuttgart.myexception.CTypeReconstructionException;
|
||||
import de.dhbwstuttgart.syntaxtree.type.Type;
|
||||
import de.dhbwstuttgart.typeinference.Pair;
|
||||
|
||||
// ino.class.CSubstitutionSet.27471.description type=javadoc
|
||||
/**
|
||||
* @author Jörg Bäuerle
|
||||
* @version $Date: 2013/03/27 18:29:34 $
|
||||
*/
|
||||
// ino.end
|
||||
// ino.class.CSubstitutionSet.27471.declaration
|
||||
public class CSubstitutionSet extends CVectorSet<CSubstitution>
|
||||
// ino.end
|
||||
// ino.class.CSubstitutionSet.27471.body
|
||||
{
|
||||
// ino.method.CSubstitutionSet.27475.definition
|
||||
public CSubstitutionSet()
|
||||
// ino.end
|
||||
// ino.method.CSubstitutionSet.27475.body
|
||||
{
|
||||
super();
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.CSubstitutionSet.27478.definition
|
||||
public CSubstitutionSet(Menge<Pair> unifiers)
|
||||
throws CTypeReconstructionException
|
||||
// ino.end
|
||||
// ino.method.CSubstitutionSet.27478.body
|
||||
{
|
||||
super();
|
||||
for(int i=0; i<unifiers.size(); i++){
|
||||
this.addElement(new CSubstitution(unifiers.elementAt(i)));
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.shallowCopy.27481.definition
|
||||
public CSubstitutionSet shallowCopy()
|
||||
// ino.end
|
||||
// ino.method.shallowCopy.27481.body
|
||||
{
|
||||
CSubstitutionSet copy = new CSubstitutionSet();
|
||||
copy.setMenge((Menge)this.getMenge().clone());
|
||||
return copy;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.deepCopy.27484.definition
|
||||
public CSubstitutionSet deepCopy()
|
||||
// ino.end
|
||||
// ino.method.deepCopy.27484.body
|
||||
{
|
||||
CSubstitutionSet copy = new CSubstitutionSet();
|
||||
Iterator<CSubstitution> substIter = this.getIterator();
|
||||
while(substIter.hasNext()){
|
||||
copy.addElement(substIter.next().clone());
|
||||
}
|
||||
return copy;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.applyUnifier.27487.defdescription type=javadoc
|
||||
/**
|
||||
* Wendet den Unifier auf die rechten Seiten alle Substitutionen an.
|
||||
* <br/>Author: J�rg B�uerle
|
||||
* @param unifier
|
||||
*/
|
||||
// ino.end
|
||||
// ino.method.applyUnifier.27487.definition
|
||||
public void applyUnifier(CSubstitutionSet unifier)
|
||||
// ino.end
|
||||
// ino.method.applyUnifier.27487.body
|
||||
{
|
||||
Iterator<CSubstitution> substIt = this.getIterator();
|
||||
|
||||
while(substIt.hasNext()){
|
||||
substIt.next().applyUnifier(unifier);
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.applyThisSubstitutionSet.27490.definition
|
||||
public Type applyThisSubstitutionSet(Type type)
|
||||
// ino.end
|
||||
// ino.method.applyThisSubstitutionSet.27490.body
|
||||
{
|
||||
Iterator<CSubstitution> substIt = this.getIterator();
|
||||
Type ty = type;
|
||||
|
||||
while(substIt.hasNext()) {
|
||||
ty = substIt.next().applyThisSubstitution(ty);
|
||||
}
|
||||
return ty;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
|
||||
public Iterator<CSubstitution> iterator() {
|
||||
return this.getIterator();
|
||||
}
|
||||
}
|
||||
// ino.end
|
@ -1,165 +0,0 @@
|
||||
// ino.module.CMengeSet.8702.package
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
// ino.end
|
||||
|
||||
// ino.module.CMengeSet.8702.import
|
||||
import java.util.Iterator;
|
||||
import de.dhbwstuttgart.typeinference.Menge;
|
||||
// ino.end
|
||||
|
||||
// ino.class.CMengeSet.27519.description type=javadoc
|
||||
/**
|
||||
* @author Jörg Bäuerle
|
||||
* @version $Date: 2013/02/07 05:08:51 $
|
||||
*/
|
||||
// ino.end
|
||||
// ino.class.CMengeSet.27519.declaration
|
||||
public abstract class CVectorSet<E> extends CSet<E>
|
||||
// ino.end
|
||||
// ino.class.CMengeSet.27519.body
|
||||
{
|
||||
// ino.attribute.m_Elements.27523.declaration
|
||||
private Menge<E> m_Elements = null;
|
||||
// ino.end
|
||||
|
||||
// ino.method.CMengeSet.27526.definition
|
||||
public CVectorSet()
|
||||
// ino.end
|
||||
// ino.method.CMengeSet.27526.body
|
||||
{
|
||||
m_Elements = new Menge<E>();
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.addElement.27529.definition
|
||||
public void addElement(E element)
|
||||
// ino.end
|
||||
// ino.method.addElement.27529.body
|
||||
{
|
||||
m_Elements.addElement(element);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.removeElement.27532.definition
|
||||
public void removeElement(E element)
|
||||
// ino.end
|
||||
// ino.method.removeElement.27532.body
|
||||
{
|
||||
m_Elements.addElement(element);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
public void addAll( CVectorSet<E> set )
|
||||
{
|
||||
for( int i=0;i<set.getCardinality(); i++ ){
|
||||
m_Elements.addElement(set.m_Elements.elementAt(i));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ino.method.getIterator.27535.definition
|
||||
public Iterator<E> getIterator()
|
||||
// ino.end
|
||||
// ino.method.getIterator.27535.body
|
||||
{
|
||||
return m_Elements.iterator();
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.getMenge.27538.definition
|
||||
public Menge<E> getMenge()
|
||||
// ino.end
|
||||
// ino.method.getMenge.27538.body
|
||||
{
|
||||
return m_Elements;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.setMenge.27541.definition
|
||||
public void setMenge(Menge<E> elements)
|
||||
// ino.end
|
||||
// ino.method.setMenge.27541.body
|
||||
{
|
||||
m_Elements = elements;
|
||||
}
|
||||
// ino.end
|
||||
|
||||
/**
|
||||
* Fügt ein CMengeSet an!
|
||||
* Es handelt sich um eine Vereinigung (es werden keine bereits vorhandenen Elemente übernommen)
|
||||
* @param anotherSet Das hinzuzufügende CMengeSet (CSet wird ignoriert)
|
||||
*/
|
||||
// ino.method.unite.27544.definition
|
||||
public void unite(CSet<E> anotherSet)
|
||||
// ino.end
|
||||
// ino.method.unite.27544.body
|
||||
{
|
||||
if(!(anotherSet instanceof CVectorSet)){
|
||||
return;
|
||||
}
|
||||
CVectorSet<E> MengeSet = (CVectorSet<E>)anotherSet;
|
||||
|
||||
// Elemente der anderen Menge hinzuf�gen:
|
||||
Iterator<E> it = MengeSet.getIterator();
|
||||
while(it.hasNext()){
|
||||
E elem = it.next();
|
||||
if(!m_Elements.contains(elem)){
|
||||
m_Elements.addElement(elem);
|
||||
}
|
||||
}
|
||||
//m_Elements.addAll(MengeSet.m_Elements);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.subtract.27547.definition
|
||||
public void subtract(CSet<E> anotherSet)
|
||||
// ino.end
|
||||
// ino.method.subtract.27547.body
|
||||
{
|
||||
if(!(anotherSet instanceof CVectorSet)){
|
||||
return;
|
||||
}
|
||||
CVectorSet<E> MengeSet = (CVectorSet<E>)anotherSet;
|
||||
|
||||
// Elemente der anderen Menge entfernen:
|
||||
m_Elements.removeAll(MengeSet.m_Elements);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.contains.27550.definition
|
||||
public boolean contains(E element)
|
||||
// ino.end
|
||||
// ino.method.contains.27550.body
|
||||
{
|
||||
return m_Elements.contains(element);
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.equals.27553.definition
|
||||
public boolean equals(Object obj)
|
||||
// ino.end
|
||||
// ino.method.equals.27553.body
|
||||
{
|
||||
if(obj instanceof CVectorSet){
|
||||
CVectorSet tripSet= (CVectorSet)obj;
|
||||
boolean ret = true;
|
||||
ret &= (m_Elements.containsAll(tripSet.m_Elements));
|
||||
ret &= (tripSet.m_Elements.containsAll(m_Elements));
|
||||
return ret;
|
||||
}
|
||||
else{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
// ino.method.getCardinality.27556.definition
|
||||
public int getCardinality()
|
||||
// ino.end
|
||||
// ino.method.getCardinality.27556.body
|
||||
{
|
||||
return m_Elements.size();
|
||||
}
|
||||
// ino.end
|
||||
}
|
||||
// ino.end
|
@ -1,82 +0,0 @@
// ino.module.FC_TTO.8719.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end

// ino.module.FC_TTO.8719.import
import de.dhbwstuttgart.typeinference.Menge;

import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;

// ino.class.FC_TTO.28013.description type=javadoc
/**
 * Helper class for the unification algorithm
 * @author Martin Plümicke
 * @version $Date: 2013/05/12 14:00:05 $
 */
// ino.end
// ino.class.FC_TTO.28013.declaration
public class FC_TTO
// ino.end
// ino.class.FC_TTO.28013.body
{

// ino.attribute.FC.28016.declaration
Menge<Pair> FC;
// ino.end
// ino.attribute.TTO.28019.declaration
Menge TTO;
// ino.end

Menge<Class> CLASSVEC;

// ino.method.FC_TTO.28022.definition
public FC_TTO(Menge<Pair> fc, Menge tto, Menge<Class> classv)
// ino.end
// ino.method.FC_TTO.28022.body
{
this.FC = fc;
this.TTO = tto;
this.CLASSVEC = classv;
}
// ino.end

// ino.method.getFC.28025.definition
public Menge<Pair> getFC()
// ino.end
// ino.method.getFC.28025.body
{
return FC;
}
// ino.end

// ino.method.getTTO.28028.definition
public Menge getTTO()
// ino.end
// ino.method.getTTO.28028.body
{
return TTO;
}
// ino.end

public Menge<Class> getClasses()
{
return CLASSVEC;
}

@Override
public String toString(){
return "FC: "+getFC()+"\nTTO: "+getTTO()+"\nCLASSVEC: "+getClasses();
}

public void generateFullyNamedTypes(TypeAssumptions ass) {
for(Pair p : this.FC){
p.TA1 = p.TA1.TYPE(ass, p.TA1.getParent());//ass.getTypeFor(p.TA1, p.TA1.getParent()).getType();
p.TA2 = p.TA2.TYPE(ass, p.TA2.getParent());//ass.getTypeFor(p.TA2, p.TA2.getParent()).getType();
}
}
}
// ino.end
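For reference, the removed FC_TTO above bundles the finite-closure pairs (FC), the TTO Menge and the class list that the old Unify consumed. A minimal sketch of how such support data could be assembled, assuming the imports of the deleted file; the helper name buildSupportData and its parameters are illustrative, not code from this commit:

// Illustrative sketch only - not part of this commit.
public static FC_TTO buildSupportData(Menge<Pair> fc, Menge tto, Menge<Class> classes, TypeAssumptions assumptions) {
    FC_TTO supportData = new FC_TTO(fc, tto, classes);   // wrap the three Mengen
    supportData.generateFullyNamedTypes(assumptions);    // qualify the FC pair types against the assumptions
    return supportData;
}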
@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;

import java.util.List;
import java.util.Set;
@ -1,53 +0,0 @@
// ino.module.MUB.8720.package
package de.dhbwstuttgart.typeinference.unify;
// ino.end

// ino.module.MUB.8720.import
import de.dhbwstuttgart.typeinference.Menge;

import de.dhbwstuttgart.syntaxtree.type.Type;
import de.dhbwstuttgart.typeinference.Pair;

// ino.class.MUB.28031.declaration
public class MUB
// ino.end
// ino.class.MUB.28031.body
{
// ino.attribute.Mub.28034.declaration
Menge<? extends Type> Mub;
// ino.end
// ino.attribute.sigma.28037.declaration
Menge<Pair> sigma;
// ino.end

// ino.method.MUB.28040.definition
MUB(Menge<? extends Type> M, Menge<Pair> s)
// ino.end
// ino.method.MUB.28040.body
{
Mub = M;
sigma = s;
}
// ino.end

// ino.method.getUnifier.28043.definition
public Menge<Pair> getUnifier()
// ino.end
// ino.method.getUnifier.28043.body
{
return sigma;
}
// ino.end

// ino.method.getMub.28046.definition
public Menge<? extends Type> getMub()
// ino.end
// ino.method.getMub.28046.body
{
return Mub;
}
// ino.end

}
// ino.end
@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;

import java.util.HashMap;
import java.util.HashSet;
@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;

import java.util.AbstractMap;
import java.util.ArrayList;
@ -1,44 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.Menge;
import java.util.stream.Stream;

import de.dhbwstuttgart.typeinference.ConstraintsSet;
import de.dhbwstuttgart.typeinference.Pair;

public class ParallelUnify {

public ParallelUnify(ConstraintsSet constraints){
//constraints.getConstraints();
}

private CartesianProduct parallelCartProd(){

return null;
}

private UnifyResult parallelUnify(Menge<Pair> pairs, FC_TTO fc){
UnifyResult ret = new UnifyResult();
return ret;
}

public UnifyResult unify(){
UnifyResult ret = new UnifyResult();
return ret;
}

}

class ParallelConstraintSet extends ConstraintsSet{
Stream parallelGetConstraints(){
return null;
}
}

class UnifyResult{

}

class CartesianProduct{

}
@ -1,4 +1,4 @@
package de.dhbwstuttgart.typeinference.unifynew;
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.HashMap;
@ -1,11 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.Menge;

import de.dhbwstuttgart.typeinference.Pair;

public interface Unifier {

public Menge<Menge<Pair>> apply (Menge<Pair> E);

}
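The removed Unifier interface above maps one pair set to a Menge of alternative solution pair sets. A minimal sketch of an implementer (the class name IdentityUnifier and its trivial behaviour are illustrative assumptions, not part of this commit):

// Illustrative sketch only: returns the input as the single "solution".
public class IdentityUnifier implements Unifier {
    public Menge<Menge<Pair>> apply(Menge<Pair> E) {
        Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
        result.addElement(E);   // one alternative: the unchanged pair set
        return result;
    }
}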
4041
src/de/dhbwstuttgart/typeinference/unify/Unify.java
Executable file → Normal file
File diff suppressed because it is too large
@ -8,10 +8,10 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unifynew.MartelliMontanariUnify;

public class FiniteClosure implements IFiniteClosure {
@ -1,480 +0,0 @@
package de.dhbwstuttgart.typeinference.unifynew;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;
import de.dhbwstuttgart.typeinference.exceptions.NotImplementedException;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;


/**
 * Implementation of the type unification algorithm
 * @author Florian Steurer
 */
public class Unify {

public Set<Set<MPair>> unify(Set<MPair> eq, IFiniteClosure fc) {
/*
 * Step 1: Repeated application of reduce, adapt, erase, swap
 */

Set<MPair> eq0 = applyTypeUnificationRules(eq, fc);

/*
 * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
 */

Set<MPair> eq1s = new HashSet<>();
Set<MPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);

/*
 * Step 4: Create possible typings
 *
 * "Some authors identify the pairs (a, (b,c)) and ((a,b),c) with the ordered
 * triple (a,b,c), which also makes the Cartesian product associative." - Wikipedia
 */

// There are up to 10 top-level sets. 8 of the 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<MPair>>> topLevelSets = new ArrayList<>();

if(eq1s.size() != 0) {
Set<Set<MPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}

// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<MPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());

if(bufferSet.size() != 0) {
Set<Set<MPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
}

// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<Set<Set<MPair>>> secondLevelSets = calculatePairSets(eq2s, fc);

/* Up to here, no cartesian products are calculated.
 * Filters for pairs and sets can be applied here */

ISetOperations setOps = new GuavaSetOperations();

// Sub cartesian products of the second level (pattern matched) sets
for(Set<Set<MPair>> secondLevelSet : secondLevelSets) {
List<Set<MPair>> secondLevelSetList = new ArrayList<>(secondLevelSet);
topLevelSets.add(setOps.cartesianProduct(secondLevelSetList)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new)));
}

// Cartesian product over all (up to 10) top level sets
Set<Set<Set<MPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
.stream().map(x -> new HashSet<>(x))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(result);

/*
 * Step 5: Substitution
 */

/*
 * TODO flatten the result here already? (the old unify does it that way, I think)
 */
Set<Set<MPair>> eqPrimeSetFlat = new HashSet<>();
for(Set<Set<MPair>> setToFlatten : eqPrimeSet) {
Set<MPair> buffer = new HashSet<>();
setToFlatten.stream().forEach(x -> buffer.addAll(x));
eqPrimeSetFlat.add(buffer);
}

IRuleSet rules = new RuleSet(fc);
Set<Set<MPair>> changed = new HashSet<>();
Set<Set<MPair>> eqPrimePrimeSet = new HashSet<>();

for(Set<MPair> eqPrime : eqPrimeSetFlat) {
Optional<Set<MPair>> eqPrimePrime = rules.subst(eqPrime);

if(eqPrimePrime.isPresent())
changed.add(eqPrimePrime.get());
else
eqPrimePrimeSet.add(eqPrime);
}

/*
 * Step 6 a) Restart for pairs where subst was applied
 *        b) Build the union over everything
 */

for(Set<MPair> eqss : changed) {
eqPrimePrimeSet.addAll(this.unify(eqss, fc));
}

/*
 * Step 7: Filter result for solved pairs
 */
return eqPrimePrimeSet;

}

protected Set<MPair> applyTypeUnificationRules(Set<MPair> eq, IFiniteClosure fc) {

/*
 * Rule Application Strategy:
 *
 * 1. Swap all pairs and erase all erasable pairs
 * 2. Apply all possible rules to a single pair, then move it to the result set.
 *    Iterating over pairs first, then iterating over rules prevents the application
 *    of rules to a "finished" pair over and over.
 * 2.1 Apply all rules repeatedly except for erase rules. If
 *     the application of a rule creates new pairs, check immediately
 *     against the erase rules.
 */

LinkedHashSet<MPair> targetSet = new LinkedHashSet<MPair>();
LinkedList<MPair> eqQueue = new LinkedList<>();
IRuleSet rules = new RuleSet(fc);

/*
 * Swap all pairs and erase all erasable pairs
 */
eq.forEach(x -> swapAddOrErase(x, rules, eqQueue));

/*
 * Apply rules until the queue is empty
 */
while(!eqQueue.isEmpty()) {
MPair pair = eqQueue.pollFirst();

// ReduceUp, ReduceLow, ReduceUpLow
Optional<MPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);

// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), rules, eqQueue);
continue;
}

// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
Optional<Set<MPair>> optSet = rules.reduce1(pair);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);

// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, rules, eqQueue));
continue;
}

// Adapt, AdaptExt, AdaptSup
opt = rules.adapt(pair);
opt = opt.isPresent() ? opt : rules.adaptExt(pair);
opt = opt.isPresent() ? opt : rules.adaptSup(pair);

// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), rules, eqQueue);
continue;
}

// None of the rules has been applied
targetSet.add(pair);
}

return targetSet;
}

protected void swapAddOrErase(MPair pair, IRuleSet rules, Collection<MPair> collection) {
Optional<MPair> opt = rules.swap(pair);
MPair pair2 = opt.isPresent() ? opt.get() : pair;

if(rules.erase1(pair2) || rules.erase3(pair2) || rules.erase2(pair2))
return;

collection.add(pair2);
}

protected void splitEq(Set<MPair> eq, Set<MPair> eq1s, Set<MPair> eq2s) {
for(MPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}

protected Set<Set<Set<MPair>>> calculatePairSets(Set<MPair> eq2s, IFiniteClosure fc) {
List<Set<Set<MPair>>> result = new ArrayList<>();

// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());

for(MPair pair : eq2s) {

PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();

// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType)
result.get(0).add(unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), fc));

// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), fc));

// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, fc));

// Case 4: (a <.? Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));

// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, fc));

// Case 6: (? ext Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));

// Case 7: (? sup Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));

// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
result.get(7).add(unifyCase8(lhsType, (PlaceholderType) rhsType, fc));
}

return result.stream().filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}

protected Set<MPair> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
IUnify unify = new MartelliMontanariUnify();

Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());

for(UnifyType c : cs) {

// If the fc works according to the specification, this should no longer be necessary?
Set<UnifyType> thetaQs = fc.smaller(c).stream().filter(x -> x.getTypeParams().arePlaceholders()).collect(Collectors.toCollection(HashSet::new));
thetaQs.add(c); // reflexive

Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));

for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}

for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = unify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;

Unifier unifier = opt.get();
Set<Entry<PlaceholderType, UnifyType>> substitutions = unifier.getSubstitutions();
for (Entry<PlaceholderType, UnifyType> sigma : substitutions)
result.add(new MPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
smaller.stream().map(x -> new MPair(a, x, PairOperator.EQUALSDOT))
.forEach(x -> result.add(x));
}

}
}

return result;
}

protected Set<MPair> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
IUnify unify = new MartelliMontanariUnify();

UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());

for(UnifyType c : cs) {

// If the fc works according to the specification, this should no longer be necessary?
Set<UnifyType> thetaQs = fc.smaller(c).stream().filter(x -> x.getTypeParams().arePlaceholders()).collect(Collectors.toCollection(HashSet::new));
thetaQs.add(c); // reflexive

Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));

for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}

for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = unify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;

Unifier unifier = opt.get();
Set<Entry<PlaceholderType, UnifyType>> substitutions = unifier.getSubstitutions();
for (Entry<PlaceholderType, UnifyType> sigma : substitutions)
result.add(new MPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
for (UnifyType tq : thetaQs) {
ExtendsType extTq = new ExtendsType(tq);
Set<UnifyType> smaller = fc.smaller(unifier.apply(extTq));
smaller.stream().map(x -> new MPair(a, x, PairOperator.EQUALSDOT))
.forEach(x -> result.add(x));
}
}
}

return result;
}

protected Set<MPair> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
for(UnifyType theta : fc.smArg(subThetaPrime))
result.add(new MPair(a, theta, PairOperator.EQUALSDOT));
return result;
}

protected Set<MPair> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
result.add(new MPair(a, thetaPrime, PairOperator.EQUALSDOT));
return result;
}

protected Set<MPair> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
for(UnifyType thetaS : fc.greater(theta))
result.add(new MPair(a, thetaS, PairOperator.EQUALSDOT));
return result;
}

protected Set<MPair> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
for(UnifyType thetaS : fc.grArg(extTheta))
result.add(new MPair(a, thetaS, PairOperator.EQUALSDOT));
return result;
}

protected Set<MPair> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
IUnify unify = new MartelliMontanariUnify();

UnifyType theta = supTheta.getSuperedType();
Set<UnifyType> cs = fc.getAllTypesByName(theta.getName());

for(UnifyType c : cs) {

// If the fc works according to the specification, this should no longer be necessary?
Set<UnifyType> thetaQs = fc.smaller(c).stream().filter(x -> x.getTypeParams().arePlaceholders()).collect(Collectors.toCollection(HashSet::new));
thetaQs.add(c); // reflexive

Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));

for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}

for(UnifyType tqp : thetaQPrimes) {
Optional<Unifier> opt = unify.unify(tqp, theta);
if (!opt.isPresent())
continue;

Unifier unifier = opt.get();
Set<Entry<PlaceholderType, UnifyType>> substitutions = unifier.getSubstitutions();
for (Entry<PlaceholderType, UnifyType> sigma : substitutions)
result.add(new MPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
smaller.stream().map(x -> new MPair(a, new SuperType(x), PairOperator.EQUALSDOT))
.forEach(x -> result.add(x));
}

}
}

return result;
}

protected Set<MPair> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<MPair> result = new HashSet<>();
for(UnifyType thetaS : fc.grArg(theta))
result.add(new MPair(a, thetaS, PairOperator.EQUALSDOT));
return result;
}

protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}

private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}

Set<UnifyType> localCandidates = candidates.get(idx);

for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
}
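The Step 4 comments in the deleted Unify above describe taking the Cartesian product over the per-case candidate sets and flattening each combination into one candidate pair set (eqPrimeSetFlat). A self-contained sketch of that shape, using plain JDK collections and Strings in place of MPair; everything below is illustrative, not code from this commit:

import java.util.*;

public class CartesianSketch {
    // Pick one alternative from each top-level set (Cartesian product) and
    // flatten each combination into a single candidate set.
    static Set<Set<String>> productAndFlatten(List<Set<Set<String>>> topLevelSets) {
        Set<Set<String>> result = new HashSet<>();
        collect(topLevelSets, 0, new HashSet<>(), result);
        return result;
    }

    static void collect(List<Set<Set<String>>> sets, int idx, Set<String> current, Set<Set<String>> out) {
        if (idx == sets.size()) { out.add(new HashSet<>(current)); return; }
        for (Set<String> choice : sets.get(idx)) {
            Set<String> next = new HashSet<>(current);
            next.addAll(choice);                  // flatten the chosen alternative into the candidate
            collect(sets, idx + 1, next, out);
        }
    }

    public static void main(String[] args) {
        // Two "top level" sets, each offering alternatives (like the 8 pattern-matching cases).
        Set<Set<String>> case1 = new HashSet<>();
        case1.add(new HashSet<>(Arrays.asList("a =. Integer")));
        case1.add(new HashSet<>(Arrays.asList("a =. Number")));
        Set<Set<String>> case2 = new HashSet<>();
        case2.add(new HashSet<>(Arrays.asList("b =. String")));
        System.out.println(productAndFlatten(Arrays.asList(case1, case2))); // two flattened candidate sets
    }
}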
@ -8,13 +8,13 @@ import junit.framework.Assert;

import org.junit.Test;

import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.SimpleType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unifynew.RuleSet;


public class RuleSetTest {
@ -7,11 +7,11 @@ import junit.framework.Assert;

import org.junit.Test;

import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unifynew.MartelliMontanariUnify;

public class StandardUnifyTest {
@ -6,12 +6,12 @@ import java.util.Set;

import org.junit.Test;

import de.dhbwstuttgart.typeinference.unify.Unify;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.MPair;
import de.dhbwstuttgart.typeinference.unify.model.MPair.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unifynew.Unify;

public class UnifyTest extends Unify {