1
0

Compare commits

...

17 Commits

Author SHA1 Message Date
dab2087f0a BCEL aktualisiert 2016-06-17 14:21:30 +02:00
e19dee4c99 Umstellung auf Intellij IDEA 2016-06-17 13:55:14 +02:00
f88d58d548 Update für StackMapTable 2016-06-17 07:02:15 +02:00
9d9526ae72 Testen von StackMapTable-Generator 2016-06-15 20:58:55 +02:00
86a94dfae9 Merge branch 'unify' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into refactoring 2016-06-03 13:47:37 +02:00
fe35a1f56d Merge mit bytecode 2016-06-03 13:47:22 +02:00
52b6f438b0 Merge mit Bytecode 2016-05-25 18:18:05 +02:00
07ea25b5b7 Filtern von Constraints vor dem Karthesischen Produkt bilden 2016-05-25 17:46:33 +02:00
0d51c3e94d Bug 49 und 52 fix 2016-05-24 00:36:59 +02:00
b67816775e Merge branch 'refactoring' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into refactoring 2016-05-12 19:48:39 +02:00
748f25a2fd gen-TPH richtig konvertieren 2016-05-12 19:48:19 +02:00
318f6e1cc5 code review todos erledigt 2016-05-06 16:59:59 +02:00
223f6facc7 modified: ../../../src/de/dhbwstuttgart/bytecode/ClassGenerator.java
in addMethod :
String methodNameAndTypes = m.getName()+Arrays.toString(m.getArgumentTypes());
durch
String methodNameAndTypes = m.getReturnType().getSignature()+m.getName()+Arrays.toString(m.getArgumentTypes());
ersetzt
	modified:   OL.jav
Main-Klasse hinzugefuegt
2016-05-04 00:28:57 +02:00
edda41f546 Generic Type Var werden zu PlaceholderTypen konvertiert 2016-05-03 22:43:43 +02:00
5d760d4222 Aufruf im UnifyTest OverloadingVector ändern 2016-05-03 22:15:19 +02:00
005ed60c26 fixed vector 2016-05-03 21:07:52 +02:00
78794e377a fixed vector 2016-05-03 20:44:55 +02:00
34 changed files with 595 additions and 462 deletions

8
.idea/modules.xml generated Normal file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/JavaCompilerCore.iml" filepath="$PROJECT_DIR$/JavaCompilerCore.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated Normal file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

75
JavaCompilerCore.iml Normal file

@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="EclipseModuleManager">
<libelement value="jar://$MODULE_DIR$/lib/junit-4.0.jar!/" />
<libelement value="jar://$MODULE_DIR$/lib/cloning.jar!/" />
<libelement value="jar://$MODULE_DIR$/lib/guava-10.0.1.jar!/" />
<libelement value="jar://$MODULE_DIR$/lib/commons-bcel6-6.0-SNAPSHOT.jar!/" />
<libelement value="jar://$MODULE_DIR$/lib/bcel-6.0-SNAPSHOT.jar!/" />
<src_description expected_position="0">
<src_folder value="file://$MODULE_DIR$/src" expected_position="0" />
<src_folder value="file://$MODULE_DIR$/BCEL" expected_position="1" />
<src_folder value="file://$MODULE_DIR$/" expected_position="2" />
<src_folder value="file://$MODULE_DIR$/test" expected_position="3" />
</src_description>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/bin" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/test" isTestSource="false" />
</content>
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="jdk" jdkName="1.8" jdkType="JavaSDK" />
<orderEntry type="module-library">
<library name="junit-4.0.jar">
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/junit-4.0.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/junit/junit/4.0/junit-4.0-sources.jar!/" />
</SOURCES>
</library>
</orderEntry>
<orderEntry type="module-library">
<library name="cloning.jar">
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/cloning.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
<orderEntry type="module-library">
<library name="guava-10.0.1.jar">
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/guava-10.0.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
<orderEntry type="module-library">
<library name="commons-bcel6-6.0-SNAPSHOT.jar">
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/commons-bcel6-6.0-SNAPSHOT.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="file://$USER_HOME$/Development/intellijworkspace/bcel/src/main/java" />
</SOURCES>
</library>
</orderEntry>
<orderEntry type="module-library">
<library name="bcel-6.0-SNAPSHOT.jar">
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/bcel-6.0-SNAPSHOT.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
</component>
</module>

Binary file not shown.

@ -173,6 +173,7 @@ public class ClassGenerator extends ClassGen{
@Override
public void addMethod(Method m) {
String methodNameAndTypes = m.getReturnType().toString()+m.getName()+Arrays.toString(m.getArgumentTypes());
if(methodsNamesAndTypes.contains(methodNameAndTypes)){
return;
@ -190,5 +191,4 @@ public class ClassGenerator extends ClassGen{
return methodGenerator;
}
}

@ -70,7 +70,8 @@ public class MethodGenerator extends MethodGen{
il.append(factory.createReturn( org.apache.commons.bcel6.generic.Type.VOID));
}
}
method.getInstructionList().setPositions();
method.stripAttributes(true);
method.setMaxStack(); //Die Stack Größe automatisch berechnen lassen (erst nach dem alle Instructions angehängt wurden)
method.setMaxLocals();

File diff suppressed because it is too large Load Diff

@ -663,15 +663,6 @@ modifier : PUBLIC
classtype : classorinterfacetype
{
//PL 05-07-30 eingefuegt containedTypes ANFANG
RefType RT = new RefType($1.get_Name_1Element(),null,-1);
//RT.set_UsedId($1);
//RT.setName(RT.get_UsedId().get_Name_1Element());
RT.set_ParaList($1.get_RealParaList());
//RT.setName($1.get_Name_1Element());
containedTypes.addElement(RT);
//PL 05-07-30 eingefuegt containedTypes ENDE
$$ = $1;
}
@ -2026,6 +2017,7 @@ classinstancecreationexpression : NEW classtype '(' ')'
{
NewClass NC = new NewClass($2.getOffset(),$2.getVariableLength());
NC.set_UsedId($2);
usedIdsToCheck.addElement($2);
//auskommentiert von Andreas Stadelmeier (a10023) NC.setType(TypePlaceholder.fresh());
$$=NC;

@ -4,11 +4,14 @@ package de.dhbwstuttgart.syntaxtree;
// ino.module.SourceFile.8722.import
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
@ -49,7 +52,9 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.DebugException;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@ -195,7 +200,122 @@ public class SourceFile
InterfaceVektor.addElement((Interface) e);
}
}
/**
 * PL 2014-10-25
 * schnitt1 checks whether the type placeholders of {@code var} occur in any of the
 * elements of {@code vars}. The result is the set of indices of {@code vars} whose
 * intersection with {@code var} is non-empty, computed transitively: every newly
 * matched element is itself intersected against the remaining ones, so the method
 * collects one whole connected component.
 * @param var set of placeholders whose connected component is being collected
 * @param vars all placeholder sets; processed entries are destructively nulled out
 * @param indexe accumulator of indices already known to belong to the component
 * @return the accumulated indices of the connected component
 */
static Menge<Integer> schnitt1 (Menge<PlaceholderType> var, Menge<Menge<PlaceholderType>> vars, Menge<Integer> indexe) {
int j = -1;
for (Menge<PlaceholderType> varelems : vars) {
j++;
if (varelems != null) {
// true iff var and varelems share at least one placeholder
if (var.stream().map(x -> varelems.contains(x)).reduce(false, (a,b) -> (a || b))
&& (!indexe.contains(j)))
{
Menge<PlaceholderType> rekvarelements = vars.elementAt(j);
vars.setElementAt(null, j);//element is done; must not be processed again
indexe.addElement(j);
// recurse so that indirectly connected sets join the same component
indexe = schnitt1(rekvarelements, vars, indexe);
}
}
}
return indexe;
}
/**
 * Builds the intersection groups of the sets of type placeholders: all sets that
 * transitively share at least one placeholder are collapsed into one group.
 * The result is the set of index sets, one per connected component.
 * Note: {@code vars} is modified destructively (entries are nulled by schnitt1).
 * @param vars the sets of placeholders to group
 * @return the set of index sets that form intersection components
 */
public static Menge<Menge<Integer>> schnitt (Menge<Menge<PlaceholderType>> vars) {
Menge<Menge<Integer>> ret = new Menge<>();
int i = -1;
for (Menge<PlaceholderType> var : vars) {
i++;
if (var != null) {//null means already handled by an earlier component
Menge<Integer> indexe = new Menge<>();
indexe.add(i);
ret.add(schnitt1(var, vars, indexe));
}
}
return ret;
}
/**
 * Splits the constraints into connected components (pairs that share type
 * variables), unifies each component separately, and combines the per-component
 * solutions via a cartesian product. Steps 1-5 are marked inline below.
 * @param constraints the constraint pairs to unify
 * @param finiteClosure the finite closure the unification algorithm works against
 * @return the combined solution sets; empty if any component is unsolvable
 */
public static Set<Set<UnifyPair>> cartesianProduct(List<UnifyPair> constraints, FiniteClosure finiteClosure){
//IDEA: build connected components of pairs that share variables
// and unify only those connected components in steps 1 - 5
//Step 1: collect all variables occurring in the pairs
Menge<Menge<PlaceholderType>> constraintsclonevars = constraints.stream().map(p -> {Menge<PlaceholderType> TPHs = new Menge<>();
TPHs.addAll(p.getInvolvedPlaceholderTypes());
//NOTE(review): the identical addAll appears twice; the older inline version added
//p.TA1 and p.TA2 separately — presumably one call suffices now. Confirm, then drop one.
TPHs.addAll(p.getInvolvedPlaceholderTypes());
return TPHs;}
).collect(Menge::new, Menge::add, Menge::addAll);
//Step 2: intersect every element with every element of vars and merge the indices.
//indexeset then holds the sets of indices that must be unified together.
Menge<Menge<Integer>> indexeset = new Menge<>();
if (constraintsclonevars != null && constraintsclonevars.size()>0) {
indexeset = SourceFile.schnitt(constraintsclonevars);
}
//Step 3: map the index sets back to their pair elements.
//streamconstraintsclone holds the sets of pairs that must be unified together.
Stream<Menge<UnifyPair>> streamconstraintsclone = indexeset.stream().<Menge<UnifyPair>>map(x -> x.stream()
.<UnifyPair>map(i -> constraints.get(i))
.<Menge<UnifyPair>>collect(Menge::new, Menge::add, Menge::addAll));
//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
//System.out.println();
//Step 4: unification of each component
Menge<Set<Set<UnifyPair>>> vecunifyResult =
//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
//DEBUG variant
streamconstraintsclone.map(x ->
{ Set<Set<UnifyPair>> z = new TypeUnify().unify(x, finiteClosure);
return z;
}
).collect(Menge::new, Menge::add, Menge::addAll);
//card gives the cardinality of the unified sets (kept for debugging only)
Menge<Integer> card = vecunifyResult.stream().map(x -> x.size()).collect(Menge::new, Menge::add, Menge::addAll);
;//.reduce(1,(a,b) -> { if ((a > 0) && (b > 0)) return (a * b); else return 1; });
//Step 5: build the cartesian product
//should be removed again: continue working with:
//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
Set<Set<UnifyPair>> cardprodret_start = new Menge<>();
cardprodret_start.add(new Menge<UnifyPair>());
//cartesian product with left shift
Set<Set<UnifyPair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
Set<Set<UnifyPair>> cardprodret= new Menge<>();
if (y.size() > 0) {
//System.out.println(y);
//Menge<Menge<Pair>> cardprodretold = x;
//cardprodret = new Menge<>();
for(Set<UnifyPair> xElement : x) {
for (Set<UnifyPair> yElement : y){
Set<UnifyPair> help = new Menge<>();
help.addAll(yElement);
help.addAll(xElement);
cardprodret.add(help);
}
}
}
else
return new Menge<>(); //no unified result, so the overall result becomes []
return cardprodret;
});
return unifyResult;
}
/////////////////////////////////////////////////////////////////////////
// TypeReconstructionAlgorithmus
@ -232,11 +352,6 @@ public class SourceFile
globalAssumptions.add(importAssumptions);
typinferenzLog.debug("Von JRE erstellte Assumptions: "+importAssumptions, Section.TYPEINFERENCE);
//FiniteClosure generieren:
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(globalAssumptions);
typinferenzLog.debug("FiniteClosure: \n"+finiteClosure, Section.TYPEINFERENCE);
ConstraintsSet oderConstraints = new ConstraintsSet();
//Alle Constraints der in dieser SourceFile enthaltenen Klassen sammeln:
for(Class klasse : KlassenVektor){
@ -249,6 +364,11 @@ public class SourceFile
*/////////////////
//UnifyTypeFactory.convert(oderConstraints);
//FiniteClosure generieren:
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(globalAssumptions);
typinferenzLog.debug("FiniteClosure: \n"+finiteClosure, Section.TYPEINFERENCE);
////////////////
//Typen in UnifyTypen umwandeln:
@ -256,13 +376,11 @@ public class SourceFile
UnifyConstraintsSet unifyConstraints = UnifyTypeFactory.convert(oderConstraints);
//Unmögliche ConstraintsSets aussortieren durch Unifizierung
Function<Menge<UnifyPair>,Menge<Menge<UnifyPair>>> unifier = (pairs)->{
Menge<Menge<UnifyPair>> retValue = new Menge<>();
Set<Set<UnifyPair>> unifiedPairs = new TypeUnify().unify(pairs, finiteClosure);
return retValue;};
//oderConstraints.filterWrongConstraints(unifier);
Unifikationsalgorithmus unifier = (pairs)->new TypeUnify().unify(pairs, finiteClosure);
//oderConstraints.unifyUndConstraints(unifier); //rausgeworfen für Tests (08.12.2015)
unifyConstraints.filterWrongConstraints(unifier);
unifyConstraints.unifyUndConstraints(unifier); //rausgeworfen für Tests (08.12.2015)
typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
@ -273,7 +391,9 @@ public class SourceFile
////////////////
Set<Set<UnifyPair>> xConstraints = unifyConstraints.cartesianProduct();
//Sets zu Listen umwandeln:
//Set<List<UnifyPair>> allUnifiedConstraints = xConstraints.stream().map((set)-> new ArrayList<>(set)).collect(Menge::new, Menge::add, Menge::addAll);;
typinferenzLog.debug("Finite Closure: "+finiteClosure, Section.TYPEINFERENCE);
typinferenzLog.debug("Karthesisches Produkt der Constraints: "+xConstraints, Section.TYPEINFERENCE);
@ -286,74 +406,7 @@ public class SourceFile
boolean unifyFail = true;
for(Set<UnifyPair> constraints : xConstraints){
//Alle durch das Karthesische Produkt entstandenen Möglichkeiten durchgehen:
//Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
//IDEE: Man bildet Zusammenhangskomponenten von Paaren, die gemeinsame Variablen haben
// und unifizert nur die Zusammenhangskomponenten in Schritten 1 - 5
/*
//Schritt 1: Alle Variablen in den Paaren von Elementen einsammeln
Menge<Menge<TypePlaceholder>> constraintsclonevars = constraints.stream().map(p -> {Menge<TypePlaceholder> TPHs = new Menge<>();
TPHs.addAll(p.TA1.getInvolvedTypePlaceholder());
TPHs.addAll(p.TA2.getInvolvedTypePlaceholder());
return TPHs;}
).collect(Menge::new, Menge::add, Menge::addAll);
//Schritt 2: Schnittmengen jedes Elements mit jedem Elememt von vars bilden und dann index zusammenfassen
//in indexset sind dann die Mengen von Indizes enthalten, die gemeisam unifiziert wreden müssen
Menge<Menge<Integer>> indexeset = new Menge<>();
if (constraintsclonevars != null && constraintsclonevars.size()>0) {
indexeset = Unify.schnitt(constraintsclonevars);
}
//Schritt 3: Umwandlung der Indizes in die zugehoerigen Elemente
// In streamconstraintsclone sind die Mengen von Paar enthalten die unifiziert werden muessen
Stream<Menge<MPair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
.map(i -> constraintsClone.elementAt(i))
.<Menge<MPair>>collect(Menge::new, Menge::add, Menge::addAll));
//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
//System.out.println();
//Schritt 4: Unifikation
Set<Set<Set<MPair>>> vecunifyResult =
//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
//DEBUG-Variante
streamconstraintsclone.map(x ->
{ Set<Set<MPair>> z = new Unify().unify(x, finiteClosure);
return z;
}
).collect(Menge::new, Menge::add, Menge::addAll);
//card gibt die Cardinalitaet der unifizierten Mengen an
Menge<Integer> card = vecunifyResult.stream().map(x -> x.size()).collect(Menge::new, Menge::add, Menge::addAll);
;//.reduce(1,(a,b) -> { if ((a > 0) && (b > 0)) return (a * b); else return 1; });
//Schritt 5: Bildung des cartesischen Produkts
//sollte wieder entfernt werden: Weiterarbeit mit:
//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
Set<Set<Pair>> cardprodret_start = new Menge<>();
cardprodret_start.add(new Menge<Pair>());
//cart. Produkt mit Linkverschiebung
Set<Set<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
Set<Set<Pair>> cardprodret= new Menge<>();
if (y.size() > 0) {
//System.out.println(y);
//Menge<Menge<Pair>> cardprodretold = x;
//cardprodret = new Menge<>();
for(int j = 0; j < x.size(); j++) {
for (int k = 0; k < y.size(); k++){
Set<Pair> help = new Menge<>();
help.addAll(y.elementAt(k));
help.addAll(x.elementAt(j));
cardprodret.add(help);
}
}
}
else
return new Menge<>(); //kein unifiziertes Ergebnis, damit wird das Geseamtergebnis []
return cardprodret;
});
*/
typinferenzLog.debug("\nUnifiziere Constraints:\n"+constraints, Section.TYPEINFERENCE);
typinferenzLog.debug("\nFC:\n"+finiteClosure, Section.TYPEINFERENCE);
long start = System.currentTimeMillis();
@ -361,7 +414,7 @@ public class SourceFile
long time = System.currentTimeMillis()-start;
typinferenzLog.debug("\nErgebnis der Unifizierung:\n"+unifyResult, Section.TYPEINFERENCE);
typinferenzLog.debug("\nAnzahl Lösungen:\n"+unifyResult.size(), Section.TYPEINFERENCE);
typinferenzLog.debug("\nZeit für Unifizierung: "+time + "ms", Section.TYPEINFERENCE);
//typinferenzLog.debug("\nZeit für Unifizierung: "+time + "ms", Section.TYPEINFERENCE);
Menge<Menge<Pair>> convertedResult = unifyResult.parallelStream().<Menge<Pair>>map((Set<UnifyPair> resultSet)->{

@ -50,10 +50,10 @@ public class UnifyTypeFactory {
public static FiniteClosure generateFC(TypeAssumptions fromAss){
HashSet<UnifyPair> pairs = new HashSet<>();
for(ClassAssumption cAss : fromAss.getClassAssumptions()){
UnifyType tl = UnifyTypeFactory.convert(cAss.getAssumedClass().getType());
RefType superClass = cAss.getAssumedClass().getSuperClass();
UnifyType tl = UnifyTypeFactory.convert(cAss.getAssumedClass().getType().TYPE(fromAss, cAss.getAssumedClass()));
Type superClass = cAss.getAssumedClass().getSuperClass();
if(superClass != null){
UnifyType tr = UnifyTypeFactory.convert(superClass);
UnifyType tr = UnifyTypeFactory.convert(superClass.TYPE(fromAss, cAss.getAssumedClass()));
pairs.add(generateSmallerPair(tl, tr));
}
}
@ -125,8 +125,8 @@ public class UnifyTypeFactory {
}
public static UnifyType convert(GenericTypeVar t){
//return new PlaceholderType(t.get_Name());
return new ReferenceType(t.get_Name());
return new PlaceholderType(TypePlaceholder.fresh(NULL_NODE).get_Name());
//return new ReferenceType(t.get_Name());
}
public static UnifyConstraintsSet convert(ConstraintsSet constraints) {
@ -197,7 +197,7 @@ public class UnifyTypeFactory {
public static Type convert(PlaceholderType t) {
TypePlaceholder ret = TypePlaceholder.getInstance(t.getName());
if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt
ret = TypePlaceholder.fresh(NULL_NODE);
ret = TypePlaceholder.fresh(t.getName(), NULL_NODE);
}
return ret;
}

@ -95,7 +95,9 @@ public abstract class Operator extends SyntaxTreeNode
}else if(returnType.equals("java.lang.Long")){
il.append(_cg.getInstructionFactory().createInvoke("java.lang.Long", "longValue", org.apache.commons.bcel6.generic.Type.LONG, new org.apache.commons.bcel6.generic.Type[] {}, Constants.INVOKEVIRTUAL));
}else if(returnType.equals("java.lang.String")){
throw new NotImplementedException(returnType);
}else if(returnType.equals("java.lang.Boolean")){
il.append(_cg.getInstructionFactory().createInvoke("java.lang.Boolean", "booleanValue", org.apache.commons.bcel6.generic.Type.BOOLEAN, new org.apache.commons.bcel6.generic.Type[] {}, Constants.INVOKEVIRTUAL));
}else{
throw new NotImplementedException(returnType);
}

@ -1,10 +1,12 @@
// ino.module.NewClass.8642.package
package de.dhbwstuttgart.syntaxtree.statement;
import java.util.ArrayList;
// ino.end
// ino.module.NewClass.8642.import
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.bcel6.Constants;
import org.apache.commons.bcel6.generic.ClassGen;
@ -23,6 +25,7 @@ import de.dhbwstuttgart.myexception.NotImplementedException;
import de.dhbwstuttgart.myexception.SCExcept;
import de.dhbwstuttgart.myexception.SCStatementException;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.misc.UsedId;
import de.dhbwstuttgart.syntaxtree.type.FunN;
@ -60,6 +63,7 @@ public class NewClass extends Expr
// ino.attribute.arglist.25837.declaration
private ArgumentList arglist;
private boolean isStatement = false;
private List<Type> parameterList = new ArrayList<>();
// ino.end
// ino.attribute.parserlog.25840.declaration
protected static Logger parserlog = Logger.getLogger("parser");
@ -131,7 +135,6 @@ public class NewClass extends Expr
@Override
public ConstraintsSet TYPEExpr(TypeAssumptions assumptions) {
//TODO: Das hier noch vervollständigen
ConstraintsSet ret = new ConstraintsSet();
UndConstraint callConstraints = new UndConstraint();
@ -144,7 +147,19 @@ public class NewClass extends Expr
ret.add(this.arglist.expr.elementAt(i).TYPEExpr(assumptions));
callConstraints.addConstraint( this.arglist.expr.elementAt(i).getType().TYPE(assumptions, this), cA.getParameterType(i).TYPE(assumptions, this));
}
Type thisT = assumptions.checkType(new RefType(this.get_Name(),this,0), (SyntaxTreeNode)this);
//Die Parameterliste befindet sich in der UsedID:
for(Type param : this.usedid.get_ParaList()){
this.parameterList.add(param.TYPE(assumptions, this));
}
//Den Typ der Instruktion setzen:
RefType refTypeToCheck;
if(this.parameterList.size() == 0){
refTypeToCheck = new RefType(this.get_Name(),this,0);
}else{
refTypeToCheck = new RefType(this.get_Name(),this.parameterList,this,0);
}
Type thisT = assumptions.checkType(refTypeToCheck, (SyntaxTreeNode)this);
this.setType(thisT);
return ret;

@ -287,7 +287,7 @@ public class RuleSet implements IRuleSet{
if(lhsSType.getTypeParams().empty())
return Optional.empty();
UnifyType rhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
ReferenceType rhsSType;
if(rhsType instanceof ReferenceType)

@ -410,8 +410,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the aboves form are contradictory.
else
undefined.add(pair);
else {
// If a pair is not defined, the unificiation will fail, so the loop can be stopped here.
undefined.add(pair);
break;
}
}
// Filter empty sets or sets that only contain an empty set.
@ -425,9 +428,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = true;
boolean allGen = thetaPrime.getTypeParams().size() > 0;
for(UnifyType t : thetaPrime.getTypeParams())
if(t instanceof PlaceholderType && !((PlaceholderType) t).isGenerated()) {
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
@ -437,7 +440,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
for(UnifyType c : cs) {
Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
thetaQs.add(thetaPrime);
//thetaQs.add(thetaPrime);
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
@ -493,7 +496,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
protected Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
private Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
@ -513,7 +516,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
protected Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
private Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
@ -531,27 +534,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return result;
}
/**
 * Cartesian Product Case 4: (a <.? Theta')
 * Resolved by directly equating the placeholder with Theta'.
 */
protected Set<Set<UnifyPair>> unifyCase4(PlaceholderType a, UnifyType thetaPrime, IFiniteClosure fc) {
    Set<UnifyPair> equated = new HashSet<>();
    equated.add(new UnifyPair(a, thetaPrime, PairOperator.EQUALSDOT));
    Set<Set<UnifyPair>> outcome = new HashSet<>();
    outcome.add(equated);
    return outcome;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
protected Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
private Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = true;
boolean allGen = theta.getTypeParams().size() > 0;
for(UnifyType t : theta.getTypeParams())
if(t instanceof PlaceholderType && !((PlaceholderType) t).isGenerated()) {
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
@ -574,44 +565,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return result;
}
/**
 * Cartesian Product Case 6: (? ext Theta <.? a)
 * Introduces a fresh placeholder b and emits the pairs a = ? extends b
 * and Theta <. b.
 */
protected Set<Set<UnifyPair>> unifyCase6(ExtendsType extTheta, PlaceholderType a, IFiniteClosure fc) {
    UnifyType fresh = PlaceholderType.freshPlaceholder();
    UnifyType extFresh = new ExtendsType(fresh);
    Set<UnifyPair> pairs = new HashSet<>();
    pairs.add(new UnifyPair(a, extFresh, PairOperator.EQUALSDOT));
    pairs.add(new UnifyPair(extTheta.getExtendedType(), fresh, PairOperator.SMALLERDOT));
    Set<Set<UnifyPair>> outcome = new HashSet<>();
    outcome.add(pairs);
    return outcome;
}
/**
 * Cartesian Product Case 7: (? sup Theta <.? a)
 * Introduces a fresh placeholder a' and emits the pairs a = ? super a'
 * and a' <. Theta.
 */
protected Set<Set<UnifyPair>> unifyCase7(SuperType supTheta, PlaceholderType a, IFiniteClosure fc) {
    UnifyType freshA = PlaceholderType.freshPlaceholder();
    UnifyType supFreshA = new SuperType(freshA);
    Set<UnifyPair> pairs = new HashSet<>();
    pairs.add(new UnifyPair(a, supFreshA, PairOperator.EQUALSDOT));
    pairs.add(new UnifyPair(freshA, supTheta.getSuperedType(), PairOperator.SMALLERDOT));
    Set<Set<UnifyPair>> outcome = new HashSet<>();
    outcome.add(pairs);
    return outcome;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
protected Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
private Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();

@ -1,5 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@ -81,4 +83,5 @@ public final class ExtendsType extends WildcardType {
return "? extends " + wildcardedType;
}
}

@ -26,7 +26,7 @@ public class FiniteClosure implements IFiniteClosure {
/**
* A map that maps every typename to the nodes of the inheritance graph that contain a type with that name.
*/
private HashMap<String, HashSet<Node<UnifyType>>> strInheritanceGraph;
private HashMap<String, Set<Node<UnifyType>>> strInheritanceGraph;
/**
* The initial pairs of that define the inheritance tree
@ -251,7 +251,6 @@ public class FiniteClosure implements IFiniteClosure {
@Override
public Set<UnifyType> grArg(FunNType type) {
// TODO ist das richtig?
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type).forEach(x -> result.add(new SuperType(x)));
@ -298,7 +297,6 @@ public class FiniteClosure implements IFiniteClosure {
@Override
public Set<UnifyType> smArg(FunNType type) {
// TODO ist das richtig?
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
return result;

@ -1,5 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
@ -106,4 +108,12 @@ public final class PlaceholderType extends UnifyType{
return ((PlaceholderType) obj).getName().equals(typeName);
}
/** A placeholder's involved placeholders are just the placeholder itself. */
@Override
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
    ArrayList<PlaceholderType> onlyThis = new ArrayList<>(1);
    onlyThis.add(this);
    return onlyThis;
}
}

@ -1,6 +1,8 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import de.dhbwstuttgart.typeinference.Menge;
@ -168,5 +170,13 @@ public final class TypeParams implements Iterable<UnifyType>{
res += t + ",";
return "<" + res.substring(0, res.length()-1) + ">";
}
/** Collects the placeholders occurring in any of the contained type parameters. */
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
    ArrayList<PlaceholderType> collected = new ArrayList<>();
    for (UnifyType param : typeParams) {
        collected.addAll(param.getInvolvedPlaceholderTypes());
    }
    return collected;
}
}

@ -1,5 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* A pair which contains two types and an operator, e.q. (Integer <. a).
* @author Florian Steurer
@ -83,6 +87,13 @@ public class UnifyPair {
public String toString() {
return "(" + lhs + " " + pairOp + " " + rhs + ")";
}
/** Returns the placeholders occurring on either side of this pair, lhs first. */
public List<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
    ArrayList<PlaceholderType> both = new ArrayList<>(lhs.getInvolvedPlaceholderTypes());
    both.addAll(rhs.getInvolvedPlaceholderTypes());
    return both;
}
}

@ -1,5 +1,8 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@ -87,4 +90,10 @@ public abstract class UnifyType {
return typeName + params;
}
/** Default implementation: the involved placeholders are those of the type parameters. */
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
    return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
}
}

@ -1,5 +1,8 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
/**
* A wildcard type that is either a ExtendsType or a SuperType.
* @author Florian Steurer
@ -53,4 +56,12 @@ public abstract class WildcardType extends UnifyType {
WildcardType other = (WildcardType) obj;
return other.getWildcardedType().equals(wildcardedType);
}
/** The placeholders of a wildcard are exactly those of the wildcarded type. */
@Override
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
    return new ArrayList<>(wildcardedType.getInvolvedPlaceholderTypes());
}
}

@ -2,6 +2,7 @@ package KomplexeMenge;
import static org.junit.Assert.*;
import java.util.Set;
import java.util.Vector;
import org.junit.Test;
@ -36,7 +37,7 @@ class TestKlasseOderMenge extends OderMenge<TestKlasse>{
}
public void addItem(TestKlasse string) {
TestUndMenge toAdd = new TestKlasseUndMenge();
TestKlasseUndMenge toAdd = new TestKlasseUndMenge();
toAdd.addItem(string);
set.add(toAdd);
}
@ -61,24 +62,28 @@ class TestKlasseUndMenge extends UndMenge<TestKlasse>{
}
}
/**
* @Depreciated
* Tests möglicherweise obsolet
*/
public class KeineDoppeltenVerweise {
@Test
public void test() {
OderMenge<TestKlasse> oM1 = new TestOderMenge();
OderMenge<TestKlasse> oM2 = new OderMenge<>();
UndMenge<TestKlasse> oM3 = new UndMenge<>();
TestKlasseOderMenge oM1 = new TestKlasseOderMenge();
TestKlasseOderMenge oM2 = new TestKlasseOderMenge();
TestKlasseUndMenge oM3 = new TestKlasseUndMenge();
oM1.addItem(new TestKlasse("Menge 1, Item 1"));
oM1.addItem(new TestKlasse("Menge 1, Item 2"));
oM2.addItem(new TestKlasse("Menge 2, Item 1"));
oM2.addItem(new TestKlasse("Menge 2, Item 2"));
oM3.addItems(oM1);
oM3.addItems(oM2);
Menge<Menge<TestKlasse>> cP = oM3.cartesianProduct();
Set<Set<TestKlasse>> cP = oM3.cartesianProduct();
System.out.println(cP);
cP.firstElement().firstElement().name="neu";
cP.iterator().next().iterator().next().name="neu";
System.out.println(cP);
check(cP);
//check(cP); //TODO Muss neu implementiert werden
}
private <A> void check(Menge<Menge<A>> cP){

@ -1,9 +0,0 @@
class IfTest{
public static void main(String[] args){
System.out.println(new IfElseStatement().method(true));
System.out.println(new IfElseStatement().method(false));
}
}

@ -21,6 +21,7 @@ public abstract class SourceFileBytecodeTest extends TestCase{
protected String testName;
public SourceFileBytecodeTest(){
super("Bytecode Test");
init();
if(testName != null){
@ -29,6 +30,7 @@ public abstract class SourceFileBytecodeTest extends TestCase{
outputDirectory = "";
SingleClassTester.compileToBytecode(rootDirectory+testFile, rootDirectory+outputDirectory);
System.out.println("Test");
}else{
throw new RuntimeException("rootDirectory, testFile or outputFile is null.");
}

@ -1,27 +0,0 @@
/**
* Diese Klasse testet die generierte EmptyClass.class-Datei
*/
class Test{
public static void main(String[] args){
new EmptyClass();
new Assign();
System.out.println(new Return().method());
new MethodCall().method();
System.out.println(new FieldDeclaration().field);
System.out.println(new Runnable().method().apply());
Runnable r = new Runnable().method().apply();
Test t = new Identity<Test,Test>().op.apply(new Test());
System.out.println(t);
}
}
class Test2<X>{
Fun1<? extends X,? super X> m() {
Fun1<? extends X,? super X> f = new Identity<X,X>().op;
return f;
}
public static void main(String[] args){
Matrix2<Integer,Integer> m2 = new Matrix2<>();
System.out.println(m2.op.apply((Integer x) -> x));
}
}

@ -1,6 +0,0 @@
class Test2{
public static void main(java.lang.String[] args){
new NewStatement(1).method();
}
}

@ -1,6 +0,0 @@
class Test3{
public static void main(String[] args){
System.out.println(new LambdaExpr2Test().op.apply());
}
}

@ -1,8 +0,0 @@
class TestStackMap{
public static void main(String[] args){
IfElseStatement test = new IfElseStatement();
System.out.println(test.method(false));
}
}

@ -1,8 +1,14 @@
package bytecode;
import org.junit.Test;
import org.junit.runners.Suite;
public class WhileTest extends SourceFileBytecodeTest{
public WhileTest(){
super();
}
@Override
protected void init() {
testName = "WhileTest";

@ -1,6 +1,18 @@
class OL {
m(x) { return x + x; }
m(Boolean x) {return x; }
m(x) { return x + x; }
m(Boolean x) { return x; }
}
class Main {
main(x) {
ol;
ol = new OL();
return ol.m(x);
}
}

@ -7,6 +7,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.Menge;
import org.junit.Test;
@ -43,8 +44,9 @@ public class InsertSingleTypeTest {
String inferedSource = "";
MyCompilerAPI compiler = MyCompiler.getAPI(new LoggerConfiguration().setOutput(Section.TYPEINFERENCE, System.out));
try {
compiler.parse(new File(rootDirectory + sourceFileToInfere));
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction();
SourceFile parsed = compiler.parse(new File(rootDirectory + sourceFileToInfere));Menge<SourceFile> sourceFiles = new Menge<>();
sourceFiles.add(parsed);
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction(sourceFiles);
TestCase.assertTrue("Es darf nicht mehr als eine L�sungsm�glichkeit geben und nicht "+results.size(), results.size()==1);
return results.firstElement();
} catch (IOException | yyException e) {

@ -3,6 +3,7 @@ package plugindevelopment.MartinTestCases;
import java.io.File;
import java.io.IOException;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.Menge;
import org.junit.Test;
@ -27,8 +28,10 @@ public class Tester extends TypeInsertTester{
String inferedSource = "";
MyCompilerAPI compiler = MyCompiler.getAPI(new LoggerConfiguration().setOutput(Section.TYPEINFERENCE, System.out));
try {
compiler.parse(new File(rootDirectory + sourceFileToInfere));
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction();
SourceFile parsed = compiler.parse(new File(rootDirectory + sourceFileToInfere));
Menge<SourceFile> sourceFiles = new Menge<>();
sourceFiles.add(parsed);
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction(sourceFiles);
//TestCase.assertTrue("Es darf nicht mehr als eine L�sungsm�glichkeit geben und nicht "+results.size(), results.size()==1);
for(TypeinferenceResultSet result : results){
TypeInsertSet point = result.getTypeInsertionPoints();

@ -8,6 +8,8 @@ import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.logger.Logger;
@ -44,8 +46,10 @@ public class TypeInsertTester{
String inferedSource = "";
MyCompilerAPI compiler = MyCompiler.getAPI(logConfig);
try {
compiler.parse(new File(rootDirectory + sourceFileToInfere));
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction();
SourceFile parsed = compiler.parse(new File(rootDirectory + sourceFileToInfere));
Menge<SourceFile> sourceFiles = new Menge<>();
sourceFiles.add(parsed);
Menge<TypeinferenceResultSet> results = compiler.typeReconstruction(sourceFiles);
TestCase.assertTrue("Es darf nicht mehr als eine Lösungsmöglichkeit geben und nicht "+results.size(), results.size()==1);
for(TypeinferenceResultSet result : results){
TypeInsertSet point = result.getTypeInsertionPoints();

@ -861,14 +861,15 @@ public class UnifyTest {
UnifyType tphT1 = tf.getPlaceholderType("T1");
UnifyType tphT2 = tf.getPlaceholderType("T2");
UnifyType gtv = tf.getSimpleType("gtv");
UnifyType vector = tf.getSimpleType("Vector", gtv);
UnifyType vectorT2 = tf.getSimpleType("Vector", tphT2);
UnifyType string = tf.getSimpleType("String");
UnifyType vectorString = tf.getSimpleType("Vector", string);
fcb.add(vector, vector);
fcb.add(vector, tf.getSimpleType("java.lang.Object"));
IFiniteClosure fc = fcb.getFiniteClosure();
@ -879,7 +880,7 @@ public class UnifyTest {
Set<Set<UnifyPair>> actual = new TypeUnify().unifySequential(eq, fc);
System.out.println("Test OverloadingVector:");
System.out.println(actual);
System.out.println(actual + "\n");
}
@Test