Merge remote-tracking branch 'origin/bytecode2' into bytecode2

# Conflicts:
#	pom.xml
Michael Uhl 2019-01-23 22:05:59 +01:00
commit 9805f956c0
85 changed files with 1499 additions and 755 deletions


@ -1,8 +1,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/maven-v4_0_0.xsd">
http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>de.dhbwstuttgart</groupId>
<artifactId>JavaTXcompiler</artifactId>
@ -68,7 +67,7 @@
</goals>
<configuration>
<sourceDirectory>src/main/antlr4/java8</sourceDirectory>
<outputDirectory>${project.basedir}/target/generated-sources/de/dhbwstuttgart/parser/antlr</outputDirectory>
<outputDirectory>${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/parser/antlr</outputDirectory>
<arguments>
<argument>-package</argument>
<argument>de.dhbwstuttgart.parser.antlr</argument>
@ -82,7 +81,7 @@
</goals>
<configuration>
<sourceDirectory>src/main/antlr4/sat</sourceDirectory>
<outputDirectory>${project.basedir}/target/generated-sources/de/dhbwstuttgart/sat/asp/parser/antlr</outputDirectory>
<outputDirectory>${project.basedir}/target/generated-sources/antlr4/de/dhbwstuttgart/sat/asp/parser/antlr</outputDirectory>
<arguments>
<argument>-package</argument>
<argument>de.dhbwstuttgart.sat.asp.parser.antlr</argument>


@ -148,8 +148,17 @@ public class BytecodeGen implements ASTVisitor {
tphsClass.add(t);
}
String sig = null;
/* if class has generics then creates signature
* Signature looks like:
* <E:Ljava/...>Superclass
*/
if(classOrInterface.getGenerics().iterator().hasNext() || !commonPairs.isEmpty() ||
classOrInterface.getSuperClass().acceptTV(new TypeToSignature()).contains("<")
|| !tphsClass.isEmpty()) {
HashMap<TPHConstraint, HashSet<String>> constraints = Simplify.simplifyConstraintsClass(tphExtractor,tphsClass);
ArrayList<TPHConstraint> consClass = new ArrayList<>();
for(TPHConstraint cons : tphExtractor.allCons) {
for(TPHConstraint cons : constraints.keySet()) {
TypePlaceholder right = null;
for(TypePlaceholder tph : tphsClass) {
if(cons.getLeft().equals(tph.getName())) {
@ -164,14 +173,6 @@ public class BytecodeGen implements ASTVisitor {
right = null;
}
}
String sig = null;
/* if class has generics then creates signature
* Signature looks like:
* <E:Ljava/...>Superclass
*/
if(classOrInterface.getGenerics().iterator().hasNext() || !commonPairs.isEmpty() ||
classOrInterface.getSuperClass().acceptTV(new TypeToSignature()).contains("<")
|| !tphsClass.isEmpty()) {
Signature signature = new Signature(classOrInterface, genericsAndBounds,commonPairs,tphsClass, consClass);
sig = signature.toString();
System.out.println("Signature: => " + sig);

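The hunk above moves the signature check so that the simplified class constraints are computed first. The signature string mentioned in the comment (<E:Ljava/...>Superclass) follows the JVM's ClassSignature grammar; for example, class Foo<E> gets the signature <E:Ljava/lang/Object;>Ljava/lang/Object;. The following is a minimal, hypothetical ASM sketch of how such a string is consumed; the class name Foo and the bare ClassWriter setup are illustrative assumptions, not the project's Signature or BytecodeGen code.

import org.objectweb.asm.ClassWriter;
import static org.objectweb.asm.Opcodes.*;

public class SignatureSketch {
    public static void main(String[] args) {
        ClassWriter cw = new ClassWriter(0);
        // ClassSignature for "class Foo<E> extends Object":
        // one formal type parameter E bounded by Object, then the superclass signature.
        String sig = "<E:Ljava/lang/Object;>Ljava/lang/Object;";
        cw.visit(V1_8, ACC_PUBLIC, "Foo", sig, "java/lang/Object", null);
        cw.visitEnd();
        System.out.println(cw.toByteArray().length + " bytes generated");
    }
}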

@ -832,7 +832,9 @@ public class BytecodeGenMethod implements StatementVisitor {
System.out.println(methods[i]);
}
methodRefl = getMethod(methodCall.name,methodCall.arglist.getArguments().size(),methCallType, typesOfParams,methods);
}catch (Exception e2) {
}
catch (Exception e2) {
System.out.println("");
//do nothing
}
}


@ -11,6 +11,7 @@ import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
import de.dhbwstuttgart.syntaxtree.AbstractASTWalker;
import de.dhbwstuttgart.syntaxtree.Constructor;
import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.result.GenericInsertPair;
@ -75,4 +76,11 @@ public class TPHExtractor extends AbstractASTWalker{
ListOfMethodsAndTph.add(methodAndTph);
}
@Override
public void visit(Constructor cons) {
inMethod = false;
super.visit(cons);
inMethod = true;
}
}


@ -347,6 +347,245 @@ public class Simplify {
return result;
}
public static HashMap<TPHConstraint, HashSet<String>> simplifyConstraintsClass(TPHExtractor tphExtractor, ArrayList<TypePlaceholder> tphsClass) {
// all constraints that will be simplified
ArrayList<TPHConstraint> allCons = tphExtractor.allCons;
ArrayList<TPHConstraint> consToRemove = new ArrayList<>();
// step 1:
for(TPHConstraint c : allCons) {
String left = c.getLeft();
String right = c.getRight();
if(c.getRel() == Relation.EXTENDS) {
TPHConstraint revCon = getReverseConstraint(allCons,left,right);
if(revCon != null) {
revCon.setRel(Relation.EQUAL);
// the reverse constraint is removed because
// otherwise there is the same constraint twice
// (e.g. A<B and B<A => A=B and B=A)
consToRemove.add(revCon);
c.setRel(Relation.EQUAL);
substituteTPH(allCons,left, right);
}
}
}
allCons.removeAll(consToRemove);
consToRemove = new ArrayList<>();
int size = allCons.size();
HashMap<TPHConstraint, HashSet<String>> result = new HashMap<>();
// check if there is any long cycle (e.g. A<B<C<A)
// to save all types that are in relation
LinkedList<String> allTypes = new LinkedList<>();
// we will put all constraints which are in the cycle, in this ArrayList.
// Later these contraints will be converted to equal-constraints
ArrayList<TPHConstraint> constraints = new ArrayList<>(size);
int visited = 0;
// contains all constraints
HashMap<String,String> ss1 = new HashMap<>();
for(TPHConstraint constr : allCons) {
ss1.put(constr.getLeft(), constr.getRight());
}
for(TPHConstraint c : allCons) {
if(visited >= size)
break;
// Only extends-constraints will be checked
if(c.getRel() == Relation.EXTENDS) {
++visited;
String left = c.getLeft();
String right = c.getRight();
// put the types in linked list
allTypes.add(left);
allTypes.add(right);
constraints.add(c);
boolean isCycle = false;
// iterate through the map to check if there is a cycle
while(ss1.containsKey(right)) {
++visited;
String oldRight = right;
right = ss1.get(right);
TPHConstraint toAdd = getConstraint(oldRight, right, allCons);
if(toAdd != null)
constraints.add(toAdd);
if(left.equals(right)) {
isCycle = true;
break;
}
allTypes.add(right);
}
if(isCycle) {
// convert all constraints to equal constraints
setAllEqual(constraints);
// these constraints will be removed from allCons
consToRemove.addAll(constraints);
// all equal types will be substitute with one type
substituteAllTPH(allCons,constraints,left);
HashSet<String> eq = new HashSet<>();
// put all equal types in a set
for(String t:allTypes) {
eq.add(t);
}
// generate a new constraint (left < Object)
TPHConstraint constraint = new ExtendsConstraint(left, Type.getInternalName(Object.class), Relation.EXTENDS);
// put the generated constraint and its equal set into result set
result.put(constraint, eq);
constraints.clear();
}
allTypes.clear();
}
}
// build an equal set that contains all types
// which are equal and for each equal constraint put left side and right side
// in this set Then generate a constraint type < Object and put it
// with the equal set into the result.
for(TPHConstraint c : allCons) {
if(c.getRel()==Relation.EQUAL) {
if(!isTPHInResEqual(result, c.getLeft())) {
HashSet<String> equalTPHs = getEqualsTPHs(result, c);
TPHConstraint constraint = getKeyConstraint(result,c);
equalTPHs.add(c.getLeft());
equalTPHs.add(c.getRight());
result.put(constraint, equalTPHs);
}
consToRemove.add(c);
size--;
}
}
// remove all equal-constraints
allCons.removeAll(consToRemove);
// add all generated constraints to allCons
allCons.addAll(result.keySet());
if(!allCons.isEmpty() && allCons.size()<2) {
TPHConstraint cons = allCons.get(0);
if(!result.containsKey(cons)) {
result.put(cons, null);
result.put(new ExtendsConstraint(cons.getRight(), Type.getInternalName(Object.class), Relation.EXTENDS), null);
}
return result;
}
size += result.keySet().size();
// all constraints which have Object on the right side will
// be ignored, because they are simplified and can not be changed.
for(TPHConstraint c : allCons) {
if(c.getRight().equals(Type.getInternalName(Object.class)))
size--;
}
// check if there are multiple constraint with the same left side.
// if yes => check if the super type in the method, if not
// then ignore it.
HashMap<String, String> subAndSuper = new HashMap<>();
ArrayList<TPHConstraint> eqCons = new ArrayList<>();
for(TPHConstraint c : allCons) {
subAndSuper.put(c.getLeft(), c.getRight());
}
int numOfVisitedPairs = 0;
for(String sub : subAndSuper.keySet()) {
if(isTPHInConstraint(result,sub))
continue;
if(!classTPHSContainsTPH(tphsClass,sub))
continue;
if(numOfVisitedPairs>=size)
break;
LinkedList<String> tphInRel = new LinkedList<>();
tphInRel.add(sub);
String superT = subAndSuper.get(sub);
tphInRel.add(superT);
numOfVisitedPairs++;
while(subAndSuper.containsKey(superT)) {
superT = subAndSuper.get(superT);
if(tphInRel.contains(superT)) {
break;
}
tphInRel.add(superT);
numOfVisitedPairs++;
}
// Subtype
String subTphRes = tphInRel.getFirst();
// Die größte Supertype
String superTphRes = tphInRel.getLast();
// if there is any constraint X < subTph, then
// add X at the beginning of the list.
while(subAndSuper.containsValue(subTphRes)) {
for(String tph : subAndSuper.keySet()) {
if(classTPHSContainsTPH(tphsClass,tph) && subAndSuper.get(tph).equals(subTphRes)) {
subTphRes = tph;
break;
}
}
if(subTphRes.equals(tphInRel.getFirst())) {
break;
}
if(isTPHInConstraint(result, subTphRes))
break;
tphInRel.addFirst(subTphRes);
numOfVisitedPairs++;
}
subTphRes = tphInRel.getFirst();
HashSet<String> equals = getEqualsTphsFromEqualCons(eqCons,superTphRes);
result.put(new ExtendsConstraint(subTphRes, superTphRes, Relation.EXTENDS), equals);
}
System.out.println("EndResult: ");
result.forEach((c,hs)->{
if(c!=null) {
System.out.print(c.toString() + " -> ");
if(hs == null) {
System.out.print(" [] ");
}else {
hs.forEach(s->{
System.out.print(s + ", ");
});
}
}
System.out.println();
});
System.out.println("----------------");
return result;
}
private static boolean classTPHSContainsTPH(ArrayList<TypePlaceholder> tphsClass, String superTphRes) {
for(TypePlaceholder tph : tphsClass) {
if(tph.getName().equals(superTphRes))

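The simplifyConstraintsClass hunk above first merges mutual extends-constraints into equal-constraints and then collapses longer cycles (A<B<C<A) into a single representative that extends Object. A self-contained toy sketch of that cycle collapse, using plain strings and maps instead of the project's TPHConstraint classes (assumed, simplified behaviour only):

import java.util.*;

public class CycleCollapseSketch {
    public static void main(String[] args) {
        // extends-constraints as "left extends right" (toy data)
        Map<String, String> extendsCons = new LinkedHashMap<>();
        extendsCons.put("A", "B");
        extendsCons.put("B", "C");
        extendsCons.put("C", "A");   // closes the cycle A < B < C < A
        extendsCons.put("D", "A");   // not part of the cycle

        Map<String, Set<String>> collapsed = new LinkedHashMap<>();
        for (String start : new ArrayList<>(extendsCons.keySet())) {
            Set<String> chain = new LinkedHashSet<>();
            chain.add(start);
            String cur = start;
            while (extendsCons.containsKey(cur)) {
                cur = extendsCons.get(cur);
                if (cur.equals(start)) {            // cycle found: all members are equal
                    collapsed.put(start, chain);    // representative -> its equal set
                    chain.forEach(extendsCons.keySet()::remove);
                    break;
                }
                if (!chain.add(cur)) break;         // reached an already visited type
            }
        }
        // each collapsed cycle is replaced by "representative < java/lang/Object"
        collapsed.forEach((rep, eq) ->
                System.out.println(rep + " < java/lang/Object, equal set = " + eq));
    }
}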

@ -1,3 +1,4 @@
//PL 2018-12-19: typeInferenceOld nach typeInference uebertragen
package de.dhbwstuttgart.core;
@ -15,6 +16,7 @@ import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
@ -29,6 +31,7 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import java.io.File;
@ -44,7 +47,7 @@ public class JavaTXCompiler {
final CompilationEnvironment environment;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/test/logFiles/log" geschrieben werden soll?
Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile));
@ -103,7 +106,8 @@ public class JavaTXCompiler {
return new ArrayList<>(allClasses);
}
public List<ResultSet> typeInference() throws ClassNotFoundException {
/*
public List<ResultSet> typeInferenceOld() throws ClassNotFoundException {
List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
for(SourceFile sf : this.sourceFiles.values()) {
@ -187,10 +191,7 @@ public class JavaTXCompiler {
}
}
return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
})
/* PL 2018-11-07 wird in varianceInheritance erledigt
.map( y -> {
}).map( y -> {
if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
@ -200,12 +201,8 @@ public class JavaTXCompiler {
}
}
return y; } )
*/
.collect(Collectors.toCollection(HashSet::new));
varianceInheritance(xConsSet);
Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
System.out.println("RESULT: " + result);
@ -236,11 +233,13 @@ public class JavaTXCompiler {
return results.stream().map((unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
}
*/
/**
* Vererbt alle Variancen
* Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
* wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
* @param eq The set of constraints
*/
/*
private void varianceInheritance(Set<UnifyPair> eq) {
Set<PlaceholderType> usedTPH = new HashSet<>();
Set<PlaceholderType> phSet = eq.stream().map(x -> {
@ -267,6 +266,203 @@ public class JavaTXCompiler {
phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
}
}
*/
public List<ResultSet> typeInference() throws ClassNotFoundException {
List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
for(SourceFile sf : this.sourceFiles.values()) {
allClasses.addAll(getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
}
final ConstraintSet<Pair> cons = getConstraints();
Set<Set<UnifyPair>> results = new HashSet<>();
try {
FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses,logFile);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
Function<UnifyPair, UnifyPair> distributeInnerVars =
x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType)
&& ((rhs = x.getRhsType()) instanceof PlaceholderType)
&& (((PlaceholderType)lhs).isInnerType()
|| ((PlaceholderType)rhs).isInnerType()))
{
((PlaceholderType)lhs).setInnerType(true);
((PlaceholderType)rhs).setInnerType(true);
}
return x;
};
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
TypeUnify unify = new TypeUnify();
//Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString()+"\n");
for(SourceFile sf : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(sf));
}
logFile.flush();
Set<String> methodParaTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().map(y -> y.getParameterList().getFormalparalist()
.stream().filter(z -> z.getType() instanceof TypePlaceholder)
.map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) )
.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;} );
Set<String> constructorParaTypeVarNames = allClasses.stream().map(x -> x.getConstructors().stream().map(y -> y.getParameterList().getFormalparalist()
.stream().filter(z -> z.getType() instanceof TypePlaceholder)
.map(z -> ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(HashSet::new)))
.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;}, (a,b) -> { a.addAll(b); return a;} ) )
.reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return a;} );
Set<String> paraTypeVarNames = methodParaTypeVarNames;
paraTypeVarNames.addAll(constructorParaTypeVarNames);
Set<String> returnTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
.map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
Set<String> fieldTypeVarNames = allClasses.stream().map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
.map(z -> ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;} ).get();
returnTypeVarNames.addAll(fieldTypeVarNames);
unifyCons = unifyCons.map(x -> {
//Hier muss ueberlegt werden, ob
//1. alle Argument- und Retuntyp-Variablen in allen UnifyPairs
// mit disableWildcardtable() werden.
//2. alle Typvariablen mit Argument- oder Retuntyp-Variablen
//in Beziehung auch auf disableWildcardtable() gesetzt werden muessen
//PL 2018-04-23
if ((x.getLhsType() instanceof PlaceholderType)) {
if (paraTypeVarNames.contains(x.getLhsType().getName())) {
((PlaceholderType)x.getLhsType()).setVariance((byte)1);
((PlaceholderType)x.getLhsType()).disableWildcardtable();
}
if (returnTypeVarNames.contains(x.getLhsType().getName())) {
((PlaceholderType)x.getLhsType()).setVariance((byte)-1);
((PlaceholderType)x.getLhsType()).disableWildcardtable();
}
}
if ((x.getRhsType() instanceof PlaceholderType)) {
if (paraTypeVarNames.contains(x.getRhsType().getName())) {
((PlaceholderType)x.getRhsType()).setVariance((byte)1);
((PlaceholderType)x.getRhsType()).disableWildcardtable();
}
if (returnTypeVarNames.contains(x.getRhsType().getName())) {
((PlaceholderType)x.getRhsType()).setVariance((byte)-1);
((PlaceholderType)x.getRhsType()).disableWildcardtable();
}
}
return x;//HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS ANDERE SEITE
});
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt
do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen
//anderen Seite übertragen
varianceTPHold = new HashSet<>(varianceTPH);
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
unifyCons.map( y -> {
if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
}
if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
}
}
return y; } ); }
while (!varianceTPHold.equals(varianceTPH));
*/
//Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile, log);
//Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, logFile, log);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
}
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
logFile.write("RES_FINAL: " + results.toString()+"\n");
logFile.flush();
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
logFile.flush();
}
catch (IOException e) {
System.err.println("kein LogFile");
}
return results.stream().map((unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
}
/**
* Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a)
* wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
* @param eq The set of constraints
*/
private Set<PlaceholderType> varianceInheritanceConstraintSet(ConstraintSet<UnifyPair> cons) {
Set<UnifyPair> eq = cons.getAll();
Set<PlaceholderType> usedTPH = new HashSet<>();
Set<PlaceholderType> phSet = eq.stream().map(x -> {
Set<PlaceholderType> pair = new HashSet<>();
if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType());
if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType());
return pair;
}).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;});
ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
phSetVariance.removeIf(x -> (x.getVariance() == 0));
while(!phSetVariance.isEmpty()) {
PlaceholderType a = phSetVariance.remove(0);
usedTPH.add(a);
//HashMap<PlaceholderType,Integer> ht = new HashMap<>();
//ht.put(a, a.getVariance());
//ConstraintSet<UnifyPair> eq1 = cons;
//eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)));
//durch if-Abfrage im foreach geloest
cons.forEach(x -> {
if (x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)) {
x.getRhsType().accept(new distributeVariance(), a.getVariance());
}
});
//eq1 = new HashSet<>(eq);
//eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
//durch if-Abfrage im foreach geloest
cons.forEach(x -> {
if (x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)) {
x.getLhsType().accept(new distributeVariance(), a.getVariance());
}
});
phSetVariance = new ArrayList<>(phSet); //macht vermutlich keinen Sinn PL 2018-10-18, doch, es koennen neue TPHs mit Variancen dazugekommen sein PL 2018-11-07
phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
}
return usedTPH;
}
private Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();

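varianceInheritanceConstraintSet above propagates a known variance from a placeholder a to every type variable of the term on the other side of a pair (a <. Theta or Theta <. a), and keeps doing so until no further placeholder changes. A self-contained toy sketch of that propagation with plain strings instead of PlaceholderType and UnifyPair (assumed, simplified model, not the project's code):

import java.util.*;

public class VarianceSketch {
    // toy pair: a placeholder on one side, the type variables of the term on the other side
    static class Pair {
        final String ph; final Set<String> termVars;
        Pair(String ph, Set<String> termVars) { this.ph = ph; this.termVars = termVars; }
    }

    public static void main(String[] args) {
        Map<String, Integer> variance = new HashMap<>();
        variance.put("a", 1);                          // a already carries variance +1
        List<Pair> pairs = List.of(
                new Pair("a", Set.of("b", "c")),       // a <. Theta(b, c)
                new Pair("b", Set.of("d")));           // b <. Theta(d)

        boolean changed = true;
        while (changed) {                              // repeat until a fixed point is reached
            changed = false;
            for (Pair p : pairs) {
                int v = variance.getOrDefault(p.ph, 0);
                if (v == 0) continue;                  // nothing to inherit from this pair
                for (String var : p.termVars)
                    if (variance.getOrDefault(var, 0) == 0) { variance.put(var, v); changed = true; }
            }
        }
        System.out.println(variance);                  // a, b, c and d all end up with variance 1
    }
}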

@ -80,6 +80,9 @@ public abstract class AbstractASTWalker implements ASTVisitor{
for(Field f : classOrInterface.getFieldDecl()){
f.accept(this);
}
for(Constructor c : classOrInterface.getConstructors()){
c.accept(this);
}
for(Method m : classOrInterface.getMethods()){
m.accept(this);
}


@ -1,5 +1,6 @@
package de.dhbwstuttgart.syntaxtree.factory;
import java.io.FileWriter;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -29,7 +30,7 @@ public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses) throws ClassNotFoundException {
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, FileWriter logFile) throws ClassNotFoundException {
/*
Die transitive Hülle muss funktionieren.
Man darf schreiben List<A> extends AL<A>
@ -40,7 +41,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden
*/
return new FiniteClosure(FCGenerator.toUnifyFC(fromClasses));
return new FiniteClosure(FCGenerator.toUnifyFC(fromClasses), logFile);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr){
@ -63,26 +64,26 @@ public class UnifyTypeFactory {
* Convert from
* ASTType -> UnifyType
*/
public static UnifyType convert(RefTypeOrTPHOrWildcardOrGeneric t){
public static UnifyType convert(RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
if(t instanceof GenericRefType){
return UnifyTypeFactory.convert((GenericRefType)t);
return UnifyTypeFactory.convert((GenericRefType)t, innerType);
}else
if(t instanceof FunN){
return UnifyTypeFactory.convert((FunN)t);
return UnifyTypeFactory.convert((FunN)t, innerType);
}else if(t instanceof TypePlaceholder){
return UnifyTypeFactory.convert((TypePlaceholder)t);
return UnifyTypeFactory.convert((TypePlaceholder)t, innerType);
}else if(t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert((ExtendsWildcardType)t);
return UnifyTypeFactory.convert((ExtendsWildcardType)t, innerType);
}else if(t instanceof SuperWildcardType){
return UnifyTypeFactory.convert((SuperWildcardType)t);
return UnifyTypeFactory.convert((SuperWildcardType)t, innerType);
}else if(t instanceof RefType){
return UnifyTypeFactory.convert((RefType)t);
return UnifyTypeFactory.convert((RefType)t, innerType);
}
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
public static UnifyType convert(RefType t){
public static UnifyType convert(RefType t, Boolean innerType){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)");
Matcher m = p.matcher(t.getName().toString());
@ -90,14 +91,14 @@ public class UnifyTypeFactory {
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
return convert(new FunN(t.getParaList()));
return convert(new FunN(t.getParaList()), false);
}
}
UnifyType ret;
if(t.getParaList() != null && t.getParaList().size() > 0){
List<UnifyType> params = new ArrayList<>();
for(RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()){
params.add(UnifyTypeFactory.convert(pT));
params.add(UnifyTypeFactory.convert(pT, true));
}
ret = new ReferenceType(t.getName().toString(),new TypeParams(params));
}else{
@ -106,39 +107,45 @@ public class UnifyTypeFactory {
return ret;
}
public static UnifyType convert(FunN t){
public static UnifyType convert(FunN t, Boolean innerType){
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if(t.getParaList() != null && t.getParaList().size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()){
params.add(UnifyTypeFactory.convert(pT));
params.add(UnifyTypeFactory.convert(pT, false));
}
}
ret = FunNType.getFunNType(new TypeParams(params));
return ret;
}
public static UnifyType convert(TypePlaceholder tph){
public static UnifyType convert(TypePlaceholder tph, Boolean innerType){
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName());
int in = PLACEHOLDERS.indexOf(ntph);
if (in == -1) {
PLACEHOLDERS.add(ntph);
ntph.setInnerType(innerType);
return ntph;
}
else {
return PLACEHOLDERS.get(in);
PlaceholderType oldpht = PLACEHOLDERS.get(in);
oldpht.setInnerType(oldpht.isInnerType() || innerType);
return oldpht;
}
}
public static UnifyType convert(GenericRefType t){
public static UnifyType convert(GenericRefType t, Boolean innerType){
return new ReferenceType(t.getParsedName());
}
public static UnifyType convert(WildcardType t){
public static UnifyType convert(WildcardType t, Boolean innerType){
if(t.isExtends())
return new ExtendsType(UnifyTypeFactory.convert(t.getInnerType()));
return new ExtendsType(UnifyTypeFactory.convert(t.getInnerType(), false));
else if(t.isSuper())
return new SuperType(UnifyTypeFactory.convert(t.getInnerType()));
return new SuperType(UnifyTypeFactory.convert(t.getInnerType(), false));
else throw new NotImplementedException();
}
@ -152,22 +159,42 @@ public class UnifyTypeFactory {
}
public static UnifyPair convert(Pair p) {
UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
UnifyPair ret = generateSmallerDotPair(UnifyTypeFactory.convert(p.TA1)
, UnifyTypeFactory.convert(p.TA2));
return ret;
ret = generateSmallerDotPair(UnifyTypeFactory.convert(p.TA1, false)
, UnifyTypeFactory.convert(p.TA2, false));
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
UnifyPair ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(p.TA1)
, UnifyTypeFactory.convert(p.TA2));
return ret;
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(p.TA1, false)
, UnifyTypeFactory.convert(p.TA2, false));
//return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
UnifyPair ret = generateEqualDotPair(UnifyTypeFactory.convert(p.TA1)
, UnifyTypeFactory.convert(p.TA2));
return ret;
ret = generateEqualDotPair(UnifyTypeFactory.convert(p.TA1, false)
, UnifyTypeFactory.convert(p.TA2, false));
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
return generateSmallerPair(UnifyTypeFactory.convert(p.TA1),
UnifyTypeFactory.convert(p.TA2));
ret = generateSmallerPair(UnifyTypeFactory.convert(p.TA1, false),
UnifyTypeFactory.convert(p.TA2, false));
}else throw new NotImplementedException();
UnifyType lhs, rhs;
if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getRhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
System.out.println("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
if (((rhs = ret.getRhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
System.out.println("");
}
((PlaceholderType)lhs).enableWildcardtable();
}
return ret;
}
/**

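The UnifyTypeFactory changes above thread an innerType flag through every convert overload: the top-level type of a pair is converted with false, type arguments are converted with true, and an already known placeholder keeps the flag once it has been set (isInnerType() || innerType). A self-contained toy sketch of that threading, with a stand-in Ref class rather than the project's RefType and PlaceholderType:

import java.util.*;

public class InnerTypeSketch {
    // toy reference type: a name plus type arguments (stand-in, not the project's RefType)
    static class Ref {
        final String name; final List<Ref> args;
        Ref(String name, Ref... args) { this.name = name; this.args = List.of(args); }
    }

    // record which names were ever seen as an inner type, i.e. as a type argument
    static void convert(Ref t, boolean innerType, Map<String, Boolean> seenInner) {
        seenInner.merge(t.name, innerType, Boolean::logicalOr);   // like setInnerType(old || new)
        for (Ref arg : t.args)
            convert(arg, true, seenInner);                        // arguments are inner types
    }

    public static void main(String[] args) {
        Map<String, Boolean> seenInner = new LinkedHashMap<>();
        convert(new Ref("List", new Ref("A")), false, seenInner); // List<A> at top level
        convert(new Ref("A"), false, seenInner);                  // A also occurs at top level
        System.out.println(seenInner);                            // {List=false, A=true}
    }
}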

@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -52,4 +53,29 @@ public class ConstraintSet<A> {
ret.oderConstraints = newOder;
return ret;
}
public void forEach (Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
}
public Set<A> getAll () {
Set<A> ret = new HashSet<>();
ret.addAll(undConstraints);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
}

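The new ConstraintSet accessors above separate the conjunctive und-constraints from the oder-constraints, whose alternatives each hold a set of pairs; getAll and forEach flatten both. A small self-contained illustration on a toy copy that stores plain strings instead of UnifyPair (illustrative only, not the project's ConstraintSet or Constraint classes):

import java.util.*;
import java.util.function.Consumer;

public class ConstraintSetSketch {
    static final Set<String> undConstraints = new LinkedHashSet<>();
    static final List<Set<Set<String>>> oderConstraints = new ArrayList<>();

    // visit every pair of every alternative, like the new forEach
    static void forEachPair(Consumer<? super String> c) {
        undConstraints.forEach(c);
        for (Set<Set<String>> oder : oderConstraints)
            for (Set<String> alternative : oder)
                alternative.forEach(c);
    }

    // flatten und- and oder-constraints into one set, like the new getAll
    static Set<String> getAll() {
        Set<String> ret = new LinkedHashSet<>(undConstraints);
        for (Set<Set<String>> oder : oderConstraints)
            for (Set<String> alternative : oder)
                ret.addAll(alternative);
        return ret;
    }

    public static void main(String[] args) {
        undConstraints.add("a <. b");
        oderConstraints.add(new LinkedHashSet<>(List.of(
                Set.of("b =. Integer"), Set.of("b =. String"))));
        System.out.println(getAll());          // [a <. b, b =. Integer, b =. String]
        forEachPair(System.out::println);      // prints each pair once
    }
}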

@ -1,3 +1,4 @@
//PL 2018-12-19: Merge checken
package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.exceptions.NotImplementedException;
@ -229,6 +230,13 @@ public class TYPEStmt implements StatementVisitor{
binary.operation.equals(BinaryExpr.Operator.SUB)){
Set<Constraint<Pair>> numericAdditionOrStringConcatenation = new HashSet<>();
// TODO PL 2018-11-06
// Auf importierte Typen einschraenken
// pruefen, ob Typen richtig bestimmt werden.
//Zuerst der Fall für Numerische AusdrücPairOpnumericeratorke, das sind Mul, Mod und Div immer:
//see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
//Expression muss zu Numeric Convertierbar sein. also von Numeric erben


@ -22,6 +22,8 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class Match implements IMatch {
@Override
//A<X> =. A<Integer> ==> True
//A<Integer> =. A<X> ==> False
public Optional<Unifier> match(ArrayList<UnifyPair> termsList) {
// Start with the identity unifier. Substitutions will be added later.


@ -1,9 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@ -22,4 +24,10 @@ public class TypeUnify {
return res;
}
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log);
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;
}
}


@ -1,3 +1,4 @@
//PL 2018-12-19: Merge checken
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
@ -18,6 +19,7 @@ import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
@ -71,7 +73,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
protected IRuleSet rules;
protected Set<UnifyPair> eq;
protected Set<UnifyPair> eq; //und-constraints
protected List<Set<Set<UnifyPair>>> oderConstraintsField;
protected IFiniteClosure fc;
@ -85,6 +89,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Integer noAllErasedElements = 0;
static Integer noou = 0;
static int noBacktracking;
public TypeUnifyTask() {
@ -101,6 +107,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
rules = new RuleSet(logFile);
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints.stream().map(x -> {
Set<Set<UnifyPair>> ret = new HashSet<>();
for (Constraint<UnifyPair> y : x) {
ret.add(new HashSet<>(y));
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
//x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
rules = new RuleSet(logFile);
}
/**
* Vererbt alle Variancen
@ -134,12 +159,279 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
*/
@Override
protected Set<Set<UnifyPair>> compute() {
Set<Set<UnifyPair>> res = unify(eq, fc, parallel);
Set<UnifyPair> neweq = new HashSet<>(eq);
/* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
oderConstraintsField.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
ArrayList<Set<Set<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, 0);
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
/*
@Override
protected Set<Set<UnifyPair>> compute() {
Set<Set<UnifyPair>> fstElems = new HashSet<>();
fstElems.add(eq);
Set<Set<UnifyPair>> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel);
if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
else return res;
}
*/
public Set<Set<UnifyPair>> computeCartesianRecursiveOderConstraints(Set<Set<UnifyPair>> fstElems, List<Set<Set<UnifyPair>>> topLevelSets, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
.collect(Collectors.toCollection(HashSet::new)));
ArrayList<Set<Set<UnifyPair>>> remainingSets = topLevelSets.stream()
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
Set<UnifyPair> eq = new HashSet<>();
fstElems.stream().forEach(x -> eq.addAll(x));
Set<Set<UnifyPair>> result = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
try {
//List<Set<UnifyPair>>
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
}
catch (java.lang.IllegalArgumentException e) {
System.out.print("");
}
Set<Set<UnifyPair>> result = new HashSet<>();
int variance = 0;
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
.reduce((a,b)-> {if (a==b) return a; else return 0; }))
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
if (xi.isPresent()) {
variance = xi.get();
}
//if (variance == 1 && nextSetasList.size() > 1) {
// List<Set<UnifyPair>> al = new ArrayList<>(nextSetasList.size());
// for (int ii = 0; ii < nextSetasList.size();ii++) {
// al.add(0,nextSetasList.get(ii));
// }
// nextSetasList = al;
//}
//Set<UnifyPair> a = nextSetasListIt.next();
/*if (nextSetasList.size()>1) {zu loeschen
if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else if (variance == 0) {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
*/
if (!nextSetasList.iterator().hasNext())
System.out.print("");
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
writeLog("nextSetasList: " + nextSetasList.toString());
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
Set<UnifyPair> a = null;
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == -1) {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == 0) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
//nextSetasList.remove(a);
/* zu loeschen
if (nextSetasList.size() > 0) {
if (nextSetasList.size()>1) {
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
}
*/
//PL 2018-03-01
//TODO: 1. Maximum und Minimum unterscheiden
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
elems.add(a);
Set<Set<UnifyPair>> res = new HashSet<>();
if (remainingSets.isEmpty()) {
noou++;
writeLog("Vor unify Aufruf: " + eq.toString());
writeLog("No of Unify " + noou);
System.out.println(noou);
Set<UnifyPair> eq = new HashSet<>();
elems.stream().forEach(x -> eq.addAll(x));
res = unify(eq, new ArrayList<>(), fc, parallel, rekTiefe);
}
else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
res = computeCartesianRecursiveOderConstraints(elems, remainingSets, fc, parallel, rekTiefe);
}
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = res;
}
else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
result.addAll(res);
}
//else {
//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
// if (isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
// result = result;
// }
//}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
if (!result.isEmpty() && !isUndefinedPairSetSet(res)) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
if (variance == 1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
nextSetasList.remove(a_next);
}
else {
System.out.println("");
}
}
}
else { if (variance == -1) {
System.out.println("");
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasList.remove(0);
}
else {
System.out.println("");
}
}
}
else if (variance == 0) {
//break;
}}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
/* PL 2018-11-05 wird falsch weil es auf der obersten Ebene ist.
if (isUndefinedPairSetSet(res)) {
int nofstred= 0;
Set<UnifyPair> abhSubst = res.stream()
.map(b ->
b.stream()
.map(x -> x.getAllSubstitutions())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get();
Set<UnifyPair> b = a;//effective final a
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
//.filter(y -> abhSubst.contains(y))
.collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
int len = nextSetasList.size();
Set<UnifyPair> undefRes = res.stream().reduce((y,z) -> { y.addAll(z); return y;}).get(); //flatten aller undef results
Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
.map(x -> { Set<UnifyPair> su = x.getAllSubstitutions(); //alle benutzten Substitutionen
su.add(x.getGroundBasePair()); // urspruengliches Paar
su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen
return new Pair<>(su, x.getGroundBasePair());})
.collect(Collectors.toCollection(HashSet::new));
if (res.size() > 1) {
System.out.println();
}
nextSetasList = nextSetasList.stream().filter(x -> {
//Boolean ret = false;
//for (PlaceholderType var : vars) {
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
//}
return (!x.containsAll(durchschnitt));//Was passiert wenn durchschnitt leer ist??
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new));
nofstred = nextSetasList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a: " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
}
*/
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
//else {
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
} // End of while (nextSetasList.size() > 0)
return result;
}
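
Inside the loop above the next alternative a is chosen depending on the common variance of the set: the maximum under the OrderingUnifyPair order for variance 1, the minimum for variance -1, and plain insertion order otherwise. A toy sketch of that selection strategy, with integers and their natural order standing in for sets of UnifyPair and for oup (illustrative assumption only, not the project's code):

import java.util.*;

public class VarianceSelectionSketch {
    public static void main(String[] args) {
        List<Integer> nextSetasList = new ArrayList<>(List.of(3, 1, 2)); // toy alternatives
        int variance = 1;   // +1: try the maximum first, -1: the minimum, otherwise insertion order

        while (!nextSetasList.isEmpty()) {
            Integer a;
            if (variance == 1)       a = Collections.max(nextSetasList);
            else if (variance == -1) a = Collections.min(nextSetasList);
            else                     a = nextSetasList.get(0);
            nextSetasList.remove(a);               // remove(Object), not remove(int)
            System.out.println("try alternative " + a);
        }
    }
}
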
/**
* Computes all principal type unifiers for a set of constraints.
@ -147,18 +439,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
//if (aas.isEmpty()) {
// System.out.println("");
//}
//.collect(Collectors.toCollection(HashSet::new)));
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
//eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
/*
@ -224,11 +520,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<UnifyPair> undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
List<Set<Set<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
//nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden.
writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
// If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
@ -280,12 +578,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Aufruf von computeCartesianRecursive ANFANG
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
//writeLog("topLevelSets: " + topLevelSets.toString());
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
}
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//Aufruf von computeCartesianRecursive ENDE
//keine Ahnung woher das kommt
@ -312,14 +611,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/*
* Step 5: Substitution
*/
//System.out.println("vor Subst: " + eqPrime);
//writeLog("vor Subst: " + eqPrime);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
//writeLog("nach Subst: " + eqPrimePrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
if(parallel) {
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
&& oderConstraints.isEmpty()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
//eqPrimePrime Veraenderungen in subst repraesentieren.
@ -339,26 +639,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
else { // sequentiell (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
//eqPrimePrime Veraenderungen in subst repraesentieren.
try {
if (isSolvedForm(eqPrime)) {
logFile.write(eqPrime.toString()+"\n");
logFile.write("eqPrime:" + eqPrime.toString()+"\n");
logFile.flush();
}
}
catch (IOException e) { }
catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
eqPrimePrimeSet.add(eqPrime);
}
else if(eqPrimePrime.isPresent()) {
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), fc, false);
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), oderConstraints, fc, false, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
}
else {
Set<Set<UnifyPair>> unifyres = unify(eqPrime, fc, false);
Set<Set<UnifyPair>> unifyres = unify(eqPrime, oderConstraints, fc, false, rekTiefe);
eqPrimePrimeSet.addAll(unifyres);
@ -387,7 +690,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
@ -397,7 +700,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
Set<Set<UnifyPair>> result = unify2(fstElems, eq, fc, parallel);
Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe);
return result;
}
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
@ -415,7 +718,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> y.getLhsType() instanceof PlaceholderType)
.filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
.reduce((a,b)-> {if (a==b) return a; else return 0; }))
.reduce((a,b)-> {if (a==b) return a; else return 2; })) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
@ -463,7 +766,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
a = oup.min(nextSetasList.iterator());
nextSetasList.remove(a);
}
else if (variance == 0) {
else if (variance == 0 || variance == 2) {
a = nextSetasList.remove(0);
}
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
@ -492,10 +795,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
writeLog("a1: " + rekTiefe + " "+ a.toString()+ "\n");
elems.add(a);
//if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
//writeLog("Vor unify2 Aufruf: " + eq.toString());
Set<Set<UnifyPair>> res = unify2(elems, eq, fc, parallel);
//writeLog("Vor unify2 Aufruf: " + elems.toString());
Set<Set<UnifyPair>> res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = res;
@ -505,22 +809,42 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
if (!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) {
if ((!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
&& (a.stream().map(x-> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
{
//TODO: PL 2019-01-15: Bug 129: Im Moment wird nur das Maximum und das Minimum des aktuellen Elements betrachtet.
//Die zu vereinigenden Mengen können mehrere Elemente enthalten. Das ist bisher nicht berücksichtigt
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
List<PlaceholderType> vars_a = a.stream().filter(x -> (x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))).map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
System.out.println("");
List<PlaceholderType> vars_a =
a.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
|| ((x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getRhsType() instanceof PlaceholderType)))
)
.map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
Set<UnifyPair> fstElemRes = res.iterator().next();
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType)x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
List<PlaceholderType> varsLast_a = a_last.stream().filter(x -> (x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName()))).map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
//System.out.println(a_last);
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});
List<PlaceholderType> varsLast_a =
a_last.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))
|| ((x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())))
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getRhsType() instanceof PlaceholderType)))
.map(y -> (PlaceholderType)y.getLhsType()).collect(Collectors.toCollection(ArrayList::new));
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
Set<UnifyPair> fstElemResult = result.iterator().next();
Set<UnifyPair> compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType)x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));;
if (variance == 1) {
writeLog("a_last:" + a_last + " a: " + a);
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
@ -534,6 +858,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//result = result;
}}}
else { if (variance == -1) {
writeLog("a_last:" + a_last + " a: " + a);
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
writeLog("compResult:" + compResult + " compRes: " + compRes);
int resOfCompare = oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
@ -575,33 +902,41 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<Set<UnifyPair>>(nextSetasList).iterator();
if (variance == 1) {
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
writeLog("Removed: " + a_next.toString());
nextSetasList.remove(a_next);
}
else {
writeLog("Not Removed: " + a_next.toString());
System.out.println("");
}
}
}
else { if (variance == -1) {
System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
while (nextSetasListIt.hasNext()) {
Set<UnifyPair> a_next = nextSetasListIt.next();
if (a.equals(a_next) ||
(oup.compare(a, a_next) == -1)) {
nextSetasList.remove(0);
writeLog("Removed: " + a_next.toString());
nextSetasList.remove(a_next); //PL changed 2019-01-09
}
else {
System.out.println("");
writeLog("Not Removed: " + a_next.toString());
}
}
}
else if (variance == 0) {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
break;
}
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
}
}
/* commented out to disable all max and min consideration END */
@ -614,6 +949,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.map(x -> x.getAllSubstitutions())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get();
abhSubst.addAll(
res.stream()
.map(b ->
b.stream()
.map(x -> x.getAllBases())
.reduce((y,z) -> { y.addAll(z); return y;}).get())
.reduce((y,z) -> { y.addAll(z); return y;}).get()
);
Set<UnifyPair> b = a;//effective final a
Set<UnifyPair> durchschnitt = abhSubst.stream()
.filter(x -> b.contains(x))
@ -645,7 +988,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a: " + a.toString());
writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
@ -665,7 +1008,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("res: " + res.toString());
}
writeLog("Return computeCR: " + result.toString());
return result;
}
@ -942,15 +1288,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Set<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Set<UnifyPair>>> oderConstraintsOutput) {
oderConstraintsOutput.addAll(oderConstraintsInput);
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(9);
// Init all 8 cases
for(int i = 0; i < 8; i++)
// Init all 8 cases + the 9th case: oderConstraints
for(int i = 0; i < 9; i++)
result.add(new HashSet<>());
ArrayList<UnifyPair> eq2sprime = new ArrayList<>(eq2s);
Iterator<UnifyPair> eq2sprimeit = eq2sprime.iterator();
ArrayList<UnifyPair> eq2sAsList = new ArrayList<>();
Boolean first = true;
while(eq2sprimeit.hasNext()) {// move all pairs with variance != 0 to the front
UnifyPair up = eq2sprimeit.next();
if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType)up.getLhsType()).getVariance() != 0)
@ -959,8 +1308,45 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
eq2s.remove(up);
}
}
if (eq2sAsList.isEmpty()) {
List<Set<Set<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //drop all elements that have variance 0 or contain no TPH in the LHS or RHS
.filter(x -> x.stream()
.filter(y ->
y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
&& (((PlaceholderType)(z.getLhsType())).getVariance() != 0))
|| ((z.getRhsType() instanceof PlaceholderType)
&& (((PlaceholderType)(z.getRhsType())).getVariance() != 0))
).findFirst().isPresent()
).findFirst().isPresent()).collect(Collectors.toList());
if (!oderConstraintsVariance.isEmpty()) {
Set<Set<UnifyPair>> ret = oderConstraintsVariance.get(0);
oderConstraintsOutput.remove(ret);
//Set<UnifyPair> retFlat = new HashSet<>();
//ret.stream().forEach(x -> retFlat.addAll(x));
ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
result.get(8).add(ret);
first = false;
}
}
eq2sAsList.addAll(eq2s);
Boolean first = true;
if (eq2sAsList.isEmpty() && first) {//all eq2s are empty and all oderConstraints with variance != 0 have been processed
if (!oderConstraintsOutput.isEmpty()) {
Set<Set<UnifyPair>> ret = oderConstraintsOutput.remove(0);
//if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
// System.out.println("M");
//Set<UnifyPair> retFlat = new HashSet<>();
//ret.stream().forEach(x -> retFlat.addAll(x));
ret.stream().forEach(x -> x.stream().forEach(y -> y.addSubstitutions(x)));
result.get(8).add(ret);
first = false;
}
}
/*
For all of them, add the dependency of the elements from eq2sAsList
as a possible substitution
*/
for(UnifyPair pair : eq2sAsList) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
@ -970,6 +1356,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
//System.out.println(pair);
if (first) { //writeLog(pair.toString()+"\n");
if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
System.out.println("AR");
}
Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
if (pairOp == PairOperator.SMALLERNEQDOT) {
Set<UnifyPair> remElem = new HashSet<>();
@ -1112,11 +1501,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
allGen = false;
break;
}
//if (thetaPrime.getName().equals("java.util.Vector") //Fuer Bug 127
// && thetaPrime instanceof ReferenceType
// && ((ReferenceType)thetaPrime).getTypeParams().iterator().next().getName().equals("java.util.Vector")
// && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) {
// System.out.println("");
//}
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???]
//PL 18-02-06 removed, comes back in through unify
//cs.add(thetaPrime);
//PL 18-02-06 removed
@ -1158,20 +1551,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
for(UnifyType tqp : thetaQPrimes) {
Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes();
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent()) {
continue;
}
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier) {
if (!tphs.contains(sigma.getKey())) {//added PL 2019-02-02 Bug 127
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT,
//TODO: reconsider whether pair.getSubstitution() is correct here or whether it should be the empty set
//also check all following new UnifyPair instances PL 2018-04-19
pair.getSubstitution(), pair));
}
}
//List<UnifyType> freshTphs = new ArrayList<>(); PL 18-02-06 moved into the for loop
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq), new HashSet<>());
@ -1195,6 +1590,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)//IS THIS CORRECT??? PL 2018-12-12
freshTphs.add(PlaceholderType.freshPlaceholder());
freshTphs.forEach(x -> ((PlaceholderType)x).setInnerType(true));
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair));
}
@ -1236,6 +1632,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
((PlaceholderType)aPrime).disableWildcardtable();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
@ -1256,12 +1653,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
private Set<Set<UnifyPair>> unifyCase3(UnifyPair pair, IFiniteClosure fc) {
PlaceholderType a = (PlaceholderType) pair.getLhsType();
a.reversVariance();
SuperType subThetaPrime = (SuperType) pair.getRhsType();
byte variance = pair.getVariance();
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
((PlaceholderType)aPrime).setVariance(((PlaceholderType)a).getVariance());
((PlaceholderType)aPrime).disableWildcardtable();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
@ -1295,7 +1694,27 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
break;
}
for(UnifyType thetaS : fc.greater(theta, pair.getfBounded())) {
//added PL 2019-01-03 BEGIN
//fc.setLogTrue();
//writeLog("FBOUNDED: " + pair.getfBounded());
//writeLog("Pair: " + pair);
Set<UnifyType> greater = fc.greater(theta, pair.getfBounded());
//writeLog("GREATER: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " ");
if (a.isWildcardable()) {
Set<UnifyType> greater_ext = greater.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
.map(x -> {
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //variable renaming taken out
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //variables probably have to be preserved
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
return new SuperType (x);})//.accept(new freshPlaceholder(), hm));}
.collect(Collectors.toCollection(HashSet::new));
greater.addAll(greater_ext);
}
//added PL 2019-01-03 END
//for(UnifyType thetaS : fc.greater(theta, pair.getfBounded())) {
for(UnifyType thetaS : greater) {
Set<UnifyPair> resultPrime = new HashSet<>();
Match match = new Match();
@ -1303,7 +1722,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
for(int i = 0; !allGen && i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshTphs[i]).setVariance(((PlaceholderType)a).getVariance());
Set<UnifyType> fBounded = pair.getfBounded();
Set<UnifyType> fBounded = new HashSet<>(pair.getfBounded()); //PL 2019-01-09 new HashSet added
int i_ef = i;
BiFunction<Boolean,UnifyType,Boolean> f = (x,y) ->
@ -1317,7 +1736,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
BinaryOperator<Boolean> bo = (x,y) -> (x || y);
if (fBounded.stream().reduce(false,f,bo)) {
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
resultPrime.add(new UnifyPair(freshTphs[i], thetaS.getTypeParams().get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
}
else {
fBounded.add(thetaS.getTypeParams().get(i));
@ -1331,7 +1750,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog(resultPrime.toString());
//writeLog("FBOUNDED2: " + pair.getfBounded());
//writeLog("resultPrime Theta < a: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " " + resultPrime.toString());
}
return result;
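The f-bounded check a few lines above builds its test from a BiFunction accumulator and a BinaryOperator combiner and runs it through the three-argument Stream.reduce, which here amounts to an "any match" over fBounded. A minimal, self-contained sketch of that idiom (the String-based types and the startsWith predicate are illustrative stand-ins, not UnifyType logic):

import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;

public class ReduceAsAnyMatchSketch {
    public static void main(String[] args) {
        List<String> fBounded = Arrays.asList("Vector<A>", "List<B>", "Vector<Vector<A>>");

        // Accumulator: result so far OR "current element matches the predicate"
        BiFunction<Boolean, String, Boolean> f = (found, type) -> found || type.startsWith("Vector");
        // Combiner: only relevant for parallel streams, must be consistent with the accumulator
        BinaryOperator<Boolean> bo = (x, y) -> x || y;

        boolean anyVector = fBounded.stream().reduce(false, f, bo);
        System.out.println(anyVector); // true, same result as anyMatch(t -> t.startsWith("Vector"))
    }
}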
@ -1354,6 +1774,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
((PlaceholderType)freshTph).setVariance(a.getVariance());
((PlaceholderType)freshTph).disableWildcardtable();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getfBounded()));
@ -1407,11 +1828,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
void writeLog(String str) {
if (log) {
try {
logFile.write(str+"\n");
logFile.write(str+"\n\n");
logFile.flush();
}
catch (IOException e) { }
catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
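calculatePairSets above picks one oder-constraint to expand: first one that mentions a placeholder with non-zero variance, otherwise simply the first remaining one. A minimal, self-contained sketch of the nested-stream selection used in the variance case (the Integer-based model and withNonZeroVariance are illustrative stand-ins, not compiler types):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class OderConstraintSelectionSketch {
    // Model: an oder-constraint is a set of alternatives, each alternative a set of variance values.
    static List<Set<Set<Integer>>> withNonZeroVariance(List<Set<Set<Integer>>> oderConstraints) {
        return oderConstraints.stream()
                .filter(constraint -> constraint.stream()
                        .filter(alternative -> alternative.stream()
                                .filter(variance -> variance != 0)
                                .findFirst().isPresent())
                        .findFirst().isPresent())
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        Set<Set<Integer>> noVariance = new HashSet<>();
        noVariance.add(new HashSet<>(Arrays.asList(0)));

        Set<Set<Integer>> withVariance = new HashSet<>();
        withVariance.add(new HashSet<>(Arrays.asList(0)));
        withVariance.add(new HashSet<>(Arrays.asList(1, 0)));

        List<Set<Set<Integer>>> selected =
                withNonZeroVariance(new ArrayList<>(Arrays.asList(noVariance, withVariance)));
        System.out.println(selected.size()); // prints 1: only the constraint that mentions variance != 0
    }
}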

View File

@ -17,6 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
*/
public interface IFiniteClosure {
public void setLogTrue();
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.

View File

@ -1,5 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -30,6 +32,11 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
public class FiniteClosure //extends Ordering<UnifyType> //removed PL 2018-12-11
implements IFiniteClosure {
FileWriter logFile;
static Boolean log = false;
public void setLogTrue() {
log = true;
}
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
*/
@ -46,10 +53,21 @@ implements IFiniteClosure {
*/
private Set<UnifyPair> pairs;
/**
* Hashtable for the greater values, so that they are not computed twice
*/
Hashtable<hashKeyType, Set<UnifyType>> greaterHash = new Hashtable<>();
/**
* Hashtable for the smaller values, so that they are not computed twice
*/
Hashtable<hashKeyType, Set<UnifyType>> smallerHash = new Hashtable<>();
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs) {
public FiniteClosure(Set<UnifyPair> pairs, FileWriter logFile) {
this.logFile = logFile;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@ -98,12 +116,29 @@ implements IFiniteClosure {
*/
@Override
public Set<UnifyType> smaller(UnifyType type, Set<UnifyType> fBounded) {
Set<UnifyType> ret;
if ((ret = smallerHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
return new HashSet<>(ret);
}
if(type instanceof FunNType)
return computeSmallerFunN((FunNType) type, fBounded);
Set<Pair<UnifyType,Set<UnifyType>>> ts = new HashSet<>();
ts.add(new Pair<>(type, fBounded));
return computeSmaller(ts);
Set<UnifyType> result = computeSmaller(ts);
smallerHash.put(new hashKeyType(type), result);
/*
try {
logFile.write("\ntype: " + type + "\nret: " + ret + "\nresult: " + result);//"smallerHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");
}*/
return result;
}
/**
@ -197,6 +232,13 @@ implements IFiniteClosure {
//Added PL 2018-05-24 F-bounded problem
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded) {
Set<UnifyType> ret;
if ((ret = greaterHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
return new HashSet<>(ret);
}
if(type instanceof FunNType) {
return computeGreaterFunN((FunNType) type, fBounded);
}
@ -214,6 +256,17 @@ implements IFiniteClosure {
// if T <* T' then sigma(T) <* sigma(T')
Set<Node<UnifyType>> candidates = strInheritanceGraph.get(type.getName());
/*
try {
if (log) logFile.write(candidates.toString());
//log = false;
}
catch (IOException e) {
System.err.println("no LogFile");
}
*/
for(Node<UnifyType> candidate : candidates) {
UnifyType theta1 = candidate.getContent();
@ -222,9 +275,9 @@ implements IFiniteClosure {
termList.add(new UnifyPair(theta1,type, PairOperator.EQUALSDOT));
Optional<Unifier> optSigma = match.match(termList);
//PL 18-04-05 Unifier durch Matcher ersetzt ENDE
if(!optSigma.isPresent())
if(!optSigma.isPresent()) {
continue;
}
Unifier sigma = optSigma.get();
sigma.swapPlaceholderSubstitutionsReverse(theta1.getTypeParams());
@ -237,7 +290,15 @@ implements IFiniteClosure {
PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew));
}
}
/*
try {
if (log) logFile.write(PairResultFBounded.toString());
log = false;
}
catch (IOException e) {
System.err.println("no LogFile");
}
*/
for(Pair<UnifyType,Set<UnifyType>> pt : PairResultFBounded) {
UnifyType t = pt.getKey();
Set<UnifyType> lfBounded = pt.getValue().get();
@ -273,6 +334,16 @@ implements IFiniteClosure {
//System.out.println("");
}
}
greaterHash.put(new hashKeyType(type), result);
/*
try {
logFile.write("\ntype: " + type + "\nret: " + ret + "\nresult: " + result);//"greaterHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");
}*/
return result;
}
@ -607,9 +678,16 @@ implements IFiniteClosure {
return 0;
}
}
if ((right instanceof PlaceholderType) && (left instanceof WildcardType)) {
if (right instanceof PlaceholderType) {//&& (left instanceof WildcardType)) {PL deleted 2019-01-15, analogous to above
if ((left instanceof WildcardType) //PL added 2019-01-15, analogous to above
&& ((ex = ((WildcardType)left).wildcardedType) instanceof PlaceholderType)
&& ((PlaceholderType)right).getName().equals(((PlaceholderType)ex).getName())) {// ? extends a <. a or ? super a <. a
return 1;
}
else {
return 0;
}
}
UnifyPair up = new UnifyPair(left, right, pairop);
TypeUnifyTask unifyTask = new TypeUnifyTask();
HashSet<UnifyPair> hs = new HashSet<>();

View File

@ -30,7 +30,8 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
* in which compare(Theta, Theta') is called.
*/
public int compareEq (UnifyPair left, UnifyPair right) {
if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 replaced
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
}
else {
@ -54,6 +55,20 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
UnifyPair up;
if (left instanceof WildcardType || right instanceof WildcardType) {
up = new UnifyPair(left, right, PairOperator.SMALLERDOTWC);
if (((left instanceof ExtendsType)
&& (((ExtendsType)left).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)left).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)) ||
((right instanceof ExtendsType)
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
}
}
else {
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
@ -67,6 +82,19 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
else {
if (left instanceof WildcardType || right instanceof WildcardType) {
up = new UnifyPair(right, left, PairOperator.SMALLERDOTWC);
if (((left instanceof ExtendsType)
&& (((ExtendsType)left).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)left).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)) ||
((right instanceof ExtendsType)
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
}
if (right instanceof SuperType)
{
System.out.println("");
}
}
else {
up = new UnifyPair(right, left, PairOperator.SMALLERDOT);
@ -87,6 +115,16 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
* @see com.google.common.collect.Ordering#compare(java.lang.Object, java.lang.Object)
*/
public int compare (Set<UnifyPair> left, Set<UnifyPair> right) {
if ((left.size() == 1) && right.size() == 1) {
if (left.iterator().next().getLhsType().getName().equals("AFS")) {
System.out.println("");
}
if (((right.iterator().next().getRhsType() instanceof SuperType) && (((SuperType)right.iterator().next().getRhsType()).getSuperedType().getName().equals("java.lang.Object")))
||((left.iterator().next().getRhsType() instanceof SuperType) && (((SuperType)left.iterator().next().getRhsType()).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
}
}
Set<UnifyPair> lefteq = left.stream()
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
.collect(Collectors.toCollection(HashSet::new));
@ -110,17 +148,25 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(left.toString());
//Case 2 and 3
//Case 2
//if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
// System.out.print("");
//}
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
return 1;
}
//Case 2 and 3
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 1) {
//Case 2
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
return 1;
}
//Case 5
if (lefteq.size() == 1 && leftle.size() == 0 && righteq.size() == 1 && rightle.size() == 1) {
return -1;

View File

@ -46,6 +46,11 @@ public final class PlaceholderType extends UnifyType{
*/
private boolean wildcardable = true;
/**
* innerType indicates whether this PlaceholderType is used inside a type constructor
*/
private boolean innerType = false;
/**
* variance shows the variance of the pair
* -1: contravariant
@ -108,6 +113,15 @@ public final class PlaceholderType extends UnifyType{
return variance;
}
public void reversVariance() {
if (variance == 1) {
setVariance(-1);
} else {
if (variance == -1) {
setVariance(1);
}}
}
public Boolean isWildcardable() {
return wildcardable;
}
@ -115,6 +129,22 @@ public final class PlaceholderType extends UnifyType{
wildcardable = false;
}
public void enableWildcardtable() {
wildcardable = true;
}
public void setWildcardtable(Boolean wildcardable) {
this.wildcardable = wildcardable;
}
public Boolean isInnerType() {
return innerType;
}
public void setInnerType(Boolean innerType) {
this.innerType = innerType;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);
@ -157,7 +187,7 @@ public final class PlaceholderType extends UnifyType{
@Override
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.add(this);
return ret;

View File

@ -121,6 +121,10 @@ public class UnifyPair {
pairOp = po;
}
public void addSubstitutions(Set<UnifyPair> sup) {
substitution.addAll(sup);
}
public byte getVariance() {
return variance;
}
@ -152,6 +156,15 @@ public class UnifyPair {
return ret;
}
public Set<UnifyPair> getAllBases () {
Set<UnifyPair> ret = new HashSet<>();
if (basePair != null) {
ret.add(getBasePair());
ret.addAll(basePair.getAllBases());
}
return ret;
}
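getAllBases above collects the transitive chain of base pairs. A minimal, self-contained sketch of the same recursion over a parent link (Node and its basePair field are illustrative stand-ins, not UnifyPair):

import java.util.HashSet;
import java.util.Set;

public class BaseChainSketch {
    static class Node {
        final String name;
        final Node basePair; // may be null at the root of the chain
        Node(String name, Node basePair) { this.name = name; this.basePair = basePair; }

        Set<Node> getAllBases() {
            Set<Node> ret = new HashSet<>();
            if (basePair != null) {
                ret.add(basePair);
                ret.addAll(basePair.getAllBases());
            }
            return ret;
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root", null);
        Node mid = new Node("mid", root);
        Node leaf = new Node("leaf", mid);
        System.out.println(leaf.getAllBases().size()); // 2: mid and root
    }
}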
public UnifyPair getGroundBasePair () {
if (basePair == null) {
return this;
@ -206,12 +219,12 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString();
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType();
}
if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString();
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType();
}
return "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ", [" + getfBounded().toString()+ "])";
return "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
}
/*

View File

@ -97,7 +97,7 @@ public abstract class UnifyType {
return typeName + params;
}
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(typeParams.getInvolvedPlaceholderTypes());
return ret;

View File

@ -64,7 +64,7 @@ public abstract class WildcardType extends UnifyType {
@Override
public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
return ret;

View File

@ -0,0 +1,25 @@
package de.dhbwstuttgart.typeinference.unify.model;
public class hashKeyType {
UnifyType realType;
hashKeyType(UnifyType realType) {
this.realType= realType;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof hashKeyType) {
return realType.equals(((hashKeyType)obj).realType);
}
else
{
return false;
}
}
@Override
public int hashCode() {
return realType.hashCode();
}
}
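hashKeyType only delegates equals and hashCode to the wrapped UnifyType, so that types can act as keys of the greaterHash/smallerHash caches added to FiniteClosure above. A minimal, self-contained sketch of that memoization pattern, assuming nothing beyond the JDK (TypeKey, lookup and the String-based types are illustrative stand-ins, not compiler classes):

import java.util.HashSet;
import java.util.Hashtable;
import java.util.Set;

public class GreaterCacheSketch {
    // Wrapper analogous to hashKeyType: equality and hash are based solely on the wrapped value.
    static class TypeKey {
        final String type;
        TypeKey(String type) { this.type = type; }
        @Override public boolean equals(Object o) {
            return (o instanceof TypeKey) && ((TypeKey) o).type.equals(type);
        }
        @Override public int hashCode() { return type.hashCode(); }
    }

    private final Hashtable<TypeKey, Set<String>> greaterHash = new Hashtable<>();

    Set<String> greater(String type) {
        Set<String> cached = greaterHash.get(new TypeKey(type));
        if (cached != null) {
            return new HashSet<>(cached); // defensive copy, as in FiniteClosure.greater/smaller
        }
        Set<String> result = lookup(type); // stands in for the expensive finite-closure traversal
        greaterHash.put(new TypeKey(type), result);
        return result;
    }

    private Set<String> lookup(String type) {
        Set<String> supertypes = new HashSet<>();
        supertypes.add("java.lang.Object");
        return supertypes;
    }

    public static void main(String[] args) {
        GreaterCacheSketch fc = new GreaterCacheSketch();
        System.out.println(fc.greater("java.util.Vector")); // computed
        System.out.println(fc.greater("java.util.Vector")); // served from greaterHash
    }
}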

View File

@ -32,11 +32,11 @@ public class FieldTphConsMethTest {
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FieldTphConsMeth");
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();
}
@Test
public void test() throws Exception {
instanceOfClass = classToTest.getConstructor(Object.class).newInstance("C");
Field a = classToTest.getDeclaredField("a");
a.setAccessible(true);
@ -44,6 +44,7 @@ public class FieldTphConsMethTest {
Object result = m.invoke(instanceOfClass, 42);
assertEquals(42,result);
assertEquals("C", a.get(instanceOfClass));
}
}

View File

@ -0,0 +1,53 @@
package bytecode;
import static org.junit.Assert.*;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import org.junit.BeforeClass;
import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class FieldTphMMethTest {
private static String path;
private static File fileToTest;
private static JavaTXCompiler compiler;
private static ClassLoader loader;
private static Class<?> classToTest;
private static String pathToClassFile;
private static Object instanceOfClass;
private static Object instanceOfClass2;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/FieldTphMMeth.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FieldTphMMeth");
}
@Test
public void test() throws Exception {
instanceOfClass = classToTest.getConstructor(Object.class).newInstance("C",42,true);
instanceOfClass2 = classToTest.getConstructor(Object.class).newInstance("C",42,false);
Field a = classToTest.getDeclaredField("a");
a.setAccessible(true);
Method m = classToTest.getDeclaredMethod("m", Object.class);
Object result = m.invoke(instanceOfClass, 42);
assertEquals(42,result);
assertEquals("C", a.get(instanceOfClass));
assertEquals(42, a.get(instanceOfClass2));
}
}
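The field tests above read a private field of a generated class reflectively via getDeclaredField and setAccessible. A minimal, self-contained sketch of that access pattern (Sample and its field a are illustrative, not a generated class):

import java.lang.reflect.Field;

public class PrivateFieldAccessSketch {
    public static class Sample {
        private Object a = "C";
    }

    public static void main(String[] args) throws Exception {
        Object instance = new Sample();
        Field a = instance.getClass().getDeclaredField("a"); // getDeclaredField also finds private fields
        a.setAccessible(true);                               // required before reading a private field
        System.out.println(a.get(instance));                 // prints C
    }
}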

View File

@ -4,6 +4,7 @@ import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
@ -27,7 +28,7 @@ public class MatrixOpTest {
private static Object instanceOfClass_m3;
@Test
public void test() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, ClassNotFoundException, IOException, InstantiationException {
public void test() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, ClassNotFoundException, IOException, InstantiationException, NoSuchFieldException {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/MatrixOP.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
@ -35,7 +36,7 @@ public class MatrixOpTest {
compiler.generateBytecode(pathToClassFile);
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("MatrixOP");
/*
Vector<Vector<Integer>> vv = new Vector<Vector<Integer>>();
Vector<Integer> v1 = new Vector<Integer> ();
v1.addElement(2);
@ -48,6 +49,7 @@ public class MatrixOpTest {
//m1.addElement(v2);
vv.addElement(v1);
vv.addElement(v2);
instanceOfClass_m1 = classToTest.getDeclaredConstructor(Vector.class).newInstance(vv); //Matrix m1 = new Matrix(vv);
Vector<Vector<Integer>> vv1 = new Vector<Vector<Integer>>();
@ -62,13 +64,23 @@ public class MatrixOpTest {
//m2.addElement(v4);
vv1.addElement(v3);
vv1.addElement(v4);
instanceOfClass_m2 = classToTest.getDeclaredConstructor(Vector.class).newInstance(vv1);//Matrix m2 = new Matrix(vv1);
//Matrix m3 = m1.mul(vv1);
Method mul = classToTest.getDeclaredMethod("mul", Vector.class);
Object result = mul.invoke(instanceOfClass_m1, instanceOfClass_m2);
// Method mul = classToTest.getDeclaredMethod("mul", Vector.class);
// Object result = mul.invoke(instanceOfClass_m1, instanceOfClass_m2);
Field mul = classToTest.getField("mul");
mul.setAccessible(true);
Class<?> lambda = mul.get(instanceOfClass_m1).getClass();
Method apply = lambda.getMethod("apply", Object.class,Object.class);
// so that the apply method of the lambda can be accessed
apply.setAccessible(true);
Object result = apply.invoke(mul.get(instanceOfClass_m1),instanceOfClass_m1, instanceOfClass_m2);
System.out.println(instanceOfClass_m1.toString() + " * " + instanceOfClass_m2.toString() + " = " + result.toString());
Vector<Vector<Integer>> res = new Vector<Vector<Integer>>();
@ -85,7 +97,7 @@ public class MatrixOpTest {
res.addElement(v6);
instanceOfClass_m3 = classToTest.getDeclaredConstructor(Vector.class).newInstance(res);
assertEquals(result, instanceOfClass_m3);
*/
}
}
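The reworked MatrixOpTest no longer calls a mul method; it reads the public mul field, which now holds a lambda, and invokes that lambda's apply method reflectively. A minimal, self-contained sketch of the pattern (Holder and its BiFunction field are illustrative stand-ins, not the generated MatrixOP class):

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.function.BiFunction;

public class ReflectiveLambdaCallSketch {
    public static class Holder {
        public BiFunction<Integer, Integer, Integer> mul = (x, y) -> x * y;
    }

    public static void main(String[] args) throws Exception {
        Object instance = new Holder();
        Field mul = instance.getClass().getField("mul");      // public field holding the lambda
        Object lambda = mul.get(instance);
        Method apply = lambda.getClass().getMethod("apply", Object.class, Object.class);
        apply.setAccessible(true);                             // the synthetic lambda class is not public
        Object result = apply.invoke(lambda, 6, 7);
        System.out.println(result);                            // prints 42
    }
}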

View File

@ -3,16 +3,17 @@ package bytecode;
import static org.junit.Assert.*;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Vector;
import org.junit.BeforeClass;
import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class FieldTph {
public class VectorSuperTest {
private static String path;
private static File fileToTest;
@ -24,20 +25,21 @@ public class FieldTph {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/FieldTph.jav";
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/VectorSuper.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
compiler.generateBytecode(pathToClassFile);
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FieldTph");
classToTest = loader.loadClass("VectorSuper");
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();
}
@Test
public void test() {
Field[] fields = classToTest.getFields();
assertEquals(1, fields.length);
}
public void test1() throws Exception {
Method m = classToTest.getDeclaredMethod("m", Vector.class);
//Object result = m.invoke(instanceOfClass, 1);
//assertEquals(1,result);
}
}

View File

@ -26,8 +26,8 @@ public class YTest {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/Y.jav";
fileToTest = new File(path);
// compiler = new JavaTXCompiler(fileToTest);
// compiler.generateBytecode(System.getProperty("user.dir")+"/testBytecode/generatedBC/");
// pathToClassFile = System.getProperty("user.dir")+"/testBytecode/generatedBC/";
// compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
// pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
// loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
// classToTest = loader.loadClass("Y");
/*

View File

@ -1,9 +0,0 @@
import java.lang.Integer;
import java.lang.Long;
public class AddLong{
Long add(Integer a, Long b) {
Long c = a+b;
return c;
}
}

View File

@ -1,3 +0,0 @@
public class EmptyClass{
}

View File

@ -1,37 +0,0 @@
public class EmptyMethod{
static String s1 ="";
String s2;
public void m1(){
//String s = "";
System.out.println("test");
//Integer ab = Math.abs(1);
//Math.abs(1);
//String lV = "local";
//s1 = "1";
//s1.concat("2");
s2 = s1;
//m2();
Clazz i = new Clazz();
Integer i = new Integer(1);
}
public void m2(){}
}
class Clazz{}
/*
public class EmptyMethod2{
public static test = "5";
public void m1(Integer i, String j, Boolean b){
//String s = "";
EmptyMethod em = new EmptyMethod();
em.m1();
em.s1 = "";
//Integer ab = Math.abs(1);
//Math.abs(1);
//String lV = "local";
//s1 = "1";
//s1.concat("2");
//s2 = s1;
}
}*/

View File

@ -1,8 +0,0 @@
class Expressions{
void test(){
var x = 2;
x = x + 2;
}
}

View File

@ -1,10 +0,0 @@
import java.util.Vector;
class Matrix extends Vector<Vector<Integer>> {
methode(m) {
m.add(1);
Matrix i;
methode(i);
}
}

View File

@ -1,15 +0,0 @@
import java.lang.Integer;
class Faculty {
Integer mul(Integer x, Integer y) {
return x;
}
m () {
var fact = (Integer x) -> {
return mul(x, fact.apply(x));
};
return fact;
}
}

View File

@ -1,17 +0,0 @@
import java.lang.Integer;
class Faculty {
m () {
var fact = (Integer x) -> {
if (x == 1) {
return x;
}
else {
return x * (fact.apply(x-1));
}
};
return fact;
}
}

View File

@ -1,19 +0,0 @@
import java.lang.Integer;
class Faculty {
Integer mul(Integer x, Integer y) {
return x;
}
Fun1<java.lang.Integer,java.lang.Integer> m () {
var fact = (Integer x) -> {
return mul(x, fact.apply(x));
};
return fact;
}
}
interface Fun1<A,B>{
B apply(A a);
}

View File

@ -1,11 +0,0 @@
import java.lang.String;
class Fields{
test2 = "test";
test;
m(){
var test3;
return test;
}
}

View File

@ -1,22 +0,0 @@
import java.lang.String;
class Generics<B> {
//<A extends B> A mt1(A a, B b){
B mt1(B a, B b){
return mt1(a, a);
}
}
class Test {
methode(String s){
return new Generics<String>().mt1(s,s);
}
}
/*
Problem:
auto test = new List<String>();
auto test2 = new List<Integer>();
... //code which may swap test and test2
test.add("hallo");
*/

View File

@ -1,14 +0,0 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.lang.Object;
public class IfTest{
Object m1(b) {
Integer i;
if(b) {
return i;
}else{
return b;
}
}
}

View File

@ -1,8 +0,0 @@
import java.util.Vector;
class Import {
void methode(){
Vector v;
v.add(v);
}
}

View File

@ -1,13 +0,0 @@
class Apply { }
public class Lambda {
m () {
var lam1 = (x) -> {
return x;
};
return lam1.apply(new Apply());
}
}

View File

@ -1,33 +0,0 @@
import java.lang.String;
public class Lambda2
{
public static void main(List<String> args){
var listOfStrings = new List<String>();
var listOfObjects;
listOfObjects = map(listOfStrings, (a) -> a);
}
public map(a , b){
b.apply(a);
return a;
}
/*
public static <I,O> List<O> map(List<I> input, Function<I,O> func) {
List<O> output;
output = new List<O>();
output.add(func.apply(input.get()));
return output;
}
*/
}
class List<A>{
A get();
void add(A);
}
class Function<A,B>{
B apply(A a);
}

View File

@ -1,24 +0,0 @@
import java.lang.String;
public class Lambda2
{
/*
public static <A> List<A> map(List<? extends A> input,
Function<? super A, ? extends A> func){
input.add(func.apply(input.get()));
}
*/
public map(input,func){
input.add(func.apply(input.get()));
return map(new List<String>(), func);
}
}
class List<A>{
A get();
void add(A);
}
class Function<A,B>{
B apply(A a);
}

View File

@ -1,6 +0,0 @@
public class LambdaField {
f = x -> x;
}

View File

@ -1,14 +0,0 @@
import java.lang.Runnable;
import java.lang.String;
import java.lang.System;
public class LamRunnable{
public LamRunnable(){
Runnable lam = () -> {System.out.println("lambda");};
lam.run();
}
}

View File

@ -1,20 +0,0 @@
import java.lang.Integer;
import java.lang.String;
class ListenerOverload{
call(p){
call(p.left);
call(p.right);
}
call(Integer i){}
call(String s){}
}
class Pair<A,B>{
A left;
B right;
}

View File

@ -1,28 +0,0 @@
import java.util.Vector;
import java.lang.Integer;
class Matrix extends Vector<Vector<Integer>> {
Integer mul1(Integer x, Integer y) { return x;}
Integer add1(Integer x, Integer y) { return x;}
mul(m) {
var ret = new Matrix();
var i = 0;
while(i < size()) {
var v1 = this.elementAt(i);
var v2 = new Vector<Integer>();
var j = 0;
while(j < v1.size()) {
var erg = 0;
var k = 0;
while(k < v1.size()) {
erg = erg + v1.elementAt(k) * m.elementAt(k).elementAt(j);
//erg = add1(erg, mul1(v1.elementAt(k),
// m.elementAt(k).elementAt(j)));
k++; }
v2.addElement(new Integer(erg));
j++; }
ret.addElement(v2);
i++; }
return ret;
}
}

View File

@ -1,11 +0,0 @@
class Meth_Gen {
m1(x, y) {
m2(x);
x = y;
}
m2(y) {
m1(y, y);
}
}

View File

@ -1,14 +0,0 @@
import java.lang.String;
class Generics<B> {
//<A extends B> A mt1(A a, B b){
B mt1(B a, B b){
return mt1(a, a);
}
}
class Test {
methode(String s){
return new Generics<String>().mt1(s,s);
}
}

View File

@ -1,17 +0,0 @@
class Methods {
mt4(a,b,c) { return a.add(b).sub(c) ; }
mt1(a) {return a;}
mt2(a) {return a.f; }
mt3(a) {return a.add(); }
}
class Test {
java.lang.Object f;
add(){}
add(b){return b;}
sub(b){}
}

View File

@ -1,7 +0,0 @@
class Methods {
mt4(a,b,c) { return a.mt3(b).mt3(c) ; }
mt3(a) {return a.mt3(a); }
}

View File

@ -1,11 +0,0 @@
public class Op1{
public Op1() {
Runnable lam = () -> {
String test = "";
String b = "b";
test = b;
System.out.println(test);};
//lam.run();
}
}

View File

@ -1,5 +0,0 @@
package strucType.input;
class Neu
{
}

View File

@ -1,8 +0,0 @@
import java.util.List;
import java.util.Collection;
class Sorting{
void merge(a, b){
a.addAll(b);
}
}

View File

@ -1,6 +0,0 @@
public class Subclass extends Superclass {
public void printMethod() {
super.printMethod();
}
}

View File

@ -1,6 +0,0 @@
public class Superclass {
public void printMethod() {
System.out.println("Printed in Superclass.");
}
}

View File

@ -1,23 +0,0 @@
import java.util.ArrayList;
import java.util.Vector;
import java.lang.Object;
class MyVector{
id(x){
Object i;
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
x.add(i);
return x;
}
}

View File

@ -1,18 +0,0 @@
import java.util.List;
class Test{
methode(param1, param2, param3) {
param2.add(param3);
return param1.meth(param2);
}
}
interface Klasse1{
Klasse1 meth(List p);
Klasse1 meth(Klasse2 p);
}
interface Klasse2{
Klasse1 meth(Klasse1 p);
Klasse2 meth(Klasse2 p);
}

View File

@ -1,13 +0,0 @@
class mathStruc<A> {
mathStruc(A a) { }
A model(){ A a; return a; }
methode(){
var innerOp = o -> ms ->
new mathStruc<A>(o.apply(this.model(),ms.model()));
return innerOp;
}
}

View File

@ -1,15 +0,0 @@
class Test{
methode(param1, param2, param3) {
return param1.meth(param2.meth(param3));
}
}
interface Klasse1{
Klasse1 meth(Klasse1 p);
Klasse1 meth(Klasse2 p);
}
interface Klasse2{
Klasse1 meth(Klasse1 p);
Klasse2 meth(Klasse2 p);
}

View File

@ -1,7 +0,0 @@
class Faculty {
int a;
m (int x) {
return a+x;
}
}

View File

@ -0,0 +1,24 @@
public class FieldTphMMeth {
a;
public FieldTphMMeth(c,d,e) {
a = m(c,d,e);
}
m(b,d,e) {
if(e) {
return b;
} else{
m2(d);
}
}
m2(b) {
a = m3(b);
}
m3(b){
return b;
}
}

View File

@ -2,7 +2,7 @@ import java.util.Vector;
import java.lang.Integer;
//import java.lang.Float;
//import java.lang.Byte;
import java.lang.Boolean;
//import java.lang.Boolean;
public class Matrix extends Vector<Vector<Integer>> {

View File

@ -18,7 +18,7 @@ public class MatrixOP extends Vector<Vector<Integer>> {
}
}
mul = (m1, m2) -> {
public mul = (m1, m2) -> {
var ret = new MatrixOP();
var i = 0;
while(i < size()) {

View File

@ -1,6 +1,6 @@
import java.util.List;
import java.lang.Integer;
import java.util.Collection;
//import java.util.Collection;
class Merge {

View File

@ -0,0 +1,11 @@
import java.util.Vector;
import java.lang.Integer;
public class VectorSuper {
m(x){
Integer y = 1;
x.addElement(y);
//return x;
}
}

View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'>
<appender name="CONSOLE" class="de.dhbwstuttgart.logger.ConsoleAppender">
<param name="Target" value="System.out"/>
<layout class="de.dhbwstuttgart.logger.PatternLayout">
<param name="ConversionPattern" value="%-15C{1} %-5p [%-9c] %m%n"/>
</layout>
</appender>
<logger name="trtest">
<level value="DEBUG"/>
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="funcTest">
<level value="DEBUG"/>
<appender-ref ref="CONSOLE"/>
</logger>
</log4j:configuration>

View File

@ -1,6 +0,0 @@
public class Field{
public void m(){
MethFieldVar mF = new MethFieldVar();
mF.s1 = "Field S1";
}
}

View File

@ -1,8 +0,0 @@
import java.util.Vector;
class Import {
void methode(){
Vector v = new Vector();
v.add(v);
}
}

View File

@ -1,10 +0,0 @@
import java.util.function.Function;
public class Lam1{
public Lam1() {
Function<String,String> fun = (x) -> x+"1";
fun.apply("2");
Runnable lam = () -> System.out.println("lambda");
lam.run();
}
}

View File

@ -1,8 +0,0 @@
public class LamRun{
public void mRun(){
Runnable lam = () -> System.out.println("lambda");
lam.run();
}
}

View File

@ -1,38 +0,0 @@
public class MethFieldVar{
String s1;// = "";
String s2;
/* public void meth(Integer i, String j, Boolean b){
//String local = "a";
//int localL = local.length();
//int l = s.length();
String s = null;
//s = "";
//return s.length();//l+localL;
}
*/
public void mm(){
// return "mm";
}
public void m2(){
System.out.println("");
// Math.abs(1);
// String lV = "local";
// s1 = "1";
// s1.concat("2");
s2 = s1;
mm();
Clazz i = new Clazz();
Runnable lam = ()->{
String test = "";
String b = "b";
test = b;
System.out.println(test);
};
}
}
class Clazz{}

View File

@ -1,6 +0,0 @@
public class Subclass extends Superclass {
public void printMethod() {
super.printMethod();
}
}

View File

@ -1,14 +0,0 @@
public class Superclass {
public void printMethod() {
System.out.println("Printed in Superclass.");
}
}
public class Subclass extends Superclass {
public void printMethod() {
super.printMethod();
}
}

View File

@ -1,6 +0,0 @@
public class Superclass {
public void printMethod() {
System.out.println("Printed in Superclass.");
}
}

View File

@ -1,11 +0,0 @@
class TestMyTest{
public static void main(String[] a){
//test1
//new TestClass();
//test if statement
//new TestIf(new Boolean(true));
// test lambda
//new TestClass();
new LamRun();
}
}

View File

@ -0,0 +1,6 @@
class Fuc1{
Integer mul(Integer x, Integer y) {
return x;
}
}

View File

@ -0,0 +1,14 @@
import java.util.function.Function;
class Fac2 {
Integer mul(Integer x, Integer y) {
return x;
}
Function<Integer,Integer> m () {
Function<Integer,Integer> fact = (Integer x) -> {
return mul(x, x);
};
return fact;
}
}

View File

@ -0,0 +1,10 @@
import java.util.function.Function;
class LamAssign {
Function<Integer,Integer> m () {
Function<Integer,Integer> lam1 = (Integer x) -> {
return x;
};
return lam1;
}
}

View File

@ -0,0 +1,12 @@
import java.util.function.Function;
class LamAssignWithM {
Integer mul(Integer x, Integer y) {
return x;
}
Function<Integer,Integer> m () {
Function<Integer,Integer> lam1 = (Integer x) -> {
return mul(x,x);
};
return lam1;
}
}

View File

@ -0,0 +1,14 @@
import java.util.function.Function;
class LamWithAnField {
Integer mul(Integer x, Integer y) {
return x;
}
LamWithField temp= new LamWithField();
Function<Integer,Integer> m () {
Function<Integer,Integer> lam1 = (Integer x) -> {
return temp.res*x;
};
return lam1;
}
}

View File

@ -0,0 +1,14 @@
import java.util.function.Function;
class LamWithField {
Integer mul(Integer x, Integer y) {
return x;
}
Integer res = new Integer(5);
Function<Integer,Integer> m () {
Function<Integer,Integer> lam1 = (Integer x) -> {
return res*x;
};
return lam1;
}
}

View File

@ -0,0 +1,6 @@
class ReturnM1{
Integer r;
Integer mul(Integer x, Integer y) {
return r;
}
}

View File

@ -1,5 +0,0 @@
public class testTets(){
public static void main(String[] args){
new tetsF();
}
}

View File

@ -1,5 +0,0 @@
public class testTets{
public static void main(String[] args){
new TetsF();
}
}

View File

@ -1,5 +0,0 @@
public class testTets(){
public static void main(String[] args){
new tetsF();
}
}